Camera2 API: java.lang.IllegalArgumentException: Surface had no valid native Surface
I have been trying for a long time to integrate the Camera2 API into my app. It works fine for capturing an image the first time, but when I snap a second time the preview does not come back. I tested it on the Genymotion Nexus 5 emulator and tried all of the examples I could find, but the preview never returns for the second shot. I am also getting this error: java.lang.IllegalArgumentException: Surface had no valid native Surface...
I followed these two code samples: http://inducesmile.com/android/android-camera2-api-example-tutorial/?cid=519 and Github-Camera2Master. Can anyone help me resolve this error, and share links to a more detailed explanation of the Camera2 API?
package com.example.cameraapi;
import android.Manifest;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.HandlerThread;
import android.support.annotation.NonNull;
import android.support.v4.app.ActivityCompat;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class AndroidCameraApi extends AppCompatActivity {
private static final String TAG = "AndroidCameraApi";
private Button takePictureButton;
private TextureView textureView;
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
private String cameraId;
protected CameraDevice cameraDevice;
protected CameraCaptureSession cameraCaptureSessions;
protected CaptureRequest captureRequest;
protected CaptureRequest.Builder captureRequestBuilder;
private Size imageDimension;
private ImageReader imageReader;
private File file;
private static final int REQUEST_CAMERA_PERMISSION = 200;
private boolean mFlashSupported;
private Handler mBackgroundHandler;
private HandlerThread mBackgroundThread;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_camera);
textureView = (TextureView) findViewById(R.id.textureView);
assert textureView != null;
textureView.setSurfaceTextureListener(textureListener);
takePictureButton = (Button) findViewById(R.id.btn_takepicture);
assert takePictureButton != null;
takePictureButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
takePicture();
}
});
}
TextureView.SurfaceTextureListener textureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
//open your camera here
openCamera();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
// Transform you image captured size according to the surface width and height
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
};
private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
//This is called when the camera is open
Log.e(TAG, "onOpened");
cameraDevice = camera;
createCameraPreview();
}
@Override
public void onDisconnected(CameraDevice camera) {
cameraDevice.close();
}
@Override
public void onError(CameraDevice camera, int error) {
if(cameraDevice!=null)
cameraDevice.close();
cameraDevice = null;
}
};
final CameraCaptureSession.CaptureCallback captureCallbackListener = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
// Toast.makeText(AndroidCameraApi.this, "Saved:" + file, Toast.LENGTH_SHORT).show();
createCameraPreview();
}
};
protected void startBackgroundThread() {
mBackgroundThread = new HandlerThread("Camera Background");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
protected void stopBackgroundThread() {
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
protected void takePicture() {
if(null == cameraDevice) {
Log.e(TAG, "cameraDevice is null");
return;
}
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraDevice.getId());
Size[] jpegSizes = null;
if (characteristics != null) {
jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(ImageFormat.JPEG);
}
int width = 640;
int height = 480;
if (jpegSizes != null && 0 < jpegSizes.length) {
width = jpegSizes[0].getWidth();
height = jpegSizes[0].getHeight();
}
ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
List<Surface> outputSurfaces = new ArrayList<Surface>(2);
outputSurfaces.add(reader.getSurface());
outputSurfaces.add(new Surface(textureView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
// Orientation
int rotation = getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
final File file = new File(Environment.getExternalStorageDirectory()+"/pic.jpg");
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = null;
try {
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
save(bytes);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
if (image != null) {
image.close();
}
}
}
private void save(byte[] bytes) throws IOException {
OutputStream output = null;
try {
output = new FileOutputStream(file);
output.write(bytes);
} finally {
if (null != output) {
output.close();
}
}
}
};
reader.setOnImageAvailableListener(readerListener, mBackgroundHandler);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
// Toast.makeText(AndroidCameraApi.this, "Saved:" + file, Toast.LENGTH_SHORT).show();
createCameraPreview();
}
};
cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
session.capture(captureBuilder.build(), captureListener, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
protected void createCameraPreview() {
try {
SurfaceTexture texture = textureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(imageDimension.getWidth(), imageDimension.getHeight());
Surface surface = new Surface(texture);
captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureRequestBuilder.addTarget(surface);
cameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback(){
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
//The camera is already closed
if (null == cameraDevice) {
return;
}
// When the session is ready, we start displaying the preview.
cameraCaptureSessions = cameraCaptureSession;
updatePreview();
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
Toast.makeText(AndroidCameraApi.this, "Configuration change", Toast.LENGTH_SHORT).show();
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void openCamera() {
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
Log.e(TAG, "is camera open");
try {
cameraId = manager.getCameraIdList()[0];
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
assert map != null;
imageDimension = map.getOutputSizes(SurfaceTexture.class)[0];
// Add permission for camera and let user grant the permission
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED && ActivityCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(AndroidCameraApi.this, new String[]{Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE}, REQUEST_CAMERA_PERMISSION);
return;
}
manager.openCamera(cameraId, stateCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
Log.e(TAG, "openCamera X");
}
protected void updatePreview() {
if(null == cameraDevice) {
Log.e(TAG, "updatePreview error, return");
}
captureRequestBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
try {
cameraCaptureSessions.setRepeatingRequest(captureRequestBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void closeCamera() {
if (null != cameraDevice) {
cameraDevice.close();
cameraDevice = null;
}
if (null != imageReader) {
imageReader.close();
imageReader = null;
}
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
if (requestCode == REQUEST_CAMERA_PERMISSION) {
if (grantResults[0] == PackageManager.PERMISSION_DENIED) {
// close the app
Toast.makeText(AndroidCameraApi.this, "Sorry!!!, you can't use this app without granting permission", Toast.LENGTH_LONG).show();
finish();
}
}
}
@Override
protected void onResume() {
super.onResume();
Log.e(TAG, "onResume");
startBackgroundThread();
if (textureView.isAvailable()) {
openCamera();
} else {
textureView.setSurfaceTextureListener(textureListener);
}
}
@Override
protected void onPause() {
Log.e(TAG, "onPause");
//closeCamera();
stopBackgroundThread();
super.onPause();
}
}
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical"
>
<TextureView
android:id="@+id/textureView"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_above="@+id/btn_takepicture"
android:layout_alignParentTop="true"/>
<Button
android:id="@+id/btn_takepicture"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentBottom="true"
android:layout_centerHorizontal="true"
android:layout_marginBottom="16dp"
android:layout_marginTop="16dp"
android:text="@string/take_picture" />
</RelativeLayout>
I get the error below when I start capturing more images; it fails on the second image itself.
09-30 07:36:43.199 6252-6305/com.example.cameraapi E/Legacy-CameraDevice-JNI: LegacyCameraDevice_nativeGetSurfaceId: Could not retrieve native Surface from surface.
09-30 07:36:43.199 6252-6305/com.example.cameraapi E/AndroidRuntime: FATAL EXCEPTION: Thread-275
Process: com.example.cameraapi, PID: 6252
java.lang.IllegalArgumentException: Surface had no valid native Surface.
at android.hardware.camera2.legacy.LegacyCameraDevice.nativeGetSurfaceId(Native Method)
at android.hardware.camera2.legacy.LegacyCameraDevice.getSurfaceId(LegacyCameraDevice.java:658)
at android.hardware.camera2.legacy.LegacyCameraDevice.containsSurfaceId(LegacyCameraDevice.java:678)
at android.hardware.camera2.legacy.RequestThreadManager$2.onPictureTaken(RequestThreadManager.java:220)
at android.hardware.Camera$EventHandler.handleMessage(Camera.java:1092)
at android.os.Handler.dispatchMessage(Handler.java:102)
at android.os.Looper.loop(Looper.java:148)
at android.hardware.camera2.legacy.CameraDeviceUserShim$CameraLooper.run(CameraDeviceUserShim.java:136)
at java.lang.Thread.run(Thread.java:818)
09-30 07:36:43.373 6252-6252/com.example.cameraapi E/AndroidCameraApi: onPause
09-30 07:36:43.463 6252-6292/com.example.cameraapi E/Surface: getSlotFromBufferLocked: unknown buffer: 0xe05090e0
09-30 07:36:43.475 6252-6292/com.example.cameraapi D/OpenGLRenderer: endAllStagingAnimators on 0xe8b85100 (RippleDrawable) with handle 0xdf9d43d0
09-30 07:36:47.201 6252-6313/com.example.cameraapi E/RequestThread-0: Hit timeout for jpeg callback!
09-30 07:36:47.202 6252-6313/com.example.cameraapi I/CameraDeviceState: Legacy camera service transitioning to state IDLE
09-30 07:36:47.203 6252-6252/com.example.cameraapi W/MessageQueue: Handler (android.os.Handler) {ef17f10} sending message to a Handler on a dead thread
java.lang.IllegalStateException: Handler (android.os.Handler) {ef17f10} sending message to a Handler on a dead thread
at android.os.MessageQueue.enqueueMessage(MessageQueue.java:543)
at android.os.Handler.enqueueMessage(Handler.java:631)
at android.os.Handler.sendMessageAtTime(Handler.java:600)
at android.os.Handler.sendMessageDelayed(Handler.java:570)
at android.os.Handler.post(Handler.java:326)
at android.hardware.camera2.dispatch.HandlerDispatcher.dispatch(HandlerDispatcher.java:61)
at android.hardware.camera2.dispatch.MethodNameInvoker.invoke(MethodNameInvoker.java:88)
at android.hardware.camera2.dispatch.DuckTypingDispatcher.dispatch(DuckTypingDispatcher.java:53)
at android.hardware.camera2.dispatch.ArgumentReplacingDispatcher.dispatch(ArgumentReplacingDispatcher.java:74)
at android.hardware.camera2.dispatch.BroadcastDispatcher.dispatch(BroadcastDispatcher.java:54)
at android.hardware.camera2.dispatch.MethodNameInvoker.invoke(MethodNameInvoker.java:88)
at android.hardware.camera2.impl.CallbackProxies$DeviceCaptureCallbackProxy.onCaptureCompleted(CallbackProxies.java:121)
at android.hardware.camera2.impl.CameraDeviceImpl$CameraDeviceCallbacks$4.run(CameraDeviceImpl.java:1828)
at android.os.Handler.handleCallback(Handler.java:739)
at android.os.Handler.dispatchMessage(Handler.java:95)
at android.os.Looper.loop(Looper.java:148)
at android.app.ActivityThread.main(ActivityThread.java:5417)
at java.lang.reflect.Method.invoke(Native Method)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:726)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:616)
09-30 07:36:47.204 6252-6252/com.example.cameraapi W/MessageQueue: Handler (android.os.Handler) {ef17f10} sending message to a Handler on a dead thread
java.lang.IllegalStateException: Handler (android.os.Handler) {ef17f10} sending message to a Handler on a dead thread
at android.os.MessageQueue.enqueueMessage(MessageQueue.java:543)
at android.os.Handler.enqueueMessage(Handler.java:631)
at android.os.Handler.sendMessageAtTime(Handler.java:600)
at android.os.Handler.sendMessageDelayed(Handler.java:570)
at android.os.Handler.post(Handler.java:326)
at android.hardware.camera2.dispatch.HandlerDispatcher.dispatch(HandlerDispatcher.java:61)
at android.hardware.camera2.dispatch.MethodNameInvoker.invoke(MethodNameInvoker.java:88)
at android.hardware.camera2.dispatch.DuckTypingDispatcher.dispatch(DuckTypingDispatcher.java:53)
at android.hardware.camera2.dispatch.ArgumentReplacingDispatcher.dispatch(ArgumentReplacingDispatcher.java:74)
at android.hardware.camera2.dispatch.BroadcastDispatcher.dispatch(BroadcastDispatcher.java:54)
at android.hardware.camera2.dispatch.MethodNameInvoker.invoke(MethodNameInvoker.java:88)
at android.hardware.camera2.impl.CallbackProxies$DeviceCaptureCallbackProxy.onCaptureSequenceCompleted(CallbackProxies.java:133)
at android.hardware.camera2.impl.CameraDeviceImpl$10.run(CameraDeviceImpl.java:1588)
at android.os.Handler.handleCallback(Handler.java:739)
at android.os.Handler.dispatchMessage(Handler.java:95)
at android.os.Looper.loop(Looper.java:148)
at android.app.ActivityThread.main(ActivityThread.java:5417)
at java.lang.reflect.Method.invoke(Native Method)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:726)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:616)
[ 09-30 07:36:48.448 6252: 6257 D/ ]
HostConnection::get() New Host Connection established 0xe9952950, tid 6257
Solution 1:[1]
I've seen this issue before, and it's because the thread being used by the camera is no longer alive. If you debug the Camera API you'll see that it uses Handlers to invoke the necessary functions. The culprit for sending message to a Handler on a dead thread is right here:
@Override
protected void onPause() {
Log.e(TAG, "onPause");
//closeCamera();
stopBackgroundThread();
super.onPause();
}
Your onPause() is killing the thread that the Camera API is still using, which is what produces that error message. That is not the real issue, though; the real problem is the LegacyCameraDevice_nativeGetSurfaceId: Could not retrieve native Surface from surface error.
The Camera2 API will complain a lot if the surfaces are not configured with the right parameters. Since you're working from an example, you should initialize and configure the TextureView just like the sample you linked. I know they extend the class and provide a helper method for the configuration; that might take care of this problem.
As a side note, you should invoke super before doing your own thing.
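Putting those points together, here is a minimal sketch of a cleaned-up onPause() (my own illustration, reusing the closeCamera() and stopBackgroundThread() helpers that already exist in the question's activity), so the camera is released before its background thread is quit:
@Override
protected void onPause() {
    super.onPause();
    // Release the camera, capture session and ImageReader first, so nothing can
    // post to the background Handler after its thread has been quit.
    closeCamera();
    stopBackgroundThread();
}
The matching onResume() in the question already calls startBackgroundThread() before opening the camera, which is the order you want on the way back in.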
Solution 2:[2]
I figured out two reasons for this error on my side:
1) For video I selected CamcorderProfile.QUALITY_HIGH, which is not always supported by the MediaRecorder, so I now check that the profile's frame size is among the supported MediaRecorder output sizes:
CamcorderProfile profileHQ = CamcorderProfile.get(
        Integer.parseInt(cameraDevice.getId()), CamcorderProfile.QUALITY_HIGH);
// cameraConfig is the camera's StreamConfigurationMap
for (Size size : cameraConfig.getOutputSizes(MediaRecorder.class)) {
    if (size.getHeight() == profileHQ.videoFrameHeight
            && size.getWidth() == profileHQ.videoFrameWidth) {
        // you may use this profile
    }
}
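For reference, a self-contained sketch of the same check as a helper method (the method name and the way the StreamConfigurationMap is looked up are my own assumptions, not part of the original answer; it uses CameraManager, CameraCharacteristics, CamcorderProfile, MediaRecorder and Size):
// Hypothetical helper: true if QUALITY_HIGH matches one of the MediaRecorder output sizes.
private boolean isHighQualityProfileSupported(CameraManager manager, String cameraId)
        throws CameraAccessException {
    CamcorderProfile profile =
            CamcorderProfile.get(Integer.parseInt(cameraId), CamcorderProfile.QUALITY_HIGH);
    StreamConfigurationMap map = manager.getCameraCharacteristics(cameraId)
            .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    if (profile == null || map == null) {
        return false;
    }
    for (Size size : map.getOutputSizes(MediaRecorder.class)) {
        if (size.getWidth() == profile.videoFrameWidth
                && size.getHeight() == profile.videoFrameHeight) {
            return true;
        }
    }
    return false;
}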
2) For image capturing I had one phone that produces huge images (5248x3936), and saving them took so long that the next capture request interrupted the first one: the thread was closed, which led to the surface becoming unavailable. Now I wait until the image has been saved before I take the next picture:
@Override
public void onImageAvailable(ImageReader reader) {
Image image = null;
try {
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
save(bytes);
} catch (IOException e) {
throw new IllegalStateException("Unable to write image to buffer.", e);
} finally {
if (image != null) {
image.close();
}
try {
LOGGER.debug("Taking next picture ...");
if (!closeRequestReceived) takePicture();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
}
Solution 3:[3]
I was getting this same error and it turned out to be an API level issue. Try setting this at the very top of your class, and make sure only API 21+ devices can enter the activity.
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP) //<-- This line
public class YourClassName {
....
}
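To enforce the second part of that advice, here is a minimal sketch of a runtime guard before launching the camera activity (my own illustration; it assumes android.os.Build, android.content.Intent and the question's AndroidCameraApi activity):
// Only open the Camera2 screen on API 21 (Lollipop) and above.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
    startActivity(new Intent(this, AndroidCameraApi.class));
} else {
    Toast.makeText(this, "Camera2 requires Android 5.0 or newer", Toast.LENGTH_SHORT).show();
}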
Solution 4:[4]
A little bit late, but I had the same problem with the same app, and after reading what @Clocker said I realised he was absolutely right. We all tend to forget that the second time around, when the app is brought back into the foreground, onPause() does get called after onResume(), and hence it kills the thread, so all later camera setup operations fail, especially the Surface. So just use a bit of logic to only allow onPause() to run partially.
@Override
public void onPause() {
    if (onResume_just_ran) {
        super.onPause();
        return;
    }
    super.onPause();
    closeCamera();
    stopBackgroundThread();
}
The boolean onResume_just_ran is set to true in onResume(), so the early return skips stopBackgroundThread(). This worked fine for me! Also, remember to set that boolean back to false somewhere in the program, so the bottom part of the method does run when the activity really goes into onPause() while exiting; in my program that reset happened somewhere else. A small sketch of how the flag might be wired up follows below.
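For completeness, here is that sketch (the field name follows the answer; exactly where you reset it is an assumption and depends on your app):
private volatile boolean onResume_just_ran = false;

@Override
protected void onResume() {
    super.onResume();
    onResume_just_ran = true;   // the spurious onPause() that follows will skip its teardown
    startBackgroundThread();
    if (textureView.isAvailable()) {
        openCamera();
    } else {
        textureView.setSurfaceTextureListener(textureListener);
    }
}

// Reset the flag once the preview is actually running, e.g. at the end of
// updatePreview(), so a real onPause() performs the full teardown again:
//     onResume_just_ran = false;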
Sources
This article follows the attribution requirements of Stack Overflow and is licensed under CC BY-SA 3.0.
Source: Stack Overflow
| Solution | Source |
|---|---|
| Solution 1 | Clocker |
| Solution 2 | |
| Solution 3 | PGMacDesign |
| Solution 4 | user3833732 |