Rxjava 2 exception with camera
I just switched my code from asynctask
to rxjava2
and I'm randomly getting this exception on my nexus:
Camera is being used after Camera.release() was called in Galaxy s6 Edge
Following is my code-
Class Camera :
public class Cameras {
private static final String TAG = Cameras.class.getSimpleName();
private static final String SP_CAMERA_ID = "camera_id";
private static final int NO_NEXT_TASK = 0;
private static final int NEXT_TASK_RELEASE_COMPLETE = 1;
private static final int NEXT_TASK_SWITCH_COMPLETE = 2;
private static final int NEXT_TASK_START_PREVIEW = 3;
private Camera camera;
private int currentCameraId = -1;
private Camera.PreviewCallback previewCallback;
private byte[] buffer1, buffer2, buffer3;
private SurfaceTexture surfaceTexture;
private Listener listener;
public interface Listener {
void onCameraOpened(Camera.Size size, int angle);
}
private boolean cameraReleased = false;
public Cameras(Camera.PreviewCallback previewCallback, Listener listener) {
this.listener = listener;
this.previewCallback = previewCallback;
this.currentCameraId = Spin.INSTANCE.getSp().getInt(SP_CAMERA_ID, -1);
getCameraList();
}
private void getCameraList() {
int numberOfCameras = Camera.getNumberOfCameras();
Camera.CameraInfo camInfo = new Camera.CameraInfo();
for (int i = 0; i < numberOfCameras; i++) {
Camera.getCameraInfo(i, camInfo);
cams.add(camInfo.facing);
}
if (Camera.CameraInfo.CAMERA_FACING_BACK != currentCameraId
&& Camera.CameraInfo.CAMERA_FACING_FRONT != currentCameraId) {
currentCameraId = cams.get(cams.size() == 2 ? 1 : 0);
}
}
public boolean isSwitchCamAvailable() {
return Camera.getNumberOfCameras() > 1;
}
public void open(SurfaceTexture surfaceTexture) {
this.surfaceTexture = surfaceTexture;
init(NEXT_TASK_START_PREVIEW);
}
private void init(final int nextTask) {
if (cams.isEmpty()) {
Toast.makeText(Spin.getContext(), "Device have no camera", Toast.LENGTH_SHORT).show();
return;
}
cameraReleased = false;
if (Looper.getMainLooper().getThread() == Thread.currentThread()) {
Observable.defer(new Callable<ObservableSource<?>>() {
@Override
public ObservableSource<?> call() throws Exception {
synchronized (this) {
try {
camera = Camera.open(currentCameraId);
} catch (RuntimeException e) {
e.printStackTrace();
}
}
return Completable.complete().toObservable();
}
}).doOnComplete(() -> {
initComplete(nextTask);
Log.d("Complete", "Complete");
})
.subscribeOn(Schedulers.computation())
.observeOn(AndroidSchedulers.mainThread()).subscribe();
} else {
try {
synchronized (this) {
this.camera = Camera.open(currentCameraId);
}
initComplete(nextTask);
} catch (RuntimeException ignored) {
}
}
}
private void initComplete(int nextTask) {
if (camera == null) {
return;
}
//noinspection SynchronizeOnNonFinalField
synchronized (camera) {
try {
Camera.Parameters params = camera.getParameters();
Camera.Size size = getClosestFrameSize(params, 640);
params.setPreviewSize(size.width, size.height);
camera.setParameters(params);
camera.setPreviewCallbackWithBuffer(previewCallback);
int bufferSize = size.width * size.height
* ImageFormat.getBitsPerPixel(ImageFormat.NV21) / 8;
buffer1 = new byte[bufferSize];
buffer2 = new byte[bufferSize];
buffer3 = new byte[bufferSize];
camera.addCallbackBuffer(buffer1);
camera.addCallbackBuffer(buffer2);
camera.addCallbackBuffer(buffer3);
camera.setPreviewTexture(surfaceTexture);
int angle = rotateStream();
camera.setDisplayOrientation(angle);
if (currentCameraId == Camera.CameraInfo.CAMERA_FACING_FRONT && angle > 0)
angle = 360 - angle;
listener.onCameraOpened(size, angle);
routNextTask(nextTask);
} catch (IOException | RuntimeException e) {
e.printStackTrace();
}
}
}
private Camera.Size getClosestFrameSize(Camera.Parameters params, int width) {
Camera.Size result = null;
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
Camera.Size currentSize = null;
int closestDistance = 0;
int currentDistance = 0;
for (int i = 0; i < sizes.size(); ++i) {
if (null == result) {
result = sizes.get(i);
closestDistance = Math.abs(result.width - width);
continue;
}
currentSize = sizes.get(i);
currentDistance = Math.abs(currentSize.width - width);
if (currentDistance < closestDistance) {
closestDistance = currentDistance;
result = currentSize;
if (closestDistance == 0) break;
}
}
return result;
}
public void stopPreview() {
stopPreview(NO_NEXT_TASK);
}
private String nextTaskStr(final int nextTask) {
String nextTaskStr = null;
switch (nextTask) {
case NO_NEXT_TASK:
nextTaskStr = "NO_NEXT_TASK";
break;
case NEXT_TASK_RELEASE_COMPLETE:
nextTaskStr = "NEXT_TASK_RELEASE_COMPLETE";
break;
case NEXT_TASK_SWITCH_COMPLETE:
nextTaskStr = "NEXT_TASK_SWITCH_COMPLETE";
break;
case NEXT_TASK_START_PREVIEW:
nextTaskStr = "NEXT_TASK_START_PREVIEW";
break;
}
return nextTaskStr;
}
private void stopPreview(final int nextTask) {
if (null == camera) return;
if (Looper.getMainLooper().getThread() == Thread.currentThread()) {
Observable.defer(new Callable<ObservableSource<?>>() {
@Override
public ObservableSource<?> call() throws Exception {
synchronized (this) {
if ( (null != camera) && (!cameraReleased) ) {
if (LogDog.isEnabled) ;
camera.stopPreview();
}
}
return Completable.complete().toObservable();
}
}).doOnComplete(() -> {
routNextTask(nextTask);
Log.d("Complete", "Complete");
})
.subscribeOn(Schedulers.computation())
.observeOn(AndroidSchedulers.mainThread()).subscribe();
} else {
synchronized (this) {
if ( (null != camera) && (!cameraReleased) ) {
camera.stopPreview();
}
}
routNextTask(nextTask);
}
}
private void routNextTask(int nextTask) {
if (NO_NEXT_TASK == nextTask) return;
if (NEXT_TASK_SWITCH_COMPLETE == nextTask) {
switchCamComplete();
} else if (NEXT_TASK_RELEASE_COMPLETE == nextTask) {
releaseComplete();
} else if (NEXT_TASK_START_PREVIEW == nextTask) {
startPreview(null);
}
}
public void startPreview(Camera.PreviewCallback cpc) {
if (null == camera) return;
synchronized (this) {
camera.startPreview();
switchCamOnAir = false;
}
}
private void releaseCamera() {
synchronized (this) {
if (null == camera) return;
camera.setPreviewCallback(null);
camera.release();
camera = null;
cameraReleased = true;
}
}
public void release() {
synchronized (this) {
if (null == camera) return;
stopPreview(NEXT_TASK_RELEASE_COMPLETE);
}
}
private void releaseComplete() {
synchronized (this) {
if (camera != null) {
camera.release();
cameraReleased = true;
camera = null;
}
}
buffer1 = null;
buffer2 = null;
buffer3 = null;
}
private boolean switchCamOnAir = false;
public void switchCam() {
if (!isSwitchCamAvailable()) return;
if (null == camera) return;
if (switchCamOnAir) return;
this.switchCamOnAir = true;
stopPreview(NEXT_TASK_SWITCH_COMPLETE);
}
private void switchCamComplete() {
releaseCamera();
if (currentCameraId == Camera.CameraInfo.CAMERA_FACING_FRONT) {
currentCameraId = Camera.CameraInfo.CAMERA_FACING_BACK;
} else {
currentCameraId = Camera.CameraInfo.CAMERA_FACING_FRONT;
}
Spin.INSTANCE.getSp().edit().putInt(SP_CAMERA_ID, currentCameraId).apply();
init(NEXT_TASK_START_PREVIEW);
}
public int rotateStream() {
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(currentCameraId, info);
WindowManager wm = (WindowManager) Spin.getContext()
.getSystemService(Context.WINDOW_SERVICE);
int rotation = wm.getDefaultDisplay().getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
}
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (info.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate the mirror
} else { // back-facing
result = (info.orientation - degrees + 360) % 360;
}
return result;
}
@SuppressWarnings("unused")
public int getDeviceDefaultOrientation() {
WindowManager windowManager = (WindowManager) Spin.getContext()
.getSystemService(Context.WINDOW_SERVICE);
Configuration config = Spin.getContext().getResources().getConfiguration();
int rotation = windowManager.getDefaultDisplay().getRotation();
if (((rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_180) &&
config.orientation == Configuration.ORIENTATION_LANDSCAPE)
|| ((rotation == Surface.ROTATION_90 || rotation == Surface.ROTATION_270) &&
config.orientation == Configuration.ORIENTATION_PORTRAIT)) {
return Configuration.ORIENTATION_LANDSCAPE;
} else {
return Configuration.ORIENTATION_PORTRAIT;
}
}
}
cameras.java :
if (Looper.getMainLooper().getThread() == Thread.currentThread()) {
Observable.defer(new Callable<ObservableSource<?>>() {
@Override
public ObservableSource<?> call() throws Exception {
if (LogDog.isEnabled) LogDog.e("Debug::"+TAG + "::stopPreview()::AsyncTask::doInBackground()", " (camera != null) =" + (camera != null) );
synchronized (this) {
if ( (null != camera) && (!cameraReleased) ) {
if (LogDog.isEnabled) LogDog.e("Debug::" + TAG + "::stopPreview()::AsyncTask::doInBackground()", " XXX CALL camera.stopPreview()");
camera.stopPreview();
}
}
return Completable.complete().toObservable();
}
}).doOnComplete(() -> {
routNextTask(nextTask);
Log.d("Complete", "Complete");
})
.subscribeOn(Schedulers.computation())
.observeOn(AndroidSchedulers.mainThread()).subscribe();
Not sure what I am doing wrong. Any ideas where I can release the camera or allocate it, so it works without any issues? Exception is as follows:
FATAL EXCEPTION: main io.reactivex.exceptions.OnErrorNotImplementedException: Camera is being used after Camera.release() was called at io.reactivex.internal.functions.Functions$OnErrorMissingConsumer.accept(Functions.java:704) at io.reactivex.internal.functions.Functions$OnErrorMissingConsumer.accept(Functions.java:701) at io.reactivex.internal.observers.LambdaObserver.onError(LambdaObserver.java:74) at io.reactivex.internal.operators.observable.ObservableObserveOn$ObserveOnObserver.checkTerminated(ObservableObserveOn.java:276) at io.reactivex.internal.operators.observable.ObservableObserveOn$ObserveOnObserver.drainNormal(ObservableObserveOn.java:172) at io.reactivex.internal.operators.observable.ObservableObserveOn$ObserveOnObserver.run(ObservableObserveOn.java:252) at io.reactivex.android.schedulers.HandlerScheduler$ScheduledRunnable.run(HandlerScheduler.java:109) at android.os.Handler.handleCallback(Handler.java:751) at android.os.Handler.dispatchMessage(Handler.java:95) at android .os.Looper.loop(Looper.java:154) at android.app.ActivityThread.main(ActivityThread.java:6119) at java.lang.reflect.Method.invoke(Native Method) at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:886) at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:776) Caused by: java.lang.RuntimeException: Camera is being used after Camera.release() was called at android.hardware.Camera._stopPreview(Native Method) at android.hardware.Camera.stopPreview(Camera.java:730) at com.media.video.Cameras$2.call(Cameras.java:413) at com.media.video.Cameras$2.call(Cameras.java:406) at io.reactivex.internal.operators.observable.ObservableDefer.subscribeActual(ObservableDefer.java:32) at io.reactivex.Observable.subscribe(Observable.java:10842) at io.reactivex.internal.operators.observable.ObservableDoOnEach.subscribeActual(ObservableDoOnEach.java:42) at io.reactivex.Observable.subscribe(Observable.java:10842) at io.reactivex.internal.operators.observable.ObservableSubscribeOn$ SubscribeTask.run(ObservableSubscribeOn.java:96) at io.reactivex.internal.schedulers.ScheduledDirectTask.call(ScheduledDirectTask.java:38) at io.reactivex.internal.schedulers.ScheduledDirectTask.call(ScheduledDirectTask.java:26) at java.util.concurrent.FutureTask.run(FutureTask.java:237) at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:272) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1133) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:607) at java.lang.Thread.run(Thread.java:761)
The code you wrote for releasing the camera is prone to have race-conditions. A small change which could already make a difference is setting the flag before the action happens.
cameraReleased = true;
camera.release();
camera = null;
It might be necessary to recheck the flag just before the release of the Camera
, but for this you synchronized the code before. In this context the issue is, that you're calling synchronized(this)
within the deferred Observable
. Instead you should synchronize on the same instance like everywhere else, because this
doesn't refer to the outer class. Instead use this@Cameras
@Override
public ObservableSource<?> call() throws Exception {
if (LogDog.isEnabled) LogDog.e("Debug::"+TAG + "::stopPreview()::AsyncTask::doInBackground()", " (camera != null) =" + (camera != null) );
synchronized (this@Cameras) {
if ( (null != camera) && (!cameraReleased) ) {
if (LogDog.isEnabled) LogDog.e("Debug::" + TAG + "::stopPreview()::AsyncTask::doInBackground()", " XXX CALL camera.stopPreview()");
camera.stopPreview();
}
}
return Completable.complete().toObservable();
}
Additional to this, you're use-case for Observable.defer()
doesn't look right. The Completable.fromAction()
factory might be more suitable.
上一篇: 你应该同步访问Java中的属性吗?
下一篇: Rxjava 2与相机异常