RxJava 2 exception with camera

I just switched my code from AsyncTask to RxJava 2, and now I'm randomly getting this exception:

Camera is being used after Camera.release() was called (on a Galaxy S6 Edge)

Here is my code:

Cameras.java

 public class Cameras {


    private static final String TAG = Cameras.class.getSimpleName();

    private static final String SP_CAMERA_ID = "camera_id";

    private static final int NO_NEXT_TASK = 0;

    private static final int NEXT_TASK_RELEASE_COMPLETE = 1;

    private static final int NEXT_TASK_SWITCH_COMPLETE = 2;

    private static final int NEXT_TASK_START_PREVIEW = 3;


    private Camera camera;

    private int currentCameraId = -1;

    // facing constants (CAMERA_FACING_BACK / CAMERA_FACING_FRONT) of the cameras found on the device
    private final List<Integer> cams = new ArrayList<>();


    private Camera.PreviewCallback previewCallback;

    private byte[] buffer1, buffer2, buffer3;

    private SurfaceTexture surfaceTexture;


    private Listener listener;


    public interface Listener {

        void onCameraOpened(Camera.Size size, int angle);
    }

    private boolean cameraReleased = false;


    public Cameras(Camera.PreviewCallback previewCallback, Listener listener) {
        this.listener = listener;
        this.previewCallback = previewCallback;
        this.currentCameraId = Spin.INSTANCE.getSp().getInt(SP_CAMERA_ID, -1);
        getCameraList();
    }


    private void getCameraList() {
        int numberOfCameras = Camera.getNumberOfCameras();
        Camera.CameraInfo camInfo = new Camera.CameraInfo();

        for (int i = 0; i < numberOfCameras; i++) {
            Camera.getCameraInfo(i, camInfo);
            cams.add(camInfo.facing);
        }

        if (Camera.CameraInfo.CAMERA_FACING_BACK != currentCameraId
                && Camera.CameraInfo.CAMERA_FACING_FRONT != currentCameraId) {
            currentCameraId = cams.get(cams.size() == 2 ? 1 : 0);
        }

    }



    public boolean isSwitchCamAvailable() {
        return Camera.getNumberOfCameras() > 1;
    }



    public void open(SurfaceTexture surfaceTexture) {
        this.surfaceTexture = surfaceTexture;
        init(NEXT_TASK_START_PREVIEW);
    }



    private void init(final int nextTask) {

        if (cams.isEmpty()) {
            Toast.makeText(Spin.getContext(), "Device have no camera", Toast.LENGTH_SHORT).show();
            return;
        }
        cameraReleased = false;

        if (Looper.getMainLooper().getThread() == Thread.currentThread()) {
            Observable.defer(new Callable<ObservableSource<?>>() {
                @Override
                public ObservableSource<?> call() throws Exception {
                    synchronized (this) {
                        try {
                            camera = Camera.open(currentCameraId);
                        } catch (RuntimeException e) {
                            e.printStackTrace();
                        }
                    }
                    return Completable.complete().toObservable();
                }
            }).doOnComplete(() -> {
                initComplete(nextTask);
                Log.d("Complete", "Complete");
            })
                    .subscribeOn(Schedulers.computation())
                    .observeOn(AndroidSchedulers.mainThread()).subscribe();



        } else {
            try {

                synchronized (this) {
                    this.camera = Camera.open(currentCameraId);
                }
                initComplete(nextTask);
            } catch (RuntimeException ignored) {

            }
        }
    }



    private void initComplete(int nextTask) {
        if (camera == null) {
            return;
        }

        //noinspection SynchronizeOnNonFinalField
        synchronized (camera) {

            try {

                Camera.Parameters params = camera.getParameters();
                Camera.Size size = getClosestFrameSize(params, 640);
                params.setPreviewSize(size.width, size.height);
                camera.setParameters(params);
                camera.setPreviewCallbackWithBuffer(previewCallback);

                int bufferSize = size.width * size.height
                        * ImageFormat.getBitsPerPixel(ImageFormat.NV21) / 8;


                buffer1 = new byte[bufferSize];
                buffer2 = new byte[bufferSize];
                buffer3 = new byte[bufferSize];

                camera.addCallbackBuffer(buffer1);
                camera.addCallbackBuffer(buffer2);
                camera.addCallbackBuffer(buffer3);

                camera.setPreviewTexture(surfaceTexture);

                int angle = rotateStream();

                camera.setDisplayOrientation(angle);

                if (currentCameraId == Camera.CameraInfo.CAMERA_FACING_FRONT && angle > 0)
                    angle = 360 - angle;

                listener.onCameraOpened(size, angle);

                routNextTask(nextTask);

            } catch (IOException | RuntimeException e) {
                e.printStackTrace();
            }
        }
    }



    private Camera.Size getClosestFrameSize(Camera.Parameters params, int width) {

        Camera.Size result = null;

        List<Camera.Size> sizes = params.getSupportedPreviewSizes();

        Camera.Size currentSize = null;
        int closestDistance = 0;
        int currentDistance = 0;

        for (int i = 0; i < sizes.size(); ++i) {

            if (null == result) {
                result = sizes.get(i);
                closestDistance = Math.abs(result.width - width);
                continue;
            }

            currentSize = sizes.get(i);
            currentDistance = Math.abs(currentSize.width - width);

            if (currentDistance < closestDistance) {
                closestDistance = currentDistance;
                result = currentSize;
                if (closestDistance == 0) break;
            }

        }

        return result;

    }



    public void stopPreview() {
        stopPreview(NO_NEXT_TASK);
    }

    private String nextTaskStr(final int nextTask) {
        String nextTaskStr = null;
        switch (nextTask) {
            case NO_NEXT_TASK:
                nextTaskStr = "NO_NEXT_TASK";
                break;
            case NEXT_TASK_RELEASE_COMPLETE:
                nextTaskStr = "NEXT_TASK_RELEASE_COMPLETE";
                break;
            case NEXT_TASK_SWITCH_COMPLETE:
                nextTaskStr = "NEXT_TASK_SWITCH_COMPLETE";
                break;
            case NEXT_TASK_START_PREVIEW:
                nextTaskStr = "NEXT_TASK_START_PREVIEW";
                break;
        }
        return nextTaskStr;
    }


    private void stopPreview(final int nextTask) {


        if (null == camera) return;

        if (Looper.getMainLooper().getThread() == Thread.currentThread()) {
            Observable.defer(new Callable<ObservableSource<?>>() {
                @Override
                public ObservableSource<?> call() throws Exception {

                    synchronized (this) {
                        if ( (null != camera) && (!cameraReleased) ) {
                            if (LogDog.isEnabled) LogDog.e("Debug::" + TAG + "::stopPreview()::AsyncTask::doInBackground()", " XXX CALL camera.stopPreview()");
                            camera.stopPreview();
                        }
                    }
                    return Completable.complete().toObservable();
                }
            }).doOnComplete(() -> {
                routNextTask(nextTask);
                Log.d("Complete", "Complete");
            })
                    .subscribeOn(Schedulers.computation())
                    .observeOn(AndroidSchedulers.mainThread()).subscribe();


        } else {

            synchronized (this) {
                if ( (null != camera) && (!cameraReleased) ) {

                    camera.stopPreview();
                }
            }
            routNextTask(nextTask);
        }
    }

    private void routNextTask(int nextTask) {

        if (NO_NEXT_TASK == nextTask) return;

        if (NEXT_TASK_SWITCH_COMPLETE == nextTask) {
            switchCamComplete();
        } else if (NEXT_TASK_RELEASE_COMPLETE == nextTask) {
            releaseComplete();
        } else if (NEXT_TASK_START_PREVIEW == nextTask) {
            startPreview(null);
        }
    }

    public void startPreview(Camera.PreviewCallback cpc) {


        if (null == camera) return;
        synchronized (this) {


            camera.startPreview();
            switchCamOnAir = false;
        }
    }


    private void releaseCamera() {
        synchronized (this) {

            if (null == camera) return;
            camera.setPreviewCallback(null);

            camera.release();
            camera = null;
            cameraReleased = true;
        }
    }


    public void release() {

        synchronized (this) {
            if (null == camera) return;
            stopPreview(NEXT_TASK_RELEASE_COMPLETE);
        }
    }


    private void releaseComplete() {
        synchronized (this) {
            if (camera != null) {

                camera.release();
                cameraReleased = true;
                camera = null;
            }
        }
        buffer1 = null;
        buffer2 = null;
        buffer3 = null;
    }



    private boolean switchCamOnAir = false;


    public void switchCam() {
        if (!isSwitchCamAvailable()) return;
        if (null == camera) return;
        if (switchCamOnAir) return;
        this.switchCamOnAir = true;
        stopPreview(NEXT_TASK_SWITCH_COMPLETE);
    }


    private void switchCamComplete() {

        releaseCamera();

        if (currentCameraId == Camera.CameraInfo.CAMERA_FACING_FRONT) {
            currentCameraId = Camera.CameraInfo.CAMERA_FACING_BACK;
        } else {
            currentCameraId = Camera.CameraInfo.CAMERA_FACING_FRONT;
        }

        Spin.INSTANCE.getSp().edit().putInt(SP_CAMERA_ID, currentCameraId).apply();

        init(NEXT_TASK_START_PREVIEW);
    }


    public int rotateStream() {

        Camera.CameraInfo info = new Camera.CameraInfo();
        Camera.getCameraInfo(currentCameraId, info);

        WindowManager wm = (WindowManager) Spin.getContext()
                .getSystemService(Context.WINDOW_SERVICE);

        int rotation = wm.getDefaultDisplay().getRotation();

        int degrees = 0;
        switch (rotation) {
            case Surface.ROTATION_0:
                degrees = 0;
                break;
            case Surface.ROTATION_90:
                degrees = 90;
                break;
            case Surface.ROTATION_180:
                degrees = 180;
                break;
            case Surface.ROTATION_270:
                degrees = 270;
                break;
        }

        int result;

        if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
            result = (info.orientation + degrees) % 360;
            result = (360 - result) % 360;  // compensate the mirror
        } else {  // back-facing
            result = (info.orientation - degrees + 360) % 360;
        }

        return result;
    }



    @SuppressWarnings("unused")
    public int getDeviceDefaultOrientation() {

        WindowManager windowManager = (WindowManager) Spin.getContext()
                .getSystemService(Context.WINDOW_SERVICE);

        Configuration config = Spin.getContext().getResources().getConfiguration();

        int rotation = windowManager.getDefaultDisplay().getRotation();

        if (((rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_180) &&
                config.orientation == Configuration.ORIENTATION_LANDSCAPE)
                || ((rotation == Surface.ROTATION_90 || rotation == Surface.ROTATION_270) &&
                config.orientation == Configuration.ORIENTATION_PORTRAIT)) {
            return Configuration.ORIENTATION_LANDSCAPE;
        } else {
            return Configuration.ORIENTATION_PORTRAIT;
        }
    }

}

Cameras.java — the stopPreview() block the stack trace points to:

if (Looper.getMainLooper().getThread() == Thread.currentThread()) {
            Observable.defer(new Callable<ObservableSource<?>>() {
                @Override
                public ObservableSource<?> call() throws Exception {
                    if (LogDog.isEnabled) LogDog.e("Debug::"+TAG + "::stopPreview()::AsyncTask::doInBackground()", " (camera != null) =" + (camera != null) );
                    synchronized (this) {
                        if ( (null != camera) && (!cameraReleased) ) {
                            if (LogDog.isEnabled)  LogDog.e("Debug::" + TAG + "::stopPreview()::AsyncTask::doInBackground()", " XXX CALL camera.stopPreview()");
                            camera.stopPreview();
                        }
                    }
                    return Completable.complete().toObservable();
                }
            }).doOnComplete(() -> {
                routNextTask(nextTask);
                Log.d("Complete", "Complete");
            })
                    .subscribeOn(Schedulers.computation())
                    .observeOn(AndroidSchedulers.mainThread()).subscribe();

I don't know what I'm doing wrong. Any idea how I can release the camera, or open it again, so that this works without problems? The exception is as follows:

FATAL EXCEPTION: main
io.reactivex.exceptions.OnErrorNotImplementedException: Camera is being used after Camera.release() was called
    at io.reactivex.internal.functions.Functions$OnErrorMissingConsumer.accept(Functions.java:704)
    at io.reactivex.internal.functions.Functions$OnErrorMissingConsumer.accept(Functions.java:701)
    at io.reactivex.internal.observers.LambdaObserver.onError(LambdaObserver.java:74)
    at io.reactivex.internal.operators.observable.ObservableObserveOn$ObserveOnObserver.checkTerminated(ObservableObserveOn.java:276)
    at io.reactivex.internal.operators.observable.ObservableObserveOn$ObserveOnObserver.drainNormal(ObservableObserveOn.java:252)
    at io.reactivex.android.schedulers.HandlerScheduler$ScheduledRunnable.run(HandlerScheduler.java:109)
    at android.os.Handler.handleCallback(Handler.java:751)
    at android.os.Handler.dispatchMessage(Handler.java:95)
    at android.os.Looper.loop(Looper.java:154)
    at android.app.ActivityThread.main(ActivityThread.java:6119)
    at java.lang.reflect.Method.invoke(Native Method)
    at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:886)
    at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:776)
Caused by: java.lang.RuntimeException: Camera is being used after Camera.release() was called
    at android.hardware.Camera._stopPreview(Native Method)
    at android.hardware.Camera.stopPreview(Camera.java:730)
    at com.media.video.Cameras$2.call(Cameras.java:413)
    at com.media.video.Cameras$2.call(Cameras.java:406)
    at io.reactivex.internal.operators.observable.ObservableDefer.subscribeActual(ObservableDefer.java:32)
    at io.reactivex.Observable.subscribe(Observable.java:10842)
    at io.reactivex.internal.operators.observable.ObservableDoOnEach.subscribeActual(ObservableDoOnEach.java:42)
    at io.reactivex.Observable.subscribe(Observable.java:10842)
    at io.reactivex.internal.operators.observable.ObservableSubscribeOn$SubscribeTask.run(ObservableSubscribeOn.java:96)
    at io.reactivex.internal.schedulers.ScheduledDirectTask.call(ScheduledDirectTask.java:38)
    at io.reactivex.internal.schedulers.ScheduledDirectTask.call(ScheduledDirectTask.java:26)
    at java.util.concurrent.FutureTask.run(FutureTask.java:237)
    at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:272)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1133)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:607)
    at java.lang.Thread.run(Thread.java:761)


The code you wrote for releasing the camera is prone to a race condition. One small change that may already make a difference is to set the flag before the release happens:

cameraReleased = true;
camera.release();
camera = null;

Re-checking the flag right before releasing the camera may also be necessary, and you already synchronize the code for that. The problem here, however, is that you call synchronized (this) inside the deferred Observable: there, this refers to the anonymous Callable, not to the Cameras instance you lock on everywhere else. Synchronize on the outer instance instead, i.e. use Cameras.this:

@Override
public ObservableSource<?> call() throws Exception {
    if (LogDog.isEnabled) LogDog.e("Debug::"+TAG + "::stopPreview()::AsyncTask::doInBackground()", " (camera != null) =" + (camera != null) );
    synchronized (Cameras.this) {
        if ( (null != camera) && (!cameraReleased) ) {
            if (LogDog.isEnabled)  LogDog.e("Debug::" + TAG + "::stopPreview()::AsyncTask::doInBackground()", " XXX CALL camera.stopPreview()");
            camera.stopPreview();
        }
    }
    return Completable.complete().toObservable();
}

Apart from that, Observable.defer() is not really the right tool for this use case; the Completable.fromAction() factory is probably a better fit.
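For illustration, here is a minimal sketch of what the stopPreview(int) body could look like with Completable.fromAction(). It reuses the camera, cameraReleased, routNextTask() and TAG names from your class, but the exact wiring is an assumption, not a drop-in replacement:

private void stopPreview(final int nextTask) {

    if (camera == null) return;

    // fromAction() runs the Action when subscribed, so there is no need to
    // build and return a dummy Observable as with Observable.defer().
    Completable.fromAction(() -> {
        // lock on the enclosing instance, the same monitor used elsewhere
        synchronized (Cameras.this) {
            if (camera != null && !cameraReleased) {
                camera.stopPreview();
            }
        }
    })
            .subscribeOn(Schedulers.computation())
            .observeOn(AndroidSchedulers.mainThread())
            .subscribe(
                    () -> routNextTask(nextTask),                               // onComplete, back on the main thread
                    throwable -> Log.e(TAG, "stopPreview() failed", throwable)); // explicit onError handler

}

Passing an explicit onError consumer to subscribe() also avoids the OnErrorNotImplementedException you see in the crash log: anything thrown inside the Action is then logged instead of crashing the main thread.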
