RxJava 2 and Camera exception

Question (votes: 7, answers: 1)

I just switched my code from AsyncTask to RxJava 2, and I am now randomly getting this exception on my Nexus and on a Galaxy S6 Edge:

Camera is being used after Camera.release() was called

Here is my code:

The Cameras class:

 public class Cameras {


    private static final String TAG = Cameras.class.getSimpleName();

    private static final String SP_CAMERA_ID = "camera_id";

    private static final int NO_NEXT_TASK = 0;

    private static final int NEXT_TASK_RELEASE_COMPLETE = 1;

    private static final int NEXT_TASK_SWITCH_COMPLETE = 2;

    private static final int NEXT_TASK_START_PREVIEW = 3;


    private Camera camera;

    private int currentCameraId = -1;

    // Facing value of each camera found on the device; the declaration was missing
    // from the snippet and is inferred from its use in getCameraList() and init()
    // (requires java.util.List / java.util.ArrayList imports).
    private final List<Integer> cams = new ArrayList<>();


    private Camera.PreviewCallback previewCallback;

    private byte[] buffer1, buffer2, buffer3;

    private SurfaceTexture surfaceTexture;


    private Listener listener;


    public interface Listener {

        void onCameraOpened(Camera.Size size, int angle);
    }

    private boolean cameraReleased = false;


    public Cameras(Camera.PreviewCallback previewCallback, Listener listener) {
        this.listener = listener;
        this.previewCallback = previewCallback;
        this.currentCameraId = Spin.INSTANCE.getSp().getInt(SP_CAMERA_ID, -1);
        getCameraList();
    }


    private void getCameraList() {
        int numberOfCameras = Camera.getNumberOfCameras();
        Camera.CameraInfo camInfo = new Camera.CameraInfo();

        for (int i = 0; i < numberOfCameras; i++) {
            Camera.getCameraInfo(i, camInfo);
            cams.add(camInfo.facing);
        }

        if (Camera.CameraInfo.CAMERA_FACING_BACK != currentCameraId
                && Camera.CameraInfo.CAMERA_FACING_FRONT != currentCameraId) {
            currentCameraId = cams.get(cams.size() == 2 ? 1 : 0);
        }

    }



    public boolean isSwitchCamAvailable() {
        return Camera.getNumberOfCameras() > 1;
    }



    public void open(SurfaceTexture surfaceTexture) {
        this.surfaceTexture = surfaceTexture;
        init(NEXT_TASK_START_PREVIEW);
    }



    private void init(final int nextTask) {

        if (cams.isEmpty()) {
            Toast.makeText(Spin.getContext(), "Device have no camera", Toast.LENGTH_SHORT).show();
            return;
        }
        cameraReleased = false;

        if (Looper.getMainLooper().getThread() == Thread.currentThread()) {
            Observable.defer(new Callable<ObservableSource<?>>() {
                @Override
                public ObservableSource<?> call() throws Exception {
                    synchronized (this) {
                        try {
                            camera = Camera.open(currentCameraId);
                        } catch (RuntimeException e) {
                            e.printStackTrace();
                        }
                    }
                    return Completable.complete().toObservable();
                }
            }).doOnComplete(() -> {
                initComplete(nextTask);
                Log.d("Complete", "Complete");
            })
                    .subscribeOn(Schedulers.computation())
                    .observeOn(AndroidSchedulers.mainThread()).subscribe();



        } else {
            try {

                synchronized (this) {
                    this.camera = Camera.open(currentCameraId);
                }
                initComplete(nextTask);
            } catch (RuntimeException ignored) {

            }
        }
    }



    private void initComplete(int nextTask) {
        if (camera == null) {
            return;
        }

        //noinspection SynchronizeOnNonFinalField
        synchronized (camera) {

            try {

                Camera.Parameters params = camera.getParameters();
                Camera.Size size = getClosestFrameSize(params, 640);
                params.setPreviewSize(size.width, size.height);
                camera.setParameters(params);
                camera.setPreviewCallbackWithBuffer(previewCallback);

                int bufferSize = size.width * size.height
                        * ImageFormat.getBitsPerPixel(ImageFormat.NV21) / 8;


                buffer1 = new byte[bufferSize];
                buffer2 = new byte[bufferSize];
                buffer3 = new byte[bufferSize];

                camera.addCallbackBuffer(buffer1);
                camera.addCallbackBuffer(buffer2);
                camera.addCallbackBuffer(buffer3);

                camera.setPreviewTexture(surfaceTexture);

                int angle = rotateStream();

                camera.setDisplayOrientation(angle);

                if (currentCameraId == Camera.CameraInfo.CAMERA_FACING_FRONT && angle > 0)
                    angle = 360 - angle;

                listener.onCameraOpened(size, angle);

                routNextTask(nextTask);

            } catch (IOException | RuntimeException e) {
                e.printStackTrace();
            }
        }
    }



    private Camera.Size getClosestFrameSize(Camera.Parameters params, int width) {

        Camera.Size result = null;

        List<Camera.Size> sizes = params.getSupportedPreviewSizes();

        Camera.Size currentSize = null;
        int closestDistance = 0;
        int currentDistance = 0;

        for (int i = 0; i < sizes.size(); ++i) {

            if (null == result) {
                result = sizes.get(i);
                closestDistance = Math.abs(result.width - width);
                continue;
            }

            currentSize = sizes.get(i);
            currentDistance = Math.abs(currentSize.width - width);

            if (currentDistance < closestDistance) {
                closestDistance = currentDistance;
                result = currentSize;
                if (closestDistance == 0) break;
            }

        }

        return result;

    }



    public void stopPreview() {
        stopPreview(NO_NEXT_TASK);
    }

    private String nextTaskStr(final int nextTask) {
        String nextTaskStr = null;
        switch (nextTask) {
            case NO_NEXT_TASK:
                nextTaskStr = "NO_NEXT_TASK";
                break;
            case NEXT_TASK_RELEASE_COMPLETE:
                nextTaskStr = "NEXT_TASK_RELEASE_COMPLETE";
                break;
            case NEXT_TASK_SWITCH_COMPLETE:
                nextTaskStr = "NEXT_TASK_SWITCH_COMPLETE";
                break;
            case NEXT_TASK_START_PREVIEW:
                nextTaskStr = "NEXT_TASK_START_PREVIEW";
                break;
        }
        return nextTaskStr;
    }


    private void stopPreview(final int nextTask) {


        if (null == camera) return;

        if (Looper.getMainLooper().getThread() == Thread.currentThread()) {
            Observable.defer(new Callable<ObservableSource<?>>() {
                @Override
                public ObservableSource<?> call() throws Exception {

                    synchronized (this) {
                        if ( (null != camera) && (!cameraReleased) ) {
                            if (LogDog.isEnabled) LogDog.e("Debug::" + TAG + "::stopPreview()::AsyncTask::doInBackground()", " XXX CALL camera.stopPreview()");
                            camera.stopPreview();
                        }
                    }
                    return Completable.complete().toObservable();
                }
            }).doOnComplete(() -> {
                routNextTask(nextTask);
                Log.d("Complete", "Complete");
            })
                    .subscribeOn(Schedulers.computation())
                    .observeOn(AndroidSchedulers.mainThread()).subscribe();


        } else {

            synchronized (this) {
                if ( (null != camera) && (!cameraReleased) ) {

                    camera.stopPreview();
                }
            }
            routNextTask(nextTask);
        }
    }

    private void routNextTask(int nextTask) {

        if (NO_NEXT_TASK == nextTask) return;

        if (NEXT_TASK_SWITCH_COMPLETE == nextTask) {
            switchCamComplete();
        } else if (NEXT_TASK_RELEASE_COMPLETE == nextTask) {
            releaseComplete();
        } else if (NEXT_TASK_START_PREVIEW == nextTask) {
            startPreview(null);
        }
    }

    public void startPreview(Camera.PreviewCallback cpc) {


        if (null == camera) return;
        synchronized (this) {


            camera.startPreview();
            switchCamOnAir = false;
        }
    }


    private void releaseCamera() {
        synchronized (this) {

            if (null == camera) return;
            camera.setPreviewCallback(null);

            camera.release();
            camera = null;
            cameraReleased = true;
        }
    }


    public void release() {

        synchronized (this) {
            if (null == camera) return;
            stopPreview(NEXT_TASK_RELEASE_COMPLETE);
        }
    }


    private void releaseComplete() {
        synchronized (this) {
            if (camera != null) {

                camera.release();
                cameraReleased = true;
                camera = null;
            }
        }
        buffer1 = null;
        buffer2 = null;
        buffer3 = null;
    }



    private boolean switchCamOnAir = false;


    public void switchCam() {
        if (!isSwitchCamAvailable()) return;
        if (null == camera) return;
        if (switchCamOnAir) return;
        this.switchCamOnAir = true;
        stopPreview(NEXT_TASK_SWITCH_COMPLETE);
    }


    private void switchCamComplete() {

        releaseCamera();

        if (currentCameraId == Camera.CameraInfo.CAMERA_FACING_FRONT) {
            currentCameraId = Camera.CameraInfo.CAMERA_FACING_BACK;
        } else {
            currentCameraId = Camera.CameraInfo.CAMERA_FACING_FRONT;
        }

        Spin.INSTANCE.getSp().edit().putInt(SP_CAMERA_ID, currentCameraId).apply();

        init(NEXT_TASK_START_PREVIEW);
    }


    public int rotateStream() {

        Camera.CameraInfo info = new Camera.CameraInfo();
        Camera.getCameraInfo(currentCameraId, info);

        WindowManager wm = (WindowManager) Spin.getContext()
                .getSystemService(Context.WINDOW_SERVICE);

        int rotation = wm.getDefaultDisplay().getRotation();

        int degrees = 0;
        switch (rotation) {
            case Surface.ROTATION_0:
                degrees = 0;
                break;
            case Surface.ROTATION_90:
                degrees = 90;
                break;
            case Surface.ROTATION_180:
                degrees = 180;
                break;
            case Surface.ROTATION_270:
                degrees = 270;
                break;
        }

        int result;

        if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
            result = (info.orientation + degrees) % 360;
            result = (360 - result) % 360;  // compensate the mirror
        } else {  // back-facing
            result = (info.orientation - degrees + 360) % 360;
        }

        return result;
    }



    @SuppressWarnings("unused")
    public int getDeviceDefaultOrientation() {

        WindowManager windowManager = (WindowManager) Spin.getContext()
                .getSystemService(Context.WINDOW_SERVICE);

        Configuration config = Spin.getContext().getResources().getConfiguration();

        int rotation = windowManager.getDefaultDisplay().getRotation();

        if (((rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_180) &&
                config.orientation == Configuration.ORIENTATION_LANDSCAPE)
                || ((rotation == Surface.ROTATION_90 || rotation == Surface.ROTATION_270) &&
                config.orientation == Configuration.ORIENTATION_PORTRAIT)) {
            return Configuration.ORIENTATION_LANDSCAPE;
        } else {
            return Configuration.ORIENTATION_PORTRAIT;
        }
    }

}

Cameras.java (the stopPreview() body where the exception below is thrown):

if (Looper.getMainLooper().getThread() == Thread.currentThread()) {
            Observable.defer(new Callable<ObservableSource<?>>() {
                @Override
                public ObservableSource<?> call() throws Exception {
                    if (LogDog.isEnabled) LogDog.e("Debug::"+TAG + "::stopPreview()::AsyncTask::doInBackground()", " (camera != null) =" + (camera != null) );
                    synchronized (this) {
                        if ( (null != camera) && (!cameraReleased) ) {
                            if (LogDog.isEnabled)  LogDog.e("Debug::" + TAG + "::stopPreview()::AsyncTask::doInBackground()", " XXX CALL camera.stopPreview()");
                            camera.stopPreview();
                        }
                    }
                    return Completable.complete().toObservable();
                }
            }).doOnComplete(() -> {
                routNextTask(nextTask);
                Log.d("Complete", "Complete");
            })
                    .subscribeOn(Schedulers.computation())
                    .observeOn(AndroidSchedulers.mainThread()).subscribe();

Not sure what I am doing wrong. Any ideas on how I can release the camera, or allocate it, so that this does not happen? The exception is below:

Fatal Exception: main
io.reactivex.exceptions.OnErrorNotImplementedException: Camera is being used after Camera.release() was called
    at io.reactivex.internal.functions.Functions$OnErrorMissingConsumer.accept(Functions.java:704)
    at io.reactivex.internal.functions.Functions$OnErrorMissingConsumer.accept(Functions.java:701)
    at io.reactivex.internal.observers.LambdaObserver.onError(LambdaObserver.java:74)
    at io.reactivex.internal.operators.observable.ObservableObserveOn$ObserveOnObserver.checkTerminated(ObservableObserveOn.java:276)
    at io.reactivex.internal.operators.observable.ObservableObserveOn$ObserveOnObserver.drainNormal(ObservableObserveOn.java:172)
    at io.reactivex.internal.operators.observable.ObservableObserveOn$ObserveOnObserver.run(ObservableObserveOn.java:252)
    at io.reactivex.android.schedulers.HandlerScheduler$ScheduledRunnable.run(HandlerScheduler.java:109)
    at android.os.Handler.handleCallback(Handler.java:751)
    at android.os.Handler.dispatchMessage(Handler.java:95)
    at android.os.Looper.loop(Looper.java:154)
    at android.app.ActivityThread.main(ActivityThread.java:6119)
    at java.lang.reflect.Method.invoke(Native Method)
    at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:886)
    at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:776)
Caused by: java.lang.RuntimeException: Camera is being used after Camera.release() was called
    at android.hardware.Camera._stopPreview(Native Method)
    at android.hardware.Camera.stopPreview(Camera.java:730)
    at com.media.video.Cameras$2.call(Cameras.java:413)
    at com.media.video.Cameras$2.call(Cameras.java:406)
    at io.reactivex.internal.operators.observable.ObservableDefer.subscribeActual(ObservableDefer.java:32)
    at io.reactivex.Observable.subscribe(Observable.java:10842)
    at io.reactivex.internal.operators.observable.ObservableSubscribeOn$SubscribeTask.run(ObservableSubscribeOn.java:96)
    at io.reactivex.internal.schedulers.ScheduledDirectTask.call(ScheduledDirectTask.java:38)
    at io.reactivex.internal.schedulers.ScheduledDirectTask.call(ScheduledDirectTask.java:26)
    at java.util.concurrent.FutureTask.run(FutureTask.java:237)
    at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:272)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1133)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:607)
    at java.lang.Thread.run(Thread.java:761)

android rx-java rx-java2 rx-android
1 Answer

3 votes

The code you wrote for releasing the camera is prone to race conditions. One small change that may already make a difference is to set the flag before the action takes place, so that a thread checking cameraReleased never sees a camera that has already been released while the flag still reads false:

cameraReleased = true;
camera.release();
camera = null;
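
Applied to the releaseComplete() method from the question, that reordering would look roughly like this (a sketch only; apart from moving the flag assignment, the method is unchanged):

    private void releaseComplete() {
        synchronized (this) {
            if (camera != null) {
                // Set the flag first, so any thread that reads cameraReleased
                // between these statements already treats the camera as gone.
                cameraReleased = true;
                camera.release();
                camera = null;
            }
        }
        buffer1 = null;
        buffer2 = null;
        buffer3 = null;
    }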

It may also be necessary to re-check the flag right before the camera is released, but you already synchronize the code for that. The problem here is the synchronized (this) inside the deferred Observable: there, this refers to the anonymous Callable rather than to the outer class, so you are not locking on the same instance as the rest of the code. Synchronize on the enclosing Cameras instance instead, using Cameras.this:

@Override
public ObservableSource<?> call() throws Exception {
    if (LogDog.isEnabled) LogDog.e("Debug::"+TAG + "::stopPreview()::AsyncTask::doInBackground()", " (camera != null) =" + (camera != null) );
    synchronized (Cameras.this) {
        if ( (null != camera) && (!cameraReleased) ) {
            if (LogDog.isEnabled)  LogDog.e("Debug::" + TAG + "::stopPreview()::AsyncTask::doInBackground()", " XXX CALL camera.stopPreview()");
            camera.stopPreview();
        }
    }
    return Completable.complete().toObservable();
}

Beyond that, your use of Observable.defer() looks like the wrong tool for this use case; the Completable.fromAction() factory is probably a better fit.
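
As an illustration, here is a minimal sketch of what stopPreview() could look like with Completable.fromAction(). Field and method names such as camera, cameraReleased, routNextTask and TAG are taken from the question; the explicit error handler is an assumption, not part of the original code.

    // Assumes io.reactivex.Completable, io.reactivex.schedulers.Schedulers and
    // io.reactivex.android.schedulers.AndroidSchedulers are imported.
    private void stopPreview(final int nextTask) {
        if (camera == null) return;

        Completable
                .fromAction(() -> {
                    // Runs on a computation thread; lock on the Cameras instance,
                    // the same monitor used by release() and releaseComplete().
                    synchronized (Cameras.this) {
                        if (camera != null && !cameraReleased) {
                            camera.stopPreview();
                        }
                    }
                })
                .subscribeOn(Schedulers.computation())
                .observeOn(AndroidSchedulers.mainThread())
                .subscribe(
                        () -> routNextTask(nextTask),
                        throwable -> Log.e(TAG, "stopPreview failed", throwable));
    }

Passing an explicit onError consumer to subscribe() also prevents a failure inside the camera call from surfacing as the OnErrorNotImplementedException shown in the crash log.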
