Android - Processing camera data with RenderScript and the Camera2 API


Below I'd like to show a snippet of my custom camera app's code. My goal is to apply a filter to the incoming video frames and display the result. For this I'm using RenderScript and Camera2.
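At a high level, the data flow that the classes below implement looks like this (a condensed sketch; the names are the fields from the code that follows):

// Camera2 pushes frames into the Surface of a YUV input allocation:
//   camera -> yuvInAlloc.getSurface()                    (USAGE_IO_INPUT)
// Each frame is latched and converted to RGBA:
//   yuvInAlloc.ioReceive() -> ScriptIntrinsicYuvToRGB -> rgbInAlloc
// The currently selected filter renders into the output allocation:
//   rsRenderer.renderFrame(rs, rgbInAlloc, rgbOutAlloc)
// The result is pushed to the TextureView's Surface:
//   rgbOutAlloc.ioSend() -> mPreviewSurface              (USAGE_IO_OUTPUT)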

Here is my MainActivity.java (since it is a bit long, I removed the methods that deal with requesting the camera permission):

public class MainActivity extends AppCompatActivity {

    private static final String TAG = "MainActivity";
    private static final int REQUEST_CAMERA_PERMISSION_RESULT = 0;

    private TextureView mTextureView;
    private Button mButton;
    private CameraDevice mCameraDevice;
    private String mCameraId;
    private HandlerThread mBackgroundHandlerThread;
    private Handler mBackgroundHandler;
    private Size mPreviewSize;
    private CaptureRequest.Builder mCaptureRequestBuilder;
    private RsSurfaceRenderer mRenderer;
    private Surface mPreviewSurface;
    private Surface mProcessingNormalSurface;
    private RsCameraPreviewRenderer cameraPreviewRenderer;
    private RenderScript rs;
    private List<Surface> mSurfaces;

    private Toast rendererNameToast;
    private String rendererName;

    private int currentRendererIndex = 0;

    private static List<Class<? extends RsRenderer>> rendererTypes;

    static {
        rendererTypes = new ArrayList<>();
        rendererTypes.add(DefaultRsRenderer.class);
        rendererTypes.add(GreyscaleRsRenderer.class);
        rendererTypes.add(SharpenRenderer.class);
        rendererTypes.add(BlurRsRenderer.class);
        rendererTypes.add(ColorFrameRenderer.class);
        rendererTypes.add(HueRotationRenderer.class);
        rendererTypes.add(TrailsRenderer.class);
        rendererTypes.add(AcidRenderer.class);
    }



    private static final SparseIntArray ORIENTATIONS =
            new SparseIntArray();

    static {
        ORIENTATIONS.append(Surface.ROTATION_0, 0);
        ORIENTATIONS.append(Surface.ROTATION_90, 90);
        ORIENTATIONS.append(Surface.ROTATION_180, 180);
        ORIENTATIONS.append(Surface.ROTATION_270, 270);
    }

    private TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener() {
        @Override
        public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {

            setupCamera(width, height);
            connectCamera();


        }

        @Override
        public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int width, int height) {

        }

        @Override
        public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
            return false;
        }

        @Override
        public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {

        }
    };

    private CameraDevice.StateCallback mCameraDeviceStateCallback = new CameraDevice.StateCallback() {
        @Override
        public void onOpened(@NonNull CameraDevice cameraDevice) {
            mCameraDevice = cameraDevice;
            startPreview();
        }
        @Override
        public void onDisconnected(@NonNull CameraDevice cameraDevice) {
            cameraDevice.close();
            mCameraDevice = null;
        }
        @Override
        public void onError(@NonNull CameraDevice cameraDevice, int i) {
            cameraDevice.close();
            mCameraDevice = null;
        }
    };

    private CameraCaptureSession.StateCallback mCameraCaptureSessionStateCallback = new CameraCaptureSession.StateCallback() {
        @Override
        public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
            try {
                cameraCaptureSession.setRepeatingRequest(mCaptureRequestBuilder.build(), null, mBackgroundHandler);
            } catch (CameraAccessException e) {
                e.printStackTrace();
            }
        }

        @Override
        public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {

            Toast.makeText(getApplicationContext(), "Unable to setup camera preview", Toast.LENGTH_SHORT).show();
        }
    };


    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

        mTextureView = (TextureView) findViewById(R.id.preview);
        mButton= (Button) findViewById(R.id.next_button);

        mButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                cycleRendererType();
                updateRsRenderer();
                if (rendererNameToast != null) {
                    rendererNameToast.cancel();
                }
                rendererNameToast =
                        Toast.makeText(MainActivity.this, rendererName, Toast.LENGTH_LONG);
                rendererNameToast.show();
            }
        });

        rs = RenderScript.create(this);
        warmUpInBackground(rs);
    }

    private void cycleRendererType() {
        currentRendererIndex++;
        if (currentRendererIndex == rendererTypes.size()) {
            currentRendererIndex = 0;
        }
    }

    private void updateRsRenderer() {
        try {
            RsRenderer renderer = rendererTypes.get(currentRendererIndex).newInstance();
            rendererName = renderer.getName();
            // The preview renderer is created lazily in startPreview(), so guard
            // against the button being tapped before the camera has opened.
            if (cameraPreviewRenderer != null) {
                cameraPreviewRenderer.setRsRenderer(renderer);
            }
        } catch (InstantiationException | IllegalAccessException e) {
            throw new RuntimeException(
                    "Unable to create renderer for index " + currentRendererIndex +
                            ", make sure it has a no-arg constructor please.", e);
        }
    }

    @Override
    protected void onResume() {
        super.onResume();
        startBackgroundThread();

        if(mTextureView.isAvailable()){
            setupCamera(mTextureView.getWidth(), mTextureView.getHeight());
            connectCamera();

        }
        else{
            mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
        }
    }

    @Override
    public void onWindowFocusChanged(boolean hasFocus){
        super.onWindowFocusChanged(hasFocus);

        View decorView = getWindow().getDecorView();
        if(hasFocus){
            decorView.setSystemUiVisibility(
                    View.SYSTEM_UI_FLAG_LAYOUT_STABLE
                            | View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
                            | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
                            | View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
                            | View.SYSTEM_UI_FLAG_FULLSCREEN
                            | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY
            );
        }
    }

    private void connectCamera(){
        CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
        try{
            if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.M){
                if(ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) ==
                        PackageManager.PERMISSION_GRANTED){
                    cameraManager.openCamera(mCameraId, mCameraDeviceStateCallback, mBackgroundHandler);
                }
                else{
                    if(shouldShowRequestPermissionRationale(Manifest.permission.CAMERA)){
                        Toast.makeText(this, "Video app required access to camera", Toast.LENGTH_SHORT).show();

                    }
                    requestPermissions(new String[] {Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSION_RESULT);
                }
            }
            else{
                cameraManager.openCamera(mCameraId, mCameraDeviceStateCallback, mBackgroundHandler);
            }

        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    @Override
    protected void onPause() {
        closeCamera();
        stopBackgroundThread();
        super.onPause();
    }

    private void closeCamera(){
        if(mCameraDevice != null){
            mCameraDevice.close();
            mCameraDevice = null;
        }
    }

    private void startPreview(){

        SurfaceTexture surfaceTexture = mTextureView.getSurfaceTexture();
        surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());

        mPreviewSurface = new Surface(surfaceTexture);


        if (mRenderer == null) {
            mRenderer = createNewRendererForCurrentType(mPreviewSize);
        }
        if (mPreviewSurface == null)
            return;

        /*
        * leads us to rgbOutAlloc.setSurface(outputSurface), where outputSurface = mPreviewSurface
        *
        * setSurface(Surface):
        * Associate a Surface with this Allocation. This operation is only valid for Allocations with USAGE_IO_OUTPUT.
        *
        * rgbOutAlloc is an RGBA_8888 allocation that can act as a Surface producer.
        * */
        mRenderer.setOutputSurface(mPreviewSurface);

        /*
        * leads us to yuvInAlloc.getSurface()
        *
        * getSurface():
        * Returns the handle to a raw buffer that is being managed by the screen compositor.
        * This operation is only valid for Allocations with USAGE_IO_INPUT.
        *
        * HERE:
        * Get the Surface that the camera will push frames to. This is the Surface from our yuv
        * input allocation. It will receive a callback when a frame is available from the camera.
        * */
        mProcessingNormalSurface = mRenderer.getInputSurface();



        List<Surface> cameraOutputSurfaces = new ArrayList<>();
        cameraOutputSurfaces.add(mProcessingNormalSurface);


        mSurfaces = cameraOutputSurfaces;

        try {
            mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            mCaptureRequestBuilder.addTarget(mProcessingNormalSurface);

            mCameraDevice.createCaptureSession(
                    mSurfaces,
                    mCameraCaptureSessionStateCallback,
                    mBackgroundHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }

    }

    private void setupCamera(int width, int height){
        CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);

        try{
            for(String cameraId: cameraManager.getCameraIdList()){
                CameraCharacteristics cameraCharacteristics =
                        cameraManager.getCameraCharacteristics(cameraId);

                if(cameraCharacteristics.get(CameraCharacteristics.LENS_FACING)==
                        CameraCharacteristics.LENS_FACING_FRONT){
                    continue;
                }

                StreamConfigurationMap map =
                        cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

                int deviceOrientation = getWindowManager().getDefaultDisplay().getRotation();
                int totalRotation = sensorToDeviceRotation(cameraCharacteristics, deviceOrientation);
                boolean swapRotation = totalRotation == 90 || totalRotation == 270;
                int rotatedWidth = width;
                int rotatedHeight = height;

                if(swapRotation){
                    rotatedWidth = height;
                    rotatedHeight = width;
                }

                mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedWidth, rotatedHeight);
                //mTextureView.setRotation(90);


                mCameraId = cameraId;
                return;
            }
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    private RsSurfaceRenderer createNewRendererForCurrentType(Size outputSize) {
        if (cameraPreviewRenderer == null) {
            cameraPreviewRenderer =
                    new RsCameraPreviewRenderer(rs, outputSize.getWidth(), outputSize.getHeight());
        }
        updateRsRenderer();
        return cameraPreviewRenderer;
    }

    private void startBackgroundThread(){
        mBackgroundHandlerThread = new HandlerThread("Camera2VideoImage");
        mBackgroundHandlerThread.start();
        mBackgroundHandler = new Handler(mBackgroundHandlerThread.getLooper());
    }

    private void stopBackgroundThread(){
        mBackgroundHandlerThread.quitSafely();
        try {
            mBackgroundHandlerThread.join();
            mBackgroundHandlerThread = null;
            mBackgroundHandler = null;
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    private static int sensorToDeviceRotation(CameraCharacteristics c, int deviceOrientation){
        int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);

        // get device orientation in degrees
        deviceOrientation = ORIENTATIONS.get(deviceOrientation);

        // calculate desired JPEG orientation relative to camera orientation to make
        // the image upright relative to the device orientation
        return (sensorOrientation + deviceOrientation + 360) % 360;

    }

    static class CompareSizesByArea implements Comparator<Size>{
        @Override
        public int compare(Size lhs, Size rhs) {
            // cast to long so the width*height products can't overflow an int
            return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
                    (long) rhs.getWidth() * rhs.getHeight());
        }
    }

    private static Size chooseOptimalSize(Size[] choices, int width, int height){
        // Collect the supported resolutions that are at least as big as the preview
        // Surface and share its aspect ratio
        List<Size> bigEnough = new ArrayList<Size>();
        for(Size option: choices){
            if(option.getHeight() == option.getWidth() * height/width && option.getWidth() >= width && option.getHeight()>=height){
                bigEnough.add(option);
            }
        }

        // pick the smallest of those, assuming we found any
        if(bigEnough.size() > 0){
            return Collections.min(bigEnough, new CompareSizesByArea());
        }
        else{
            Log.e(TAG, "Couldn't find any suitable preview size");
            return choices[0];
        }
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        if(requestCode == REQUEST_CAMERA_PERMISSION_RESULT){
            if(grantResults[0] != PackageManager.PERMISSION_GRANTED){
                Toast.makeText(
                        getApplicationContext(),
                        "Application will not run without camera service",
                        Toast.LENGTH_SHORT).show();
            }
        }
    }

    /**
     * These are custom kernels that are AoT compiled on the very first launch, so we want to
     * make sure that happens outside of the render loop and not on the UI thread.
     */
    public static void warmUpInBackground(RenderScript rs) {
        new Thread(() -> {
            Log.i(TAG, "RS warmup start...");
            long start = System.currentTimeMillis();
            try {
                ScriptC_color_frame color_frame = new ScriptC_color_frame(rs);
                ScriptC_set_alpha set_alpha = new ScriptC_set_alpha(rs);
                ScriptC_to_grey to_grey = new ScriptC_to_grey(rs);
            } catch (Exception e) {
                e.printStackTrace();
            }
            Log.i(TAG, "RS warmup end, " + (System.currentTimeMillis() - start) + " ms");
        }).start();
    }
}
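For context, the renderers cycled by the button implement a small interface that the code above relies on. The project's actual definition isn't shown in the question; the following is a minimal sketch reconstructed from the calls made above (getName(), renderFrame(), and the doc comment on renderFrame in the next class), together with one plausible greyscale implementation using the built-in color-matrix intrinsic. Note that the real project apparently uses a custom ScriptC_to_grey kernel instead, per the warmup code above, and the two types would live in separate files:

import android.renderscript.Allocation;
import android.renderscript.RenderScript;
import android.renderscript.ScriptIntrinsicColorMatrix;

public interface RsRenderer {
    // Must completely overwrite "out"; called once per frame for stream rendering.
    void renderFrame(RenderScript rs, Allocation in, Allocation out);

    // Human-readable name, shown in the toast when cycling renderers.
    String getName();
}

public class GreyscaleRsRenderer implements RsRenderer {

    private ScriptIntrinsicColorMatrix colorMatrix;

    @Override
    public void renderFrame(RenderScript rs, Allocation in, Allocation out) {
        if (colorMatrix == null) {
            colorMatrix = ScriptIntrinsicColorMatrix.create(rs);
            colorMatrix.setGreyscale();
        }
        colorMatrix.forEach(in, out);
    }

    @Override
    public String getName() {
        return "Greyscale";
    }
}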

Now, the following class contains all of the methods for creating the RenderScript allocations. It also has the methods that bind the allocations to the two surfaces, which we already saw in the startPreview() method of MainActivity.java above. Finally, it starts a render thread that processes the incoming frames arriving in yuvInAlloc:

public class RsCameraPreviewRenderer
        implements RsSurfaceRenderer, Allocation.OnBufferAvailableListener, Runnable {

    private static final String TAG = "RsCameraPreviewRenderer";

    private final RenderScript rs;
    private final Allocation yuvInAlloc;
    private final Allocation rgbInAlloc;
    private final Allocation rgbOutAlloc;
    private final ScriptIntrinsicYuvToRGB yuvToRGBScript;

    @Nullable
    private final HandlerThread renderThread;

    // all guarded by "this"
    private Handler renderHandler;
    private RsRenderer rsRenderer;

    private int nFramesAvailable;
    private boolean outputSurfaceIsSet;

    /**
     * @param rs RenderScript context
     * @param x  width in pixels
     * @param y  height in pixels
     */
    public RsCameraPreviewRenderer(RenderScript rs, int x, int y) {
        this(rs, new DefaultRsRenderer(), x, y);
    }

    /**
     * @param rs         RenderScript context
     * @param rsRenderer the renderer (filter) to apply to each frame
     * @param x          width in pixels
     * @param y          height in pixels
     */
    public RsCameraPreviewRenderer(RenderScript rs, RsRenderer rsRenderer, int x, int y) {
        this(rs, rsRenderer, x, y, null);
    }

    /**
     * @param rs            RenderScript context
     * @param rsRenderer    the renderer (filter) to apply to each frame
     * @param x             width in pixels
     * @param y             height in pixels
     * @param renderHandler handler for the render thread, or null to create one internally
     */
    public RsCameraPreviewRenderer(RenderScript rs,
                                   RsRenderer rsRenderer,
                                   int x,
                                   int y,
                                   Handler renderHandler) {
        this.rs = rs;
        this.rsRenderer = rsRenderer;

        if (renderHandler == null) {
            this.renderThread = new HandlerThread(TAG);
            this.renderThread.start();
            this.renderHandler = new Handler(renderThread.getLooper());
        } else {
            this.renderThread = null;
            this.renderHandler = renderHandler;
        }

        Log.i(TAG,
                "Setting up RsCameraPreviewRenderer with " + rsRenderer.getName() + " (" + x + "," +
                        y + ")");

        /*
        * Create an YUV allocation that can act as a Surface consumer. This lets us call
        * Allocation#getSurface(), set a Allocation.OnBufferAvailableListener
        * callback to be notified when a frame is ready, and call Allocation#ioReceive() to
        * latch a frame and access its yuv pixel data.
        *
        * The yuvFormat should be the value ImageFormat#YUV_420_888, ImageFormat#NV21 or maybe
        * ImageFormat#YV12.
        *
        * @param rs        RenderScript context
        * @param x         width in pixels
        * @param y         height in pixels
        * @param yuvFormat yuv pixel format
        * @return a YUV Allocation with USAGE_IO_INPUT
        * */
        yuvInAlloc = RsUtil.createYuvIoInputAlloc(rs, x, y, ImageFormat.YUV_420_888);
        yuvInAlloc.setOnBufferAvailableListener(this);


        /**
         * Create a sized RGBA_8888 Allocation to use with scripts.
         *
         * @param rs RenderScript context
         * @param x  width in pixels
         * @param y  height in pixels
         * @return an RGBA_8888 Allocation
         */
        rgbInAlloc = RsUtil.createRgbAlloc(rs, x, y);

        /**
         * Create an RGBA_8888 allocation that can act as a Surface producer. This lets us call
         * Allocation#setSurface(Surface) and call Allocation#ioSend(). If
         * you wanted to read the data from this Allocation, do so before calling ioSend(), because
         * after, the data is undefined.
         *
         * @param rs rs context
         * @param x  width in pixels
         * @param y  height in pixels
         * @return an RGBA_8888 Allocation with USAGE_IO_OUTPUT
         */
        rgbOutAlloc = RsUtil.createRgbIoOutputAlloc(rs, x, y);

        yuvToRGBScript = ScriptIntrinsicYuvToRGB.create(rs, Element.RGBA_8888(rs));

        yuvToRGBScript.setInput(yuvInAlloc);
    }

    @Override
    @AnyThread
    public synchronized void setRsRenderer(RsRenderer rsRenderer) {
        if (isRunning()) {
            this.rsRenderer = rsRenderer;
        }
    }


    /**
     * Check if this renderer is still running or has been shutdown.
     *
     * @return true if we're running, else false
     */
    @Override
    @AnyThread
    public synchronized boolean isRunning() {
        if (renderHandler == null) {
            Log.w(TAG, "renderer was already shut down");
            return false;
        }
        return true;
    }

    /**
     * Set the output surface to consume the stream of edited camera frames. This is probably
     * from a SurfaceView or TextureView. Please make sure it's valid.
     *
     * @param outputSurface a valid surface to consume a stream of edited frames from the camera
     */
    @AnyThread
    @Override
    public synchronized void setOutputSurface(Surface outputSurface) {
        if (isRunning()) {
            if (!outputSurface.isValid()) {
                throw new IllegalArgumentException("output was invalid");
            }
            rgbOutAlloc.setSurface(outputSurface);
            outputSurfaceIsSet = true;
            Log.d(TAG, "output surface was set");
        }
    }

    /**
     * Get the Surface that the camera will push frames to. This is the Surface from our yuv
     * input allocation. It will receive a callback when a frame is available from the camera.
     *
     * @return a surface that consumes yuv frames from the camera preview, or null if the
     * renderer has been shut down
     */
     */
    @AnyThread
    @Override
    public synchronized Surface getInputSurface() {
        return isRunning() ? yuvInAlloc.getSurface() : null;
    }

    /**
     * Callback for when the camera has a new frame. We want to handle this on the dedicated
     * render thread, so we'll increment nFramesAvailable and post a render request.
     */
    @Override
    public synchronized void onBufferAvailable(Allocation a) {
        if (isRunning()) {
            if (!outputSurfaceIsSet) {
                Log.e(TAG, "We are getting frames from the camera but we never set the view " +
                        "surface to render to");
                return;
            }
            nFramesAvailable++;
            renderHandler.post(this);
        }
    }

    /**
     * Render a frame on the render thread. Everything is async except ioSend(), which blocks
     * until the rendering completes. If we wanted to time a frame, we'd log the time after
     * that call.
     */
    @WorkerThread
    @Override
    public void run() {
        RsRenderer renderer;
        int nFrames;
        synchronized (this) {
            if (!isRunning()) {
                return;
            }
            renderer = rsRenderer;
            nFrames = nFramesAvailable;
            nFramesAvailable = 0;

            renderHandler.removeCallbacks(this);
        }

        for (int i = 0; i < nFrames; i++) {

            /*
            * Receive the latest input into the Allocation.
            * This operation is only valid if USAGE_IO_INPUT is set on the Allocation.
            * */
            yuvInAlloc.ioReceive();
        }

        yuvToRGBScript.forEach(rgbInAlloc);

        /*
        * Render an edit to an input Allocation and write it to an output allocation. This must
        * always overwrite the out Allocation. This is called once for a Bitmap, and once per frame
        * for stream rendering.
        * */
        renderer.renderFrame(rs, rgbInAlloc, rgbOutAlloc);

        /*
        * Send a buffer to the output stream. The contents of the Allocation will be undefined after this
        * operation. This operation is only valid if USAGE_IO_OUTPUT is set on the Allocation.
        * */
        rgbOutAlloc.ioSend();
    }


    /**
     * Shut down the renderer when you're finished.
     */
    @Override
    @AnyThread
    public void shutdown() {
        synchronized (this) {
            if (isRunning()) {
                Log.d(TAG, "requesting shutdown...");
                renderHandler.removeCallbacks(this);
                renderHandler.postAtFrontOfQueue(() -> {
                    Log.i(TAG, "shutting down");
                    synchronized (this) {
                        yuvInAlloc.destroy();
                        rgbInAlloc.destroy();
                        rgbOutAlloc.destroy();
                        yuvToRGBScript.destroy();
                        if (renderThread != null) {
                            renderThread.quitSafely();
                        }
                    }
                });
                renderHandler = null;
            }
        }
    }
}
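The RsUtil helper methods used in the constructor are not shown in the question. Here is a minimal sketch of what they plausibly look like, reconstructed from the doc comments above for createYuvIoInputAlloc, createRgbAlloc, and createRgbIoOutputAlloc; the real project code may differ in details:

import android.renderscript.Allocation;
import android.renderscript.Element;
import android.renderscript.RenderScript;
import android.renderscript.Type;

public final class RsUtil {

    private RsUtil() {}

    // YUV allocation that can act as a Surface consumer (USAGE_IO_INPUT), so the
    // camera can push frames into it and we can latch them with ioReceive().
    public static Allocation createYuvIoInputAlloc(RenderScript rs, int x, int y, int yuvFormat) {
        Type yuvType = new Type.Builder(rs,
                Element.createPixel(rs, Element.DataType.UNSIGNED_8, Element.DataKind.PIXEL_YUV))
                .setX(x)
                .setY(y)
                .setYuvFormat(yuvFormat)
                .create();
        return Allocation.createTyped(rs, yuvType,
                Allocation.USAGE_IO_INPUT | Allocation.USAGE_SCRIPT);
    }

    // Plain RGBA_8888 allocation used as scratch space between scripts.
    public static Allocation createRgbAlloc(RenderScript rs, int x, int y) {
        return Allocation.createTyped(rs, rgbType(rs, x, y));
    }

    // RGBA_8888 allocation that can act as a Surface producer (USAGE_IO_OUTPUT),
    // so ioSend() pushes the rendered frame to the attached output Surface.
    public static Allocation createRgbIoOutputAlloc(RenderScript rs, int x, int y) {
        return Allocation.createTyped(rs, rgbType(rs, x, y),
                Allocation.USAGE_IO_OUTPUT | Allocation.USAGE_SCRIPT);
    }

    private static Type rgbType(RenderScript rs, int x, int y) {
        return new Type.Builder(rs, Element.RGBA_8888(rs)).setX(x).setY(y).create();
    }
}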

Most of the code above is adapted from the following project:

My problem is the output, shown in the GIF below: [GIF: the preview appears rotated]. The image is rotated somehow, which is not what I expected. Why is that happening? I can't find the answer. Can anyone help?

java android android-camera2 landscape-portrait

1 Answer
0 votes

This is because the YUV stream only comes out in the sensor's default orientation. Your sensorToDeviceRotation() only computes the rotation used for JPEG output. You have to handle the YUV rotation yourself: either write a function in your Activity that rotates the bitmap, or rotate in RenderScript with an additional script. The latter is recommended because it is faster.

I found an example of a RenderScript rotation script here.

I haven't tested it, but if you look into it a bit more it should do the job. Hope this helps.
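For illustration, a minimal rotation kernel could look something like the sketch below. This is a hypothetical rotate.rs (which the toolchain would compile into a ScriptC_rotate class), not the linked example; it rotates an RGBA_8888 allocation 90° clockwise, so the output allocation must be sized height × width:

#pragma version(1)
#pragma rs java_package_name(com.example.rscamera)  // hypothetical package name

rs_allocation inImage;  // source RGBA allocation (width x height), set from Java
int inHeight;           // height of the source allocation, set from Java

// Output pixel (x, y) is read from source pixel (y, inHeight - 1 - x),
// which is a 90-degree clockwise rotation.
uchar4 RS_KERNEL rotate90(uint32_t x, uint32_t y) {
    return rsGetElementAt_uchar4(inImage, y, inHeight - 1 - x);
}

On the Java side you would then call something like script.set_inImage(rgbInAlloc); script.set_inHeight(previewHeight); script.forEach_rotate90(rotatedAlloc); where rotatedAlloc is an RGBA allocation with swapped dimensions, and send that allocation to the output surface instead of rgbOutAlloc.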
