How can I take photos with the camera periodically from a Service? [closed]


I want to periodically capture photos from the camera inside a Service and send them over a socket to a server application (desktop software). I tried to do this with the code below, but it only captures once and does not keep capturing.

Based on this answer, I have to call startPreview() before capturing with takePicture(), so I did that, but even so it doesn't work.

Can someone help me?

package com.example.vrs.myexampleapp;

import android.Manifest;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Handler;
import android.os.IBinder;
import android.os.Message;
import android.support.v4.app.ActivityCompat;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

@SuppressWarnings("deprecation")
public class MyCamera extends Service {

    public final int DONE = 1;
    public final int NEXT = 2;
    public final int PERIOD = 1000;
    private Camera camera;
    private Timer timer;
    private int cameraId = 0;
    SurfaceHolder previewHolder;

    public class Timer extends AsyncTask<Void, Void, Void> {
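        // One-shot delay: sleeps PERIOD ms off the main thread, then posts DONE so the handler takes the next picture.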
        Context mContext;
        private Handler threadHandler;

        public Timer(Context context, Handler threadHandler) {
            super();
            this.threadHandler = threadHandler;
            mContext = context;
        }

        @Override
        protected Void doInBackground(Void... params) {
            try {
                Thread.sleep(PERIOD);
                Message.obtain(threadHandler, DONE, "").sendToTarget();
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            return null;
        }
    }

    private int findFrontFacingCamera() {
        int cameraId = -1;
        int numberOfCameras = Camera.getNumberOfCameras();
        for (int i = 0; i < numberOfCameras; i++) {
            Camera.CameraInfo info = new Camera.CameraInfo();
            Camera.getCameraInfo(i, info);
            if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                Log.i("MyCamera", "Camera found");
                cameraId = i;
                break;
            }
        }
        return cameraId;
    }

    public void startCamera() {

        if (ActivityCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.CAMERA)
                == PackageManager.PERMISSION_GRANTED && getPackageManager()
                .hasSystemFeature(PackageManager.FEATURE_CAMERA)) {

            cameraId = findFrontFacingCamera();
            if (cameraId < 0) {
                Log.i("MyCamera", "No front facing camera found.");
            } else {
                safeCameraOpen(cameraId);
            }

            if (android.os.Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
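                // Pre-Marshmallow: an off-screen dummy SurfaceView is used as the preview surface so no UI is needed.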

                SurfaceView dummy = new SurfaceView(this);
                previewHolder = dummy.getHolder();

                previewHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
                try {
                    camera.setPreviewDisplay(previewHolder);
                } catch (IOException e1) {
                    e1.printStackTrace();
                }

            } else {
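                // On M and above: preview into an off-screen SurfaceTexture (MODE_PRIVATE is just the int 0, used here as the texture name).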
                SurfaceTexture surfaceTexture = new SurfaceTexture(MODE_PRIVATE);
                try {
                    camera.setPreviewTexture(surfaceTexture);
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }

            camera.startPreview();
            if (android.os.Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
                previewHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
                try {
                    camera.setPreviewDisplay(previewHolder);
                } catch (IOException e1) {
                    e1.printStackTrace();
                }
            }

            Camera.Parameters params = camera.getParameters();
            params.setJpegQuality(100);
            camera.setParameters(params);

            timer = new Timer(getApplicationContext(), threadHandler);
            timer.execute();
        }
    }

    public static DataOutputStream dos;
    public static byte[] array;

    private Handler threadHandler = new Handler() {
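        // DONE: take a picture now; NEXT: start a new Timer so another picture is taken after PERIOD ms.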
        public void handleMessage(android.os.Message msg) {
            switch (msg.what) {
                case DONE:
                    camera.startPreview();
                    camera.takePicture(null, null, mCall);
                    break;
                case NEXT:
                    Log.i("MyCamera", "Here in NEXT!!");
                    timer = new Timer(getApplicationContext(), threadHandler);
                    timer.execute();
                    break;
            }
        }
    };
    Camera.PictureCallback mCall = new Camera.PictureCallback() {
        public void onPictureTaken(byte[] data, Camera camera) {

            Log.i("MyCamera", "Here in PictureCallback");

            if (data != null) {
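                // Decode the JPEG, down-scale it to 360x360 and rotate it 270 degrees (front-camera frames arrive rotated).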

                Matrix mtx = new Matrix();
                mtx.postRotate(270);

                Bitmap bitmapPicture = BitmapFactory.decodeByteArray(data, 0, data.length);
                bitmapPicture = Bitmap.createScaledBitmap(bitmapPicture, 360, 360, true);
                Bitmap rotatedBMP = Bitmap.createBitmap(bitmapPicture, 0, 0, bitmapPicture.getWidth(), bitmapPicture.getHeight(), mtx, true);

                ByteArrayOutputStream bos = new ByteArrayOutputStream();
                try {

                    rotatedBMP.compress(Bitmap.CompressFormat.JPEG, 100, bos);
                    array = Methods.compress(bos.toByteArray());

                    new ConnAsyncTask().execute();
                    Message.obtain(threadHandler, NEXT, "").sendToTarget(); // Capture a new photo

                } catch (Exception e) {
                    e.printStackTrace();
                } 
            }
        }
    };

    static class ConnAsyncTask extends AsyncTask<Void, Void, Void> {
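        // Writes the compressed JPEG bytes, length-prefixed, to the already-open socket to the desktop application.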
        protected Void doInBackground(Void... params) {

            try {
                dos = new DataOutputStream(SocketBackgroundService.yclientSocket.getOutputStream());
                dos.writeInt(array.length);
                dos.write(array, 0, array.length);
                dos.flush();
            } catch (IOException e) {
                e.printStackTrace();
            }
            return null;
        }
    }

    private boolean safeCameraOpen(int id) {
        boolean qOpened = false;
        try {
            stopCamera();
            camera = Camera.open(id);
            qOpened = (camera != null);
        } catch (Exception e) {
            Log.i("MyCamera", "failed to open Camera");
            e.printStackTrace();
        }
        return qOpened;
    }

    public void stopCamera() {
        if (camera != null) {
            camera.stopPreview();
            camera.release();
            camera = null;
        }
    }

    public static MyCamera instance;

    @Override
    public void onCreate() {
        super.onCreate();

        Log.i("MyCamera", "Service created!!!");
        instance = this;
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {

        Log.i("MyCamera", "onStartCommand() service started!!!");

        instance = this;

        return START_STICKY;
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        stopCamera();
    }

    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }
}
java android android-camera

1 Answer

I'm not entirely sure why you are using an AsyncTask (your Timer class) just to create a delay.

Replace it with a simple handler.postDelayed().
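A minimal sketch of that idea, reusing the camera, mCall and PERIOD members from your MyCamera service (the handler and runnable names here are just illustrative, not a tested drop-in):

private final Handler captureHandler = new Handler();

private final Runnable captureRunnable = new Runnable() {
    @Override
    public void run() {
        // Preview must be running before takePicture(), as in your DONE case.
        camera.startPreview();
        camera.takePicture(null, null, mCall);
    }
};

// Start the loop once, at the end of startCamera():
//     captureHandler.postDelayed(captureRunnable, PERIOD);

// In onPictureTaken(), instead of sending the NEXT message:
//     captureHandler.postDelayed(captureRunnable, PERIOD);

// Stop it in onDestroy():
//     captureHandler.removeCallbacks(captureRunnable);

Each capture then schedules the next one on the main thread, so there is no need to spin up a new AsyncTask for every frame.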
