How can I record video in real time in an Android service and detect faces in the video frames?


I have created a service that records video when someone tries and fails to unlock the phone. While the service is running and capturing video, it also grabs the preview frames in real time and detects faces in them.

The problem is that while the service is detecting faces from the video, logcat shows more than 150 skipped frames.

How can I detect faces efficiently so that so many frames aren't skipped?

Here is my code:

public class Background_Recording extends Service implements SurfaceHolder.Callback,Camera.PreviewCallback {
    private WindowManager windowManager;
    public static int MAX_FACES = 5;
    boolean stopped = false;
    Timer t;
    Bitmap bitmaper;
    Handler handler;
    ArrayList<Bitmap> bit_collect = new ArrayList<Bitmap>();
    private SurfaceView surfaceView;
    private Camera camera = null;
    int camera_type = 1;
    private MediaRecorder mediaRecorder = null;

    @Override
    public void onCreate() {
        windowManager = (WindowManager) this.getSystemService(Context.WINDOW_SERVICE);
        surfaceView = new SurfaceView(this);
        WindowManager.LayoutParams layoutParams = new WindowManager.LayoutParams(1, 1, WindowManager.LayoutParams.TYPE_SYSTEM_OVERLAY,
                WindowManager.LayoutParams.FLAG_WATCH_OUTSIDE_TOUCH,
                PixelFormat.TRANSLUCENT
        );
        layoutParams.gravity = Gravity.LEFT | Gravity.TOP;
        windowManager.addView(surfaceView, layoutParams);
        surfaceView.getHolder().addCallback(this);
    }

    @Override
    public void surfaceCreated(SurfaceHolder surfaceHolder) {
        camera = Camera.open(camera_type);
        mediaRecorder = new MediaRecorder();
        camera.unlock();
        mediaRecorder.setPreviewDisplay(surfaceHolder.getSurface());
        mediaRecorder.setCamera(camera);
        mediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
        mediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
        mediaRecorder.setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH));
        mediaRecorder.setOutputFile("/sdcard/unlock.mp4");
        try {
            mediaRecorder.prepare();
            mediaRecorder.start();
            camera.setPreviewCallback(this);
            t = new Timer();
            t.schedule(new TimerTask() {
                @Override
                public void run() {
                    camera.setPreviewCallback(null);
                    stopSelf();
                    stopped = true;
                }
            }, 8000);
        } catch (Exception e) {
            Toast.makeText(getApplicationContext(), "getting exception ", Toast.LENGTH_LONG).show();
        }
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int flagID) {
        handler = new Handler(Looper.getMainLooper());
        return super.onStartCommand(intent, flags, flagID);
    }

    @Override
    public void onDestroy() {
        mediaRecorder.stop();
        mediaRecorder.reset();
        mediaRecorder.release();
        camera.lock();
        camera.release();
        windowManager.removeView(surfaceView);
    }

    @Override
    public void surfaceChanged(SurfaceHolder surfaceHolder, int format, int width, int height) {
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
    }

    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        try {
            Camera.Parameters parameters = camera.getParameters();
            Camera.Size size = parameters.getPreviewSize();
            YuvImage image = new YuvImage(data, parameters.getPreviewFormat(), size.width, size.height, null);
            ByteArrayOutputStream os = new ByteArrayOutputStream();
            image.compressToJpeg(new Rect(0, 0, size.width, size.height), 100, os);
            byte[] jpegByteArray = os.toByteArray();
            bitmaper = BitmapFactory.decodeByteArray(jpegByteArray, 0, jpegByteArray.length);
            bit_collect.add(bitmaper);
            handler.post(new Runnable() {
                @Override
                public void run() {
                    processing(bitmaper);
                }
            });
        } catch (Exception e) {
            Toast.makeText(getApplicationContext(), "no frames ", Toast.LENGTH_SHORT).show();
        }
    }

    public void processing(Bitmap final_byte) {
        if (final_byte != null) {
            int width = final_byte.getWidth();
            int height = final_byte.getHeight();
            FaceDetector detector = new FaceDetector(width, height, Background_Recording.MAX_FACES);
            FaceDetector.Face[] faces = new FaceDetector.Face[Background_Recording.MAX_FACES];
            int facesFound = detector.findFaces(final_byte, faces);
            if (facesFound > 0) {
                Toast.makeText(getApplicationContext(), "face found", Toast.LENGTH_SHORT).show();
            } else {
                final_byte.recycle();
                Toast.makeText(getApplicationContext(), "no face found", Toast.LENGTH_SHORT).show();
            }
        }
    }
}
1 Answer

You are running face detection on the UI thread, because your handler is attached to the main thread's Looper. You should move all of that work to a background thread. Try the code below and see my comments explaining the changes. It may not work 100% as-is, but it should be very close.
//at the beginning of your class
private HandlerThread handlerThread;

//...
@Override
public int onStartCommand(Intent intent, int flags, int flagID) {
    //Create a new thread which the Handler will use to
    //process the image data and run face detection
    handlerThread = new HandlerThread("faceDetectionThread");
    handlerThread.start();
    Looper looper = handlerThread.getLooper();
    handler = new Handler(looper); //assign the existing field; declaring a new local here would leave the field null
    return super.onStartCommand(intent, flags, flagID);
}

@Override
public void onPreviewFrame(final byte[] data, final Camera camera) {
    try {
        //This solution only processes the last frame.
        //It waits for 100ms, and if no more data is received,
        //it will start processing. Otherwise, the previously posted
        //Runnable is cancelled and replaced with the new frame.
        handler.removeCallbacksAndMessages(null);
        handler.postDelayed(new Runnable() {
            @Override
            public void run() {
                processing(data, camera);
            }
        }, 100);
    } catch (Exception e) {
        Toast.makeText(getApplicationContext(), "no frames ", Toast.LENGTH_SHORT).show();
    }
}

public void processing(final byte[] data, final Camera camera) {

    // Bitmap processing must be on a background thread!
    // Moved this from the onPreviewFrame method
    Camera.Parameters parameters = camera.getParameters();
    Camera.Size size = parameters.getPreviewSize();
    YuvImage image = new YuvImage(data, parameters.getPreviewFormat(), size.width, size.height, null);
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    image.compressToJpeg(new Rect(0, 0, size.width, size.height), 100, os);
    byte[] jpegByteArray = os.toByteArray();
    // FaceDetector.findFaces() requires an RGB_565 bitmap, so request that config when decoding
    BitmapFactory.Options options = new BitmapFactory.Options();
    options.inPreferredConfig = Bitmap.Config.RGB_565;
    bitmaper = BitmapFactory.decodeByteArray(jpegByteArray, 0, jpegByteArray.length, options);
    bit_collect.add(bitmaper);

    if (bitmaper != null) {
        int width = bitmaper.getWidth();
        int height = bitmaper.getHeight();
        FaceDetector detector = new FaceDetector(width, height, Background_Recording.MAX_FACES);
        FaceDetector.Face[] faces = new FaceDetector.Face[Background_Recording.MAX_FACES];
        int facesFound = detector.findFaces(bitmaper, faces);
        if (facesFound > 0) {
            Toast.makeText(getApplicationContext(), "face found", Toast.LENGTH_SHORT).show();
        } else {
            bitmaper.recycle();
            Toast.makeText(getApplicationContext(), "no face found", Toast.LENGTH_SHORT).show();
        }
    }
}

//...

@Override
public void onDestroy() {
    mediaRecorder.stop();
    mediaRecorder.reset();
    mediaRecorder.release();
    camera.lock();
    camera.release();
    windowManager.removeView(surfaceView);
    handlerThread.quit(); //Don't forget this!
}
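
On API 18 and above, handlerThread.quitSafely() is worth considering instead of quit(): it processes messages that are already due before the looper terminates, instead of dropping everything.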

Also, face detection will likely run faster if you reduce MAX_FACES to 1.
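
A minimal sketch of that tweak, as a hypothetical hasFace() helper (the frame argument is the RGB_565 bitmap decoded in processing() above):

// Hypothetical helper: ask FaceDetector for at most one face,
// which keeps the per-frame detection work as small as possible.
private boolean hasFace(Bitmap frame) {
    // findFaces() expects an RGB_565 bitmap whose width is even
    FaceDetector detector = new FaceDetector(frame.getWidth(), frame.getHeight(), 1);
    FaceDetector.Face[] faces = new FaceDetector.Face[1];
    return detector.findFaces(frame, faces) > 0;
}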


Glad to hear it! If so, please upvote the answer. - kenny_k
Hey @SachinVashistha, could you explain a bit more or share your sample code? - Nitin
