title: Native Android Face Detection: Quick Face Tracking with Camera2 + FaceDetector
categories:
- Android
tags:
- Face Detection
- FaceDetector
- Camera2
date: 2020-05-27 14:02:13

Preface

This post covers the use of Android's built-in FaceDetector API. It can detect whether any faces are on screen, how many there are, the 2D coordinates of the midpoint between each face's eyes, and the distance between the eyes. One pitfall I ran into: it works well for detecting whether a face is present, but if you need a precise face position or distance, the results drift. The coordinates are purely 2D, so they do not map accurately onto real-world positions. If that is not enough for your use case, don't bother with this approach.
Without further ado, let's start implementing.

Implementation

1. First, create a custom View that draws rectangles on screen:

class FaceView : View {
    lateinit var mPaint: Paint
    private var mCorlor = "#42ed45"
    private var mFaces: ArrayList<RectF>? = null

    constructor(context: Context) : super(context) {
        init()
    }

    constructor(context: Context, attrs: AttributeSet?) : super(context, attrs) {
        init()
    }

    constructor(context: Context, attrs: AttributeSet?, defStyleAttr: Int) : super(context, attrs, defStyleAttr) {
        init()
    }

    private fun init() {
        mPaint = Paint()
        mPaint.color = Color.parseColor(mCorlor)
        mPaint.style = Paint.Style.STROKE
        mPaint.strokeWidth = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 3f, context.resources.displayMetrics)
        mPaint.isAntiAlias = true
    }

    override fun onDraw(canvas: Canvas) {
        super.onDraw(canvas)
        mFaces?.let {
            for (face in it) {
                canvas.drawRect(face, mPaint)
            }
        }
    }

    fun setFaces(faces: ArrayList<RectF>) {
        this.mFaces = faces
        invalidate()
    }
}

ImageUtil handles converting the raw frame data into the format we need:

/**
 * Author: Sar_Wang
 * Date: 2020/5/11 3:40 PM
 * Description:
 */
public class ImageUtil {

    /**
     * Converts Y:U:V == 4:2:2 data to NV21.
     *
     * @param y      Y data
     * @param u      U data
     * @param v      V data
     * @param nv21   the resulting NV21 buffer; must be allocated in advance
     * @param stride row stride
     * @param height image height
     */
    public static void yuv422ToYuv420sp(byte[] y, byte[] u, byte[] v, byte[] nv21, int stride, int height) {
        System.arraycopy(y, 0, nv21, 0, y.length);
        // Note: using y.length * 3 / 2 as the length risks an array overrun;
        // compute it from the actual data lengths instead.
        int length = y.length + u.length / 2 + v.length / 2;
        int uIndex = 0, vIndex = 0;
        for (int i = stride * height; i < length; i += 2) {
            nv21[i] = v[vIndex];
            nv21[i + 1] = u[uIndex];
            vIndex += 2;
            uIndex += 2;
        }
    }

    /**
     * Converts Y:U:V == 4:1:1 data to NV21.
     *
     * @param y      Y data
     * @param u      U data
     * @param v      V data
     * @param nv21   the resulting NV21 buffer; must be allocated in advance
     * @param stride row stride
     * @param height image height
     */
    public static void yuv420ToYuv420sp(byte[] y, byte[] u, byte[] v, byte[] nv21, int stride, int height) {
        System.arraycopy(y, 0, nv21, 0, y.length);
        // Note: using y.length * 3 / 2 as the length risks an array overrun;
        // compute it from the actual data lengths instead.
        int length = y.length + u.length + v.length;
        int uIndex = 0, vIndex = 0;
        // Step by 2 since each iteration writes one V/U pair
        // (the original snippet stepped by 1, which overwrites data).
        for (int i = stride * height; i < length; i += 2) {
            nv21[i] = v[vIndex++];
            nv21[i + 1] = u[uIndex++];
        }
    }
}
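
Both converters assume the caller has already allocated the NV21 buffer. Since NV21 is a full-resolution Y plane followed by interleaved VU samples at quarter resolution, the buffer needs stride * height * 3 / 2 bytes. A minimal sketch of the intended call pattern (the real call site appears in the preview callback further down; y, u, v, stride, and height come from the camera frame):

// Sketch only: allocate the NV21 target and pick a converter from the Y:U size ratio.
val nv21 = ByteArray(stride * height * 3 / 2) // Y plane + interleaved VU plane
when (y.size / u.size) {
    2 -> ImageUtil.yuv422ToYuv420sp(y, u, v, nv21, stride, height) // Y:U:V == 4:2:2
    4 -> ImageUtil.yuv420ToYuv420sp(y, u, v, nv21, stride, height) // Y:U:V == 4:1:1
}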

Next, the layout for the activity that drives the camera. The original markup was stripped when the post was published, so what follows is a minimal reconstruction using the two view ids (texture_preview, faceView) referenced by the code later on:

<?xml version="1.0" encoding="utf-8"?>
<!-- Reconstructed layout sketch: the original tags were lost from the post.
     Only the ids come from the code below; the FaceView package name is a
     placeholder for wherever the class actually lives. -->
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent">

    <TextureView
        android:id="@+id/texture_preview"
        android:layout_width="match_parent"
        android:layout_height="match_parent" />

    <com.example.facetrack.FaceView
        android:id="@+id/faceView"
        android:layout_width="match_parent"
        android:layout_height="match_parent" />

</FrameLayout>

To save myself some trouble here, I used a camera helper class written by another developer; the source is worth reading through if you're interested:

public class Camera2Helper {
    private static final String TAG = "Camera2Helper";

    private Point maxPreviewSize;
    private Point minPreviewSize;

    public static final String CAMERA_ID_FRONT = "1";
    public static final String CAMERA_ID_BACK = "0";

    private String mCameraId;
    private String specificCameraId;
    private Camera2Listener camera2Listener;
    private TextureView mTextureView;
    private int rotation;
    private Point previewViewSize;
    private Point specificPreviewSize;
    private boolean isMirror;
    private Context context;
    private boolean mCalibrated;
    private boolean mIsVertical = true;

    /**
     * A {@link CameraCaptureSession} for camera preview.
     */
    private CameraCaptureSession mCaptureSession;

    /**
     * A reference to the opened {@link CameraDevice}.
     */
    private CameraDevice mCameraDevice;

    private Size mPreviewSize;

    private Camera2Helper(Camera2Helper.Builder builder) {
        mTextureView = builder.previewDisplayView;
        specificCameraId = builder.specificCameraId;
        camera2Listener = builder.camera2Listener;
        rotation = builder.rotation;
        previewViewSize = builder.previewViewSize;
        specificPreviewSize = builder.previewSize;
        maxPreviewSize = builder.maxPreviewSize;
        minPreviewSize = builder.minPreviewSize;
        isMirror = builder.isMirror;
        context = builder.context;
        if (isMirror) {
            mTextureView.setScaleX(-1);
        }
    }

    public void setConfiguration(boolean val) {
        mIsVertical = val;
    }

    public void switchCamera() {
        if (CAMERA_ID_BACK.equals(mCameraId)) {
            specificCameraId = CAMERA_ID_FRONT;
        } else if (CAMERA_ID_FRONT.equals(mCameraId)) {
            specificCameraId = CAMERA_ID_BACK;
        }
        stop();
        start();
    }

    private int getCameraOri(int rotation, String cameraId) {
        int degrees = rotation * 90;
        switch (rotation) {
            case Surface.ROTATION_0:
                degrees = 0;
                break;
            case Surface.ROTATION_90:
                degrees = 90;
                break;
            case Surface.ROTATION_180:
                degrees = 180;
                break;
            case Surface.ROTATION_270:
                degrees = 270;
                break;
            default:
                break;
        }
        int result;
        if (CAMERA_ID_FRONT.equals(cameraId)) {
            result = (mSensorOrientation + degrees) % 360;
            result = (360 - result) % 360;
        } else {
            result = (mSensorOrientation - degrees + 360) % 360;
        }
        Log.i(TAG, "getCameraOri: " + rotation + " " + result + " " + mSensorOrientation);
        return result;
    }

    private final TextureView.SurfaceTextureListener mSurfaceTextureListener
            = new TextureView.SurfaceTextureListener() {

        @Override
        public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
            Log.i(TAG, "onSurfaceTextureAvailable: ");
            openCamera();
        }

        @Override
        public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) {
            Log.i(TAG, "onSurfaceTextureSizeChanged: ");
            configureTransform(width, height);
        }

        @Override
        public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) {
            Log.i(TAG, "onSurfaceTextureDestroyed: ");
            return true;
        }

        @Override
        public void onSurfaceTextureUpdated(SurfaceTexture texture) {
        }
    };

    private CameraDevice.StateCallback mDeviceStateCallback = new CameraDevice.StateCallback() {

        @Override
        public void onOpened(@NonNull CameraDevice cameraDevice) {
            Log.i(TAG, "onOpened: ");
            // This method is called when the camera is opened. We start camera preview here.
            mCameraOpenCloseLock.release();
            mCameraDevice = cameraDevice;
            createCameraPreviewSession();
            if (camera2Listener != null) {
                camera2Listener.onCameraOpened(cameraDevice, mCameraId, mPreviewSize, getCameraOri(rotation, mCameraId), isMirror);
            }
        }

        @Override
        public void onDisconnected(@NonNull CameraDevice cameraDevice) {
            Log.i(TAG, "onDisconnected: ");
            mCameraOpenCloseLock.release();
            cameraDevice.close();
            mCameraDevice = null;
            if (camera2Listener != null) {
                camera2Listener.onCameraClosed();
            }
        }

        @Override
        public void onError(@NonNull CameraDevice cameraDevice, int error) {
            Log.i(TAG, "onError: ");
            mCameraOpenCloseLock.release();
            cameraDevice.close();
            mCameraDevice = null;
            if (camera2Listener != null) {
                camera2Listener.onCameraError(new Exception("error occurred, code is " + error));
            }
        }
    };

    private CameraCaptureSession.StateCallback mCaptureStateCallback = new CameraCaptureSession.StateCallback() {

        @Override
        public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
            Log.i(TAG, "onConfigured: ");
            // The camera is already closed
            if (null == mCameraDevice) {
                return;
            }
            // When the session is ready, we start displaying the preview.
            mCaptureSession = cameraCaptureSession;
            try {
                mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(),
                        mCaptureCallBack, mBackgroundHandler);
            } catch (CameraAccessException e) {
                e.printStackTrace();
            }
        }

        @Override
        public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
            Log.i(TAG, "onConfigureFailed: ");
            if (camera2Listener != null) {
                camera2Listener.onCameraError(new Exception("configureFailed"));
            }
        }
    };

    private CameraCaptureSession.CaptureCallback mCaptureCallBack = new CameraCaptureSession.CaptureCallback() {

        @Override
        public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
            super.onCaptureCompleted(session, request, result);
            camera2Listener.onHandleFaces(result);
        }

        @Override
        public void onCaptureFailed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull CaptureFailure failure) {
            super.onCaptureFailed(session, request, failure);
        }
    };

    /**
     * An additional thread for running tasks that shouldn't block the UI.
     */
    private HandlerThread mBackgroundThread;

    /**
     * A {@link Handler} for running tasks in the background.
     */
    private Handler mBackgroundHandler;

    private ImageReader mImageReader;

    /**
     * {@link CaptureRequest.Builder} for the camera preview
     */
    private CaptureRequest.Builder mPreviewRequestBuilder;

    /**
     * A {@link Semaphore} to prevent the app from exiting before closing the camera.
     */
    private Semaphore mCameraOpenCloseLock = new Semaphore(1);

    /**
     * Orientation of the camera sensor
     */
    private int mSensorOrientation;

    private Size getBestSupportedSize(List<Size> sizes) {
        Size defaultSize = sizes.get(0);
        Size[] tempSizes = sizes.toArray(new Size[0]);
        Arrays.sort(tempSizes, new Comparator<Size>() {
            @Override
            public int compare(Size o1, Size o2) {
                if (o1.getWidth() > o2.getWidth()) {
                    return -1;
                } else if (o1.getWidth() == o2.getWidth()) {
                    return o1.getHeight() > o2.getHeight() ? -1 : 1;
                } else {
                    return 1;
                }
            }
        });
        sizes = new ArrayList<>(Arrays.asList(tempSizes));
        for (int i = sizes.size() - 1; i >= 0; i--) {
            if (maxPreviewSize != null) {
                if (sizes.get(i).getWidth() > maxPreviewSize.x || sizes.get(i).getHeight() > maxPreviewSize.y) {
                    sizes.remove(i);
                    continue;
                }
            }
            if (minPreviewSize != null) {
                if (sizes.get(i).getWidth() < minPreviewSize.x || sizes.get(i).getHeight() < minPreviewSize.y) {
                    sizes.remove(i);
                }
            }
        }
        if (sizes.size() == 0) {
            String msg = "can not find suitable previewSize, now using default";
            if (camera2Listener != null) {
                Log.e(TAG, msg);
                camera2Listener.onCameraError(new Exception(msg));
            }
            return defaultSize;
        }
        Size bestSize = sizes.get(0);
        float previewViewRatio;
        if (previewViewSize != null) {
            previewViewRatio = (float) previewViewSize.x / (float) previewViewSize.y;
        } else {
            previewViewRatio = (float) bestSize.getWidth() / (float) bestSize.getHeight();
        }
        if (previewViewRatio > 1) {
            previewViewRatio = 1 / previewViewRatio;
        }
        for (Size s : sizes) {
            if (specificPreviewSize != null && specificPreviewSize.x == s.getWidth() && specificPreviewSize.y == s.getHeight()) {
                return s;
            }
            if (Math.abs((s.getHeight() / (float) s.getWidth()) - previewViewRatio) < Math.abs(bestSize.getHeight() / (float) bestSize.getWidth() - previewViewRatio)) {
                bestSize = s;
            }
        }
        return bestSize;
    }

    public synchronized void start() {
        if (mCameraDevice != null) {
            return;
        }
        startBackgroundThread();

        // When the screen is turned off and turned back on, the SurfaceTexture is already
        // available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
        // a camera and start preview from here (otherwise, we wait until the surface is ready in
        // the SurfaceTextureListener).
        if (mTextureView.isAvailable()) {
            openCamera();
        } else {
            mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
        }
    }

    public synchronized void stop() {
        if (mCameraDevice == null) {
            return;
        }
        closeCamera();
        stopBackgroundThread();
    }

    public void release() {
        stop();
        mTextureView = null;
        camera2Listener = null;
        context = null;
    }

    private void setUpCameraOutputs(CameraManager cameraManager) {
        try {
            if (configCameraParams(cameraManager, specificCameraId)) {
                return;
            }
            for (String cameraId : cameraManager.getCameraIdList()) {
                if (configCameraParams(cameraManager, cameraId)) {
                    return;
                }
            }
        } catch (CameraAccessException e) {
            e.printStackTrace();
        } catch (NullPointerException e) {
            // Currently an NPE is thrown when the Camera2 API is used but not supported on the
            // device this code runs on.
            if (camera2Listener != null) {
                camera2Listener.onCameraError(e);
            }
        }
    }

    private boolean configCameraParams(CameraManager manager, String cameraId) throws CameraAccessException {
        CameraCharacteristics characteristics
                = manager.getCameraCharacteristics(cameraId);
        StreamConfigurationMap map = characteristics.get(
                CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        if (map == null) {
            return false;
        }
        mPreviewSize = getBestSupportedSize(new ArrayList<>(Arrays.asList(map.getOutputSizes(SurfaceTexture.class))));
        mImageReader = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(),
                ImageFormat.YUV_420_888, 2);
        mImageReader.setOnImageAvailableListener(
                new OnImageAvailableListenerImpl(), mBackgroundHandler);
        mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
        mCameraId = cameraId;
        return true;
    }

    private void openCamera() {
        CameraManager cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
        setUpCameraOutputs(cameraManager);
        configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
        try {
            if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
                throw new RuntimeException("Time out waiting to lock camera opening.");
            }
            cameraManager.openCamera(mCameraId, mDeviceStateCallback, mBackgroundHandler);
        } catch (CameraAccessException e) {
            if (camera2Listener != null) {
                camera2Listener.onCameraError(e);
            }
        } catch (InterruptedException e) {
            if (camera2Listener != null) {
                camera2Listener.onCameraError(e);
            }
        }
    }

    /**
     * Closes the current {@link CameraDevice}.
     */
    private void closeCamera() {
        try {
            mCameraOpenCloseLock.acquire();
            if (null != mCaptureSession) {
                mCaptureSession.close();
                mCaptureSession = null;
            }
            if (null != mCameraDevice) {
                mCameraDevice.close();
                mCameraDevice = null;
            }
            if (null != mImageReader) {
                mImageReader.close();
                mImageReader = null;
            }
            if (camera2Listener != null) {
                camera2Listener.onCameraClosed();
            }
        } catch (InterruptedException e) {
            if (camera2Listener != null) {
                camera2Listener.onCameraError(e);
            }
        } finally {
            mCameraOpenCloseLock.release();
        }
    }

    /**
     * Starts a background thread and its {@link Handler}.
     */
    private void startBackgroundThread() {
        mBackgroundThread = new HandlerThread("CameraBackground");
        mBackgroundThread.start();
        mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
    }

    /**
     * Stops the background thread and its {@link Handler}.
     */
    private void stopBackgroundThread() {
        mBackgroundThread.quitSafely();
        try {
            mBackgroundThread.join();
            mBackgroundThread = null;
            mBackgroundHandler = null;
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    /**
     * Creates a new {@link CameraCaptureSession} for camera preview.
     */
    private void createCameraPreviewSession() {
        try {
            SurfaceTexture texture = mTextureView.getSurfaceTexture();
            assert texture != null;

            // We configure the size of default buffer to be the size of camera preview we want.
            texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());

            // This is the output Surface we need to start preview.
            Surface surface = new Surface(texture);

            // We set up a CaptureRequest.Builder with the output Surface.
            mPreviewRequestBuilder
                    = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                    CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
            mPreviewRequestBuilder.addTarget(surface);
            mPreviewRequestBuilder.addTarget(mImageReader.getSurface());

            // Here, we create a CameraCaptureSession for camera preview.
            mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()),
                    mCaptureStateCallback, mBackgroundHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    /**
     * Configures the necessary {@link Matrix} transformation to `mTextureView`.
     * This method should be called after the camera preview size is determined in
     * setUpCameraOutputs and also the size of `mTextureView` is fixed.
     *
     * @param viewWidth  The width of `mTextureView`
     * @param viewHeight The height of `mTextureView`
     */
    private void configureTransform(int viewWidth, int viewHeight) {
        if (null == mTextureView || null == mPreviewSize) {
            return;
        }
        Matrix matrix = new Matrix();
        RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
        RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
        float centerX = viewRect.centerX();
        float centerY = viewRect.centerY();
        if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
            bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
            matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
            float scale = Math.max(
                    (float) viewHeight / mPreviewSize.getHeight(),
                    (float) viewWidth / mPreviewSize.getWidth());
            matrix.postScale(scale, scale, centerX, centerY);
            matrix.postRotate((90 * (rotation - 2)) % 360, centerX, centerY);
        } else if (Surface.ROTATION_180 == rotation) {
            matrix.postRotate(180, centerX, centerY);
        }
        Log.i(TAG, "configureTransform: " + getCameraOri(rotation, mCameraId) + "  " + rotation * 90);
        mTextureView.setTransform(matrix);
    }

    public static final class Builder {

        /**
         * View used to display the preview; currently only TextureView is supported
         */
        private TextureView previewDisplayView;

        /**
         * Whether to mirror the preview; only supported for TextureView
         */
        private boolean isMirror;

        /**
         * The specific camera ID to open
         */
        private String specificCameraId;

        /**
         * Event callback
         */
        private Camera2Listener camera2Listener;

        /**
         * Screen width and height, used when choosing the best preview aspect ratio
         */
        private Point previewViewSize;

        /**
         * Pass the value of getWindowManager().getDefaultDisplay().getRotation()
         */
        private int rotation;

        /**
         * Requested preview size; used for the preview if the system supports it
         */
        private Point previewSize;

        /**
         * Maximum resolution
         */
        private Point maxPreviewSize;

        /**
         * Minimum resolution
         */
        private Point minPreviewSize;

        /**
         * Context, used to obtain the CameraManager
         */
        private Context context;

        public Builder() {
        }

        public Builder previewOn(TextureView val) {
            previewDisplayView = val;
            return this;
        }

        public Builder isMirror(boolean val) {
            isMirror = val;
            return this;
        }

        public Builder previewSize(Point val) {
            previewSize = val;
            return this;
        }

        public Builder maxPreviewSize(Point val) {
            maxPreviewSize = val;
            return this;
        }

        public Builder minPreviewSize(Point val) {
            minPreviewSize = val;
            return this;
        }

        public Builder previewViewSize(Point val) {
            previewViewSize = val;
            return this;
        }

        public Builder rotation(int val) {
            rotation = val;
            return this;
        }

        public Builder specificCameraId(String val) {
            specificCameraId = val;
            return this;
        }

        public Builder cameraListener(Camera2Listener val) {
            camera2Listener = val;
            return this;
        }

        public Builder context(Context val) {
            context = val;
            return this;
        }

        public Camera2Helper build() {
            if (previewViewSize == null) {
                Log.e(TAG, "previewViewSize is null, now use default previewSize");
            }
            if (camera2Listener == null) {
                Log.e(TAG, "camera2Listener is null, callback will not be called");
            }
            if (previewDisplayView == null) {
                throw new NullPointerException("you must preview on a textureView or a surfaceView");
            }
            if (maxPreviewSize != null && minPreviewSize != null) {
                if (maxPreviewSize.x < minPreviewSize.x || maxPreviewSize.y < minPreviewSize.y) {
                    throw new IllegalArgumentException("maxPreviewSize must be greater than minPreviewSize");
                }
            }
            return new Camera2Helper(this);
        }
    }

    private class OnImageAvailableListenerImpl implements ImageReader.OnImageAvailableListener {
        private byte[] y;
        private byte[] u;
        private byte[] v;
        private ReentrantLock lock = new ReentrantLock();

        @Override
        public void onImageAvailable(ImageReader reader) {
            Image image = reader.acquireNextImage();
            // Y:U:V == 4:2:2
            if (camera2Listener != null && image.getFormat() == ImageFormat.YUV_420_888) {
                Image.Plane[] planes = image.getPlanes();
                // Lock to make sure y, u and v come from the same Image
                lock.lock();
                // Reuse the same byte arrays to reduce GC pressure
                if (y == null) {
                    y = new byte[planes[0].getBuffer().limit() - planes[0].getBuffer().position()];
                    u = new byte[planes[1].getBuffer().limit() - planes[1].getBuffer().position()];
                    v = new byte[planes[2].getBuffer().limit() - planes[2].getBuffer().position()];
                }
                if (image.getPlanes()[0].getBuffer().remaining() == y.length) {
                    planes[0].getBuffer().get(y);
                    planes[1].getBuffer().get(u);
                    planes[2].getBuffer().get(v);
                    camera2Listener.onPreview(y, u, v, mPreviewSize, planes[0].getRowStride());
                }
                lock.unlock();
            }
            image.close();
        }
    }
}
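
The helper reports everything back through a Camera2Listener interface that the post never shows. Reconstructed from the call sites above (the method set is taken from the helper; parameter names are my guesses), it would look roughly like this:

import android.hardware.camera2.CameraDevice
import android.hardware.camera2.TotalCaptureResult
import android.util.Size

// Reconstructed from Camera2Helper's call sites; not verbatim from the original project.
interface Camera2Listener {
    fun onCameraOpened(cameraDevice: CameraDevice?, cameraId: String?, previewSize: Size?,
                       displayOrientation: Int, isMirror: Boolean)
    fun onPreview(y: ByteArray?, u: ByteArray?, v: ByteArray?, previewSize: Size?, stride: Int)
    fun onHandleFaces(result: TotalCaptureResult)
    fun onCameraClosed()
    fun onCameraError(e: Exception)
}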

Then hook into the layout pass so initialization only happens once the view has a size:

// Registered before the first layout pass (typically in onCreate)
texture_preview.viewTreeObserver.addOnGlobalLayoutListener(this)

override fun onGlobalLayout() {
    texture_preview.viewTreeObserver.removeOnGlobalLayoutListener(this)
    if (!checkPermissions(NEEDED_PERMISSIONS)) {
        ActivityCompat.requestPermissions(this, NEEDED_PERMISSIONS, ACTION_REQUEST_PERMISSIONS)
    } else {
        initCamera()
    }
}
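
The permission helpers referenced above (NEEDED_PERMISSIONS, ACTION_REQUEST_PERMISSIONS, checkPermissions) are never shown in the original post; a plausible minimal definition would be:

// Assumed definitions; the original post does not show these helpers.
private val NEEDED_PERMISSIONS = arrayOf(Manifest.permission.CAMERA)
private val ACTION_REQUEST_PERMISSIONS = 1

private fun checkPermissions(permissions: Array<String>): Boolean =
    permissions.all {
        ContextCompat.checkSelfPermission(this, it) == PackageManager.PERMISSION_GRANTED
    }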

Initialize the camera:

camera2Helper = Camera2Helper.Builder()
    .cameraListener(this)
    .maxPreviewSize(Point(1920, 1080))
    .minPreviewSize(Point(1280, 720))
    .specificCameraId(CAMERA_ID)
    .context(applicationContext)
    .previewOn(texture_preview)
    .previewViewSize(Point(texture_preview.width, texture_preview.height))
    .rotation(windowManager.defaultDisplay.rotation)
    .build()
camera2Helper.start()
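
Worth pairing with this: the helper's stop() and release() methods (see the class above) should be tied to the activity lifecycle so the camera is not leaked. A minimal sketch, assuming camera2Helper is a lateinit property:

// Sketch: release the camera when the activity goes away.
override fun onPause() {
    if (::camera2Helper.isInitialized) camera2Helper.stop()
    super.onPause()
}

override fun onResume() {
    super.onResume()
    if (::camera2Helper.isInitialized) camera2Helper.start()
}

override fun onDestroy() {
    if (::camera2Helper.isInitialized) camera2Helper.release()
    super.onDestroy()
}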

Now let's look at what happens in the camera callbacks, starting with when the camera opens:

override fun onCameraOpened(
    cameraDevice: CameraDevice?,
    cameraId: String?,
    previewSize: Size?,
    displayOrientation: Int,
    isMirror: Boolean
) {
    Log.i("Wzz", "onCameraOpened: previewSize = ${previewSize?.width} x ${previewSize?.height}")
    mDisplayOrientation = displayOrientation
    isMirrorPreview = isMirror
    openedCameraId = cameraId
}

The important part is the raw YUV data handed back in onPreview:

if (!this::nv21.isInitialized) {
    nv21 = ByteArray(stride * previewSize!!.height * 3 / 2)
}
// A Y:U size ratio of 2:1 means the data is effectively YUV422; 4:1 means YUV420.
if (y!!.size / u!!.size == 2) {
    ImageUtil.yuv422ToYuv420sp(y, u, v, nv21, stride, previewSize!!.height)
} else if (y.size / u.size == 4) {
    ImageUtil.yuv420ToYuv420sp(y, u, v, nv21, stride, previewSize!!.height)
}
val yuvImage = YuvImage(nv21, ImageFormat.NV21, stride, previewSize!!.height, null)

In Java, wrapping the NV21 buffer looks like this:

YuvImage yuvimage = new YuvImage(_data, ImageFormat.NV21,                _previewSize.getWidth(), _previewSize.getHeight(), null);

Next, decode it into an RGB_565 bitmap (FaceDetector only accepts RGB_565 bitmaps). The original snippet omitted the compressToJpeg step, without which there is nothing for BitmapFactory to decode, so it is restored here:

ByteArrayOutputStream baos = new ByteArrayOutputStream();
// Restored step: compress the YuvImage to JPEG so BitmapFactory has data to decode
// (quality 100 is a typical choice).
yuvimage.compressToJpeg(new Rect(0, 0, _previewSize.getWidth(), _previewSize.getHeight()), 100, baos);
BitmapFactory.Options bfo = new BitmapFactory.Options();
bfo.inPreferredConfig = Bitmap.Config.RGB_565;
Bitmap _currentFrame = BitmapFactory.decodeStream(new ByteArrayInputStream(baos.toByteArray()), null, bfo);

If the frame orientation needs correcting:

Matrix matrix = new Matrix();
if (mIsVertical) {
    matrix.postRotate(90);
    // Android's built-in face detection needs the head at the top of the image,
    // so rotate (and un-mirror) the frame first.
    matrix.preScale(-1, 1);
    // We rotate the same Bitmap
    _currentFrame = Bitmap.createBitmap(_currentFrame, 0, 0,
            _previewSize.getWidth(), _previewSize.getHeight(), matrix, false);
}
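
Putting the last few snippets together, the whole NV21-to-upright-bitmap conversion can live in one function. This is a sketch of the combined flow under the same assumptions as above (the function name is mine), not code from the original post:

import android.graphics.*
import java.io.ByteArrayOutputStream

// Sketch: NV21 -> JPEG -> RGB_565 Bitmap, rotated head-up for FaceDetector.
private fun nv21ToUprightBitmap(nv21: ByteArray, width: Int, height: Int, isVertical: Boolean): Bitmap {
    val yuvImage = YuvImage(nv21, ImageFormat.NV21, width, height, null)
    val baos = ByteArrayOutputStream()
    yuvImage.compressToJpeg(Rect(0, 0, width, height), 100, baos)
    val jpeg = baos.toByteArray()
    val opts = BitmapFactory.Options().apply { inPreferredConfig = Bitmap.Config.RGB_565 }
    var frame = BitmapFactory.decodeByteArray(jpeg, 0, jpeg.size, opts)
        ?: error("JPEG decode failed")
    if (isVertical) {
        val matrix = Matrix().apply {
            postRotate(90f)    // FaceDetector expects the head at the top
            preScale(-1f, 1f)  // undo the front-camera mirror
        }
        frame = Bitmap.createBitmap(frame, 0, 0, frame.width, frame.height, matrix, false)
    }
    return frame
}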

Now FaceDetector can run on the frame:

FaceDetector d = new FaceDetector(
        _currentFrame.getWidth(),
        _currentFrame.getHeight(),
        1);
Face[] faces = new Face[1];
d.findFaces(_currentFrame, faces);
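
findFaces returns the number of faces it actually found, and each Face carries a confidence value; the framework's CONFIDENCE_THRESHOLD constant (0.4) is a reasonable cut-off. A hedged sketch of filtering the results before handing them to the handleFaces function shown below (names other than the FaceDetector APIs are illustrative; remember the bitmap must be RGB_565 and, per the docs, have an even width):

// Sketch of the surrounding wiring, not verbatim from the post.
val maxFaces = 5
val detector = FaceDetector(currentFrame.width, currentFrame.height, maxFaces)
val faces = arrayOfNulls<FaceDetector.Face>(maxFaces)
val found = detector.findFaces(currentFrame, faces)
for (i in 0 until found) {
    val face = faces[i] ?: continue
    if (face.confidence() >= FaceDetector.Face.CONFIDENCE_THRESHOLD) {
        handleFaces(face)
    }
}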

From here you can inspect each Face and handle it however your use case requires.
Next, drawing a box over the detected face position:

private fun handleFaces(face: FaceDetector.Face) {
    var pointF = PointF()
    face.getMidPoint(pointF)
    mFacesRect.clear()
    val widthp = texture_preview.width / height
    val heightP = texture_preview.height / width
    val spec = face.eyesDistance() / heightP
    val bounds = pointF
    val y = bounds.y * heightP
    val x = bounds.x * widthp
    val left = x - spec
    val top = y - spec
    val right = x + spec
    val bottom = y + spec
    val rawFaceRect = RectF(left.toFloat(), top.toFloat(), right.toFloat(), bottom.toFloat())
//  val rawFaceRect3 = RectF(0f, 0f, 10f, 20f)
    // Debug rect covering the whole preview
    val rawFaceRect3 = RectF(0f,
        0f,
        texture_preview.width.toFloat(),
        texture_preview.height.toFloat())
    mFaceDetectMatrix.mapRect(rawFaceRect)
    Log.d("wzz", "preview: $width * $height")
    Log.d("wzz", "texture_preview: ${texture_preview.width} * ${texture_preview.height}")
    Log.d("wzz", "texture_preview: ${texture_preview.top} * ${texture_preview.left} --- ${texture_preview.right}---${texture_preview.bottom}")
    val resultFaceRect = rawFaceRect
    mFacesRect.add(resultFaceRect)
    mFacesRect.add(rawFaceRect3)
    Log.d("wzz", "raw face position: ${bounds.x} * ${bounds.y} ----${face.eyesDistance()}")
    Log.d("wzz", "mapped face position: ${resultFaceRect.width()} * ${resultFaceRect.height()}   ${resultFaceRect.left} ${resultFaceRect.top} ${resultFaceRect.right} ${resultFaceRect.bottom}")
    runOnUiThread {
        faceView.setFaces(mFacesRect)
    }
}

That covers the implementation; from here you can experiment with the specific parameters.
Coming up next is an OpenCV-based face recognition approach:
1. I want to look into converting OpenCV 2D face models to 3D
2. And ARCore's augmented-faces features
If you have any questions, leave a comment or contact me directly.
