@@ -3,11 +3,16 @@ package com.ethan.psbc.ui.dialogs
 import android.Manifest
 import android.content.Context
 import android.content.pm.PackageManager
+import android.graphics.ImageFormat
 import android.util.Log
+import android.util.Size
 import android.view.View
 import android.widget.Toast
 import androidx.appcompat.app.AppCompatActivity
 import androidx.camera.core.*
+import androidx.camera.core.ImageAnalysis.OUTPUT_IMAGE_FORMAT_YUV_420_888
+import androidx.camera.core.resolutionselector.ResolutionSelector
+import androidx.camera.core.resolutionselector.ResolutionStrategy
 import androidx.camera.lifecycle.ProcessCameraProvider
 import androidx.camera.view.PreviewView
 import androidx.core.app.ActivityCompat
@@ -26,6 +31,8 @@ import org.koin.core.component.inject
 import java.nio.ByteBuffer
 import java.util.concurrent.ExecutorService
 import java.util.concurrent.Executors
+import kotlin.math.min
+
 
 /**
  * <p>Face verification dialog</p>
@@ -64,6 +71,11 @@ class DialogValidateFace( mContext: Context) : FullScreenPopupView(mContext), Ko
     }
 
 
+
+
+
+
+
     private lateinit var cameraExecutor: ExecutorService
     private var cameraProvider: ProcessCameraProvider? = null
     private var preview: Preview? = null
@@ -93,8 +105,14 @@ class DialogValidateFace( mContext: Context) : FullScreenPopupView(mContext), Ko
         cameraProviderFuture.addListener({
             cameraProvider = cameraProviderFuture.get() // get the camera provider
 
+
+            val resolutionBuilder = ResolutionSelector
+                .Builder()
+                .setResolutionStrategy(ResolutionStrategy(Size(640, 480), ResolutionStrategy.FALLBACK_RULE_NONE))
+                .build()
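+            // Pin the preview to 640x480, the size later passed to cwFaceDetection. Note that
+            // FALLBACK_RULE_NONE will not substitute another size if 640x480 is unsupported;
+            // FALLBACK_RULE_CLOSEST_HIGHER_THEN_LOWER is the more forgiving alternative.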
             // preview configuration
-            preview = Preview.Builder()
+            preview = Preview.Builder().setResolutionSelector(resolutionBuilder)
                 .build()
 
@@ -102,27 +120,86 @@ class DialogValidateFace( mContext: Context) : FullScreenPopupView(mContext), Ko
 
-
             imageCamera = ImageCapture.Builder()
                 .setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
                 .build()
 
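+            // Analysis frames are requested as YUV_420_888 (the ImageAnalysis default, made explicit
+            // here) and back-pressured to the newest frame only. On recent CameraX releases
+            // setTargetResolution() is deprecated in favour of ResolutionSelector, and CameraX 1.4+
+            // can emit NV21 directly (OUTPUT_IMAGE_FORMAT_NV21), which would make the manual
+            // conversion in the analyzer below unnecessary.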
-            imageAnalysis = ImageAnalysis.Builder()
+            imageAnalysis = ImageAnalysis.Builder().setOutputImageFormat(OUTPUT_IMAGE_FORMAT_YUV_420_888)
                 .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
+                .setTargetResolution(Size(640, 480))
                 .build()
 
             imageAnalysis.setAnalyzer(cameraExecutor) { imageProxy ->
-                val planes: Array<ImageProxy.PlaneProxy> = imageProxy.planes
-                val buffer: ByteBuffer = planes[0].buffer;
-                val data: ByteArray = ByteArray(buffer.capacity())
-                buffer.rewind()
-                buffer.get(data)
-                // Step 1: call the face tracking API to analyse features of the image stream
+                val imageFormat = imageProxy.format
+                if (ImageFormat.YUV_420_888 == imageFormat) {
+                    Log.d("demo", "image format: YUV_420_888")
+                }
+
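+                // A YUV_420_888 ImageProxy exposes three planes: planes[0] is full-resolution Y,
+                // planes[1]/planes[2] are half-resolution U and V. Each plane has its own rowStride
+                // (bytes per row, possibly padded) and pixelStride (spacing between samples), so the
+                // planes cannot simply be concatenated; they are repacked into NV21 below.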
+                val yPlane: ImageProxy.PlaneProxy = imageProxy.planes[0]
+                val uPlane: ImageProxy.PlaneProxy = imageProxy.planes[1]
+                val vPlane: ImageProxy.PlaneProxy = imageProxy.planes[2]
+
+                val yBuffer: ByteBuffer = yPlane.buffer
+                val uBuffer: ByteBuffer = uPlane.buffer
+                val vBuffer: ByteBuffer = vPlane.buffer
+
+                yBuffer.rewind()
+                uBuffer.rewind()
+                vBuffer.rewind()
+
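+                // NV21 is the full Y plane followed by interleaved V/U at quarter resolution, i.e.
+                // width * height * 3 / 2 bytes. ySize is taken from the buffer and may include
+                // row-stride padding, so the array can be slightly larger than strictly needed.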
+                val ySize: Int = yBuffer.remaining()
+                var position = 0
+                val nv21 = ByteArray(ySize + imageProxy.width * imageProxy.height / 2)
+
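+                // Copy the Y plane row by row, skipping any row-stride padding after each row.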
+                for (row in 0 until imageProxy.height) {
+                    yBuffer.get(nv21, position, imageProxy.width)
+                    position += imageProxy.width
+                    yBuffer.position(min(ySize, yBuffer.position() - imageProxy.width + yPlane.rowStride))
+                }
+
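+                // Interleave the chroma samples in NV21 order (V first, then U), honouring each
+                // plane's pixelStride; min() guards the last row, whose buffer may hold fewer bytes
+                // than a full rowStride.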
+                val chromaHeight = imageProxy.height / 2
+                val chromaWidth = imageProxy.width / 2
+                val vRowStride = vPlane.rowStride
+                val uRowStride = uPlane.rowStride
+                val vPixelStride = vPlane.pixelStride
+                val uPixelStride = uPlane.pixelStride
+                val vLineBuffer = ByteArray(vRowStride)
+                val uLineBuffer = ByteArray(uRowStride)
+
+                for (row in 0 until chromaHeight) {
+                    vBuffer.get(vLineBuffer, 0, min(vRowStride, vBuffer.remaining()))
+                    uBuffer.get(uLineBuffer, 0, min(uRowStride, uBuffer.remaining()))
+                    var vLineBufferPosition = 0
+                    var uLineBufferPosition = 0
+
+                    for (col in 0 until chromaWidth) {
+                        nv21[position++] = vLineBuffer[vLineBufferPosition]
+                        nv21[position++] = uLineBuffer[uLineBufferPosition]
+                        vLineBufferPosition += vPixelStride
+                        uLineBufferPosition += uPixelStride
+                    }
+                }
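+                // nv21 now holds a complete NV21 frame for imageProxy.width x imageProxy.height.
+                // The repacking could be extracted into a helper (e.g. a hypothetical
+                // ImageProxy.toNv21() extension) to keep the analyzer lambda short.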
+
+                // Step 1: call the face tracking API to analyse features of the image stream
 
                 if(handle==-1){
@@ -132,9 +209,8 @@ class DialogValidateFace( mContext: Context) : FullScreenPopupView(mContext), Ko
                 try{
                     val iFeaLen =FaceRecog.cwGetFeatureLength(handle)
                     Log.d("demo", "face feature length: ${iFeaLen}")
-
-                    var pFaceBuffer=Array(3){_ ->FaceInfo()}
-                    val faceDetRet:Int= FaceDetTrack.cwFaceDetection(handle,data,0, 0, FaceInterface.cw_img_form_t.CW_IMAGE_BINARY, 0, 0, FaceInterface.cw_op_t.CW_OP_ALIGN , pFaceBuffer)
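+                    // Detection now runs on the NV21 frame with CW_IMAGE_NV21 and CW_OP_DET
+                    // instead of the old binary-image call. The hard-coded 640x480 assumes the
+                    // analyzer really delivers the requested target resolution; imageProxy.width /
+                    // imageProxy.height would track the actual frame size on devices that cannot
+                    // provide 640x480.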
+                    val pFaceBuffer = Array(1) { FaceInfo() }
+                    val faceDetRet: Int = FaceDetTrack.cwFaceDetection(handle, nv21, 640, 480, FaceInterface.cw_img_form_t.CW_IMAGE_NV21, 0, 0, FaceInterface.cw_op_t.CW_OP_DET, pFaceBuffer)
                     if (faceDetRet >= FaceInterface.cw_errcode_t.CW_UNKNOWN_ERR) {
                         Log.d("demo", "face detection error: ${faceDetRet}")
                     }else if(faceDetRet<1){
@@ -177,6 +253,9 @@ class DialogValidateFace( mContext: Context) : FullScreenPopupView(mContext), Ko
             }
 
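+            // Handles for runtime camera control and state (torch, zoom, exposure); presumably kept
+            // for later use, nothing in this change reads them yet.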
+            val cameraCtrl = camera.cameraControl
+            val cameraInfo = camera.cameraInfo
+
@@ -197,4 +276,6 @@ class DialogValidateFace( mContext: Context) : FullScreenPopupView(mContext), Ko
 
+
+
 }