Commit 02d1a237 authored by 马乐

1. Add calibration

2. Add face-comparison sampling
parent 202c5cfa
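The face-comparison sampling referred to in point 2 comes from switching the FR frame flow to kotlinx.coroutines' Flow.sample operator (see the MainActivity diff below), which forwards only the latest frame once per period instead of processing every camera callback. A minimal standalone sketch of that behavior, with illustrative values rather than the 5000 ms period used in the commit:

import kotlinx.coroutines.delay
import kotlinx.coroutines.flow.flow
import kotlinx.coroutines.flow.sample
import kotlinx.coroutines.flow.take
import kotlinx.coroutines.runBlocking

fun main() = runBlocking {
    // Stand-in for the camera callback flow: a new "frame" every 100 ms.
    val frames = flow {
        var frame = 0
        while (true) {
            emit(frame++)
            delay(100)
        }
    }
    // sample(500) emits only the most recent frame in each 500 ms window,
    // so the downstream extract/compare work runs at the sampling rate.
    frames.sample(500).take(3).collect { frame ->
        println("process frame $frame")
    }
}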
...@@ -6,8 +6,10 @@ import android.content.pm.PackageManager
import android.media.AudioManager
import android.media.CamcorderProfile
import android.os.Bundle
import android.os.CountDownTimer
import android.os.Environment
import android.speech.tts.TextToSpeech
import android.view.View
import android.widget.Toast
import androidx.activity.result.contract.ActivityResultContracts
import androidx.appcompat.app.AppCompatActivity
...@@ -60,10 +62,16 @@ import com.mediatek.smartplatform.VideoInfoMap
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.Job
import kotlinx.coroutines.NonCancellable
import kotlinx.coroutines.cancel
import kotlinx.coroutines.delay
import kotlinx.coroutines.ensureActive
import kotlinx.coroutines.flow.debounce
import kotlinx.coroutines.flow.sample
import kotlinx.coroutines.flow.takeWhile
import kotlinx.coroutines.isActive
import kotlinx.coroutines.launch
import kotlinx.coroutines.withContext
import java.io.File
import java.util.Locale
import java.util.concurrent.CancellationException
...@@ -121,6 +129,10 @@ class MainActivity : AppCompatActivity(),TextToSpeech.OnInitListener,VideoCallba
LogUtils.file("点击开始DMS按钮")
if (dmsCoroutineScope == null) {
dmsCameraDevice = SmartPlatformManager.get().openCameraDevice(CAMERA_ID_DMS)
dmsCameraDevice?.setErrorCallback { i, s, spmCameraDevice ->
LogUtils.file("error camera id:${spmCameraDevice.cameraId},event id:$i,content:$s")
LogUtils.e("error camera id:${spmCameraDevice.cameraId},event id:$i,content:$s")
}
dmsCoroutineScope = DmsCoroutineScope(DmsCoroutineContext(engine = dmsEngine,dmsCameraDevice!!) + Dispatchers.IO + COROUTINE_DMS)
}
LogUtils.d("dms context:${dmsCoroutineScope?.coroutineContext}")
...@@ -165,8 +177,31 @@ class MainActivity : AppCompatActivity(),TextToSpeech.OnInitListener,VideoCallba
engine.setDmsDistractScope()
}
coroutineContext[DmsCoroutineContext]?.engine?.let {engine->
withContext(Dispatchers.Main){
binding.countDown.visibility = View.VISIBLE
binding.countDown.text = "10s"
}
var startTime = System.currentTimeMillis()
launch(Dispatchers.Default) {
var i = 9
while (isActive && i > 0){
delay(1000)
withContext(Dispatchers.Main) {
binding.countDown.text = "${i--}s"
}
}
withContext(Dispatchers.Main){
binding.countDown.visibility = View.GONE
}
}
dmsCameraDevice?.startDms(engine)?.collect{result->
ensureActive()
if (System.currentTimeMillis() - startTime < 10) {
LogUtils.e("标定失败")
LogUtils.file("标定失败")
binding.detectResult.text = "标定失败"
cancel("标定失败")
}
binding.detectResult.text = result
tts.speak(result, TextToSpeech.QUEUE_ADD, null,null)
launch(Dispatchers.IO){
...@@ -188,6 +223,12 @@ class MainActivity : AppCompatActivity(),TextToSpeech.OnInitListener,VideoCallba
}
} catch (e: CancellationException) {
LogUtils.e("${e.message}")
if (e.message == "标定失败"){
withContext(NonCancellable+Dispatchers.Main){
binding.countDown.visibility = View.GONE
Toast.makeText(this@MainActivity,"标定失败,请重新开始",Toast.LENGTH_SHORT).show()
}
}
dmsCameraDevice?.stopPreview()
dmsCameraDevice?.stopRecord(RecordSource.GENERAL_CAMERA)
dmsCameraDevice?.let { smartPlatformManager.closeCameraDevice(it)}
...@@ -204,6 +245,7 @@ class MainActivity : AppCompatActivity(),TextToSpeech.OnInitListener,VideoCallba
dmsCoroutineScope = null
} else {
LogUtils.d("DMS协程已经执行完成")
LogUtils.file("DMS协程已经执行完成")
dmsCameraDevice?.stopPreview()
dmsCameraDevice?.stopRecord(RecordSource.GENERAL_CAMERA)
dmsCameraDevice?.let { smartPlatformManager.closeCameraDevice(it)}
...@@ -238,17 +280,14 @@ class MainActivity : AppCompatActivity(),TextToSpeech.OnInitListener,VideoCallba
LogUtils.eTag("fr","初始化结果:$result")
if (result == ArcErrorInfo.ARC_ERROR_OK) {
if (featureList.isNotEmpty()) {
engine.extractFeatureFromDir(
File(
Environment.getExternalStorageDirectory(),
"fr/"
).path
)
}
frCameraDevice.startFr().sample(5000).collect {
ensureActive()
val liveExtractResult = engine.extractRecognizeFeatureFromByteArray(
DMS_PIC_WIDTH,
...@@ -263,7 +302,6 @@ class MainActivity : AppCompatActivity(),TextToSpeech.OnInitListener,VideoCallba
tts.speak("人脸识别失败", TextToSpeech.QUEUE_ADD, null, null)
binding.detectResult.text = "人脸识别失败"
}
}
} else {
LogUtils.eTag("fr", "人脸算法初始化失败")
......
package com.intergration.test.fr
import android.os.Environment
import com.arcsoft.imageutil.ArcSoftImageFormat
import com.arcsoft.imageutil.ArcSoftImageUtil
import com.arcsoft.visdrive.sdk.ArcErrorInfo
import com.arcsoft.visdrive.sdk.ArcVisDriveEngine
import com.arcsoft.visdrive.sdk.constant.common.ArcImageFormat
import com.arcsoft.visdrive.sdk.constant.common.ArcModType
import com.arcsoft.visdrive.sdk.constant.fr.ArcFRDetectMaskType
import com.arcsoft.visdrive.sdk.model.common.ArcInitParamInfo
import com.arcsoft.visdrive.sdk.model.common.ArcInitParamInfoDetail
import com.arcsoft.visdrive.sdk.model.fr.ArcFRExtractResult
import com.arcsoft.visdrive.sdk.model.fr.ArcFRInitParam
import com.arcsoft.visdrive.sdk.model.fr.ArcFRSimilarity
import com.blankj.utilcode.util.FileUtils
import com.blankj.utilcode.util.ImageUtils
import com.blankj.utilcode.util.LogUtils
import com.intergration.test.dms.CAMERA_ID_DMS
import com.intergration.test.utils.DMS_PIC_HEIGHT
import com.intergration.test.utils.DMS_PIC_WIDTH
import com.intergration.test.utils.toByteBuffer
import com.mediatek.smartplatform.ImageReaderEx
import com.mediatek.smartplatform.PictureConfiguration
import com.mediatek.smartplatform.PictureSequenceSource
import com.mediatek.smartplatform.SmartPlatformManager
import com.mediatek.smartplatform.SpmCameraDevice
import kotlinx.coroutines.CoroutineName
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Job
import kotlinx.coroutines.cancel
import kotlinx.coroutines.channels.awaitClose
import kotlinx.coroutines.flow.callbackFlow
import java.io.Closeable
import java.io.File
import kotlin.coroutines.AbstractCoroutineContextElement
import kotlin.coroutines.CoroutineContext
val COROUTINE_FR = CoroutineName("fr")
val frJob = Job()
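// Registered face features loaded from the fr/ directory; live frames are compared against this list.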
val featureList by lazy {
mutableListOf<ArcFRExtractResult>()
}
class FrCoroutineContext(val engine: ArcVisDriveEngine): AbstractCoroutineContextElement(FrCoroutineContext){
companion object Key: CoroutineContext.Key<FrCoroutineContext>
}
class FrCoroutineScope(context: CoroutineContext) : Closeable, CoroutineScope {
override val coroutineContext: CoroutineContext = context + frJob
override fun close() {
coroutineContext[FrCoroutineContext]?.engine?.unInit()
coroutineContext.cancel()
}
}
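// Initializes the engine's FR module with live-face and face-quality detection enabled.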
fun ArcVisDriveEngine.initializeFr():Int{
val frInitParam = ArcFRInitParam().apply {
detectMask = ArcFRDetectMaskType.MOD_FR_LIVE_FACE or ArcFRDetectMaskType.MOD_FR_FACE_QUALITY
}
val initParamDetail = ArcInitParamInfoDetail()
initParamDetail.modType = ArcModType.TYPE_FR
initParamDetail.arcInitParamBase = frInitParam
val arcInfoParam = ArcInitParamInfo()
arcInfoParam.arcInitParamInfoDetailArray = Array(1) { initParamDetail }
return init(arcInfoParam)
}
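// Extracts a register feature from every image under featureDir (converted to NV21) and caches it in featureList.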
fun ArcVisDriveEngine.extractFeatureFromDir(featureDir:String) {
if (FileUtils.isFileExists(featureDir) && FileUtils.isDir(featureDir)) {
FileUtils.listFilesInDir(featureDir).forEach {
val bitmap = ImageUtils.getBitmap(it)
val nv21Data = ArcSoftImageUtil.createImageData(bitmap.width, bitmap.height, ArcSoftImageFormat.NV21)
ArcSoftImageUtil.bitmapToImageData(bitmap, nv21Data, ArcSoftImageFormat.NV21)
ArcFRExtractResult().also { result ->
extractRegisterFeature(
bitmap.width, bitmap.height, ArcImageFormat.ARC_IMAGE_FORMAT_NV21,
nv21Data.toByteBuffer(), result
)
LogUtils.dTag(
"fr",
"extract nv21 data for face recognize with file ${it.absoluteFile}"
)
featureList.add(result)
}
}
}
}
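// Extracts a recognition feature from a single NV21 frame delivered by the camera.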
fun ArcVisDriveEngine.extractRecognizeFeatureFromByteArray(width:Int,Height:Int,data:ByteArray):ArcFRExtractResult{
val liveExtractResult = ArcFRExtractResult()
extractRecognizeFeature(width,Height,
ArcImageFormat.ARC_IMAGE_FORMAT_NV21,data.toByteBuffer(),liveExtractResult
)
return liveExtractResult
}
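// Returns true when the live feature matches any registered feature with a similarity score above 75.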
fun ArcVisDriveEngine.compareFeatureWithScore(liveExtractResult:ArcFRExtractResult):Boolean{
return featureList.any {
val frSimilarity = ArcFRSimilarity()
val result = compareFeature(
it.arcFRFeature.featureData,
liveExtractResult.arcFRFeature.featureData,
frSimilarity
)
result == ArcErrorInfo.ARC_ERROR_OK && frSimilarity.score > 75
}
}
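// Exposes the camera's NV21 picture sequence as a Flow; the sequence is stopped when the collector is cancelled.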
fun SpmCameraDevice.startFr() = callbackFlow{
val frDir = File(Environment.getExternalStorageDirectory(),"fr/")
if (!FileUtils.isFileExists(frDir)) {
FileUtils.createOrExistsDir(frDir)
}
val pictureConfig = PictureConfiguration.get(PictureSequenceSource.GENERAL_CAMERA)
pictureConfig.mPath = frDir.absolutePath
pictureConfig.mImageFormat = SpmCameraDevice.ImageDataCallback.IMAGE_FORMAT_NV21
pictureConfig.mPicWidth = DMS_PIC_WIDTH
pictureConfig.mPicHeight = DMS_PIC_HEIGHT
pictureConfig.mDataType = SpmCameraDevice.ImageDataCallback.IMAGE_DATA_RAW
pictureConfig.mImageCallback = ImageReaderEx.ImageCallback { _, _, _, data, _ ->
trySend(data)
}
startPictureSequence(PictureSequenceSource.GENERAL_CAMERA,pictureConfig)
awaitClose {
LogUtils.eTag("fr","fr detect ended")
stopPictureSequence(PictureSequenceSource.GENERAL_CAMERA)
}
}
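// DMS camera device used for face recognition, opened lazily on first access.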
val frCameraDevice: SpmCameraDevice by lazy {
SmartPlatformManager.get().openCameraDevice(CAMERA_ID_DMS)
}
\ No newline at end of file
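For context on the pattern used by startFr() above: it bridges the SpmCameraDevice picture-sequence callback into a cold Flow with callbackFlow, so collection starts the sequence and cancellation stops it via awaitClose. A minimal sketch of the same bridge against a hypothetical listener API (FrameListener and FakeFrameSource are illustrative, not part of the platform SDK):

import kotlinx.coroutines.channels.awaitClose
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.callbackFlow

// Hypothetical camera-style callback API, for illustration only.
fun interface FrameListener {
    fun onFrame(data: ByteArray)
}

class FakeFrameSource {
    private var listener: FrameListener? = null
    fun start(listener: FrameListener) { this.listener = listener }
    fun stop() { listener = null }
}

// Same shape as SpmCameraDevice.startFr(): push each frame with trySend and
// tear the source down in awaitClose when the collector goes away.
fun FakeFrameSource.frames(): Flow<ByteArray> = callbackFlow {
    start(FrameListener { data -> trySend(data) })
    awaitClose { stop() }
}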
<?xml version="1.0" encoding="utf-8"?>
<shape xmlns:android="http://schemas.android.com/apk/res/android">
<size android:width="128dp" android:height="128dp"/>
<corners android:radius="64dp"/>
<stroke android:width="2dp" android:color="@android:color/holo_blue_light"/>
</shape>
\ No newline at end of file
...@@ -85,4 +85,17 @@
app:layout_constraintTop_toBottomOf="@id/detectResult"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintBottom_toBottomOf="parent"/>
<androidx.appcompat.widget.AppCompatTextView
android:id="@+id/countDown"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:gravity="center"
android:background="@drawable/background_count_down_textview"
android:textColor="@android:color/holo_red_dark"
android:textSize="48sp"
android:text="10s"
app:layout_constraintStart_toStartOf="@id/dmsPreview"
app:layout_constraintTop_toTopOf="@id/dmsPreview"
app:layout_constraintEnd_toEndOf="@id/dmsPreview"
app:layout_constraintBottom_toBottomOf="@id/dmsPreview"/>
</androidx.constraintlayout.widget.ConstraintLayout>
\ No newline at end of file