Commit 7773cb05 authored by 马乐's avatar 马乐

first commit

parents
*.iml
.gradle
.idea
/local.properties
.DS_Store
/build
/captures
.externalNativeBuild
.cxx
local.properties
/build
\ No newline at end of file
plugins {
    id 'com.android.application'
    id 'org.jetbrains.kotlin.android'
}

android {
    namespace 'com.intergration.avm'
    compileSdk 33

    defaultConfig {
        applicationId "com.intergration.avm"
        minSdk 24
        targetSdk 33
        versionCode 1
        versionName "1.0"
        testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
    }

    // WARNING(review): keystore passwords are committed in plain text, and the
    // release config reuses the debug keystore. Move credentials into
    // gradle.properties / environment variables before shipping.
    // NOTE(review): neither signing config is referenced from buildTypes below,
    // so these configs appear unused — confirm whether release should set
    // `signingConfig signingConfigs.release`.
    signingConfigs {
        debug {
            storeFile file("key/waytous.jks")
            storePassword '123456'
            keyAlias 'waytous'
            keyPassword '123456'
        }
        release {
            storeFile file("key/waytous.jks")
            storePassword '123456'
            keyAlias 'waytous'
            keyPassword '123456'
        }
    }

    buildTypes {
        release {
            // Minification disabled; the proguard files below only apply if it
            // is turned on.
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
        }
    }

    // Prebuilt native libraries are picked up from src/main/jniLibs; no JNI
    // sources are compiled by this module.
    sourceSets {
        main {
            jni.srcDirs = []
            jniLibs.srcDirs = ['src/main/jniLibs']
        }
    }

    compileOptions {
        sourceCompatibility JavaVersion.VERSION_1_8
        targetCompatibility JavaVersion.VERSION_1_8
    }

    kotlinOptions {
        jvmTarget = '1.8'
    }

    // NOTE(review): `viewBinding { enabled = true }` is the legacy spelling;
    // newer AGP prefers `buildFeatures { viewBinding true }`.
    viewBinding {
        enabled = true
    }
}

dependencies {
    implementation fileTree(include: ['*.jar', '*.aar'], dir: 'libs')
    implementation 'androidx.core:core-ktx:1.8.0'
    implementation 'androidx.appcompat:appcompat:1.6.1'
    implementation 'com.google.android.material:material:1.5.0'
    implementation 'androidx.constraintlayout:constraintlayout:2.1.4'
    implementation 'org.jetbrains.kotlinx:kotlinx-coroutines-android:1.7.3'
    implementation 'org.greenrobot:eventbus:3.3.1'
    implementation 'com.blankj:utilcodex:1.31.1'
    testImplementation 'junit:junit:4.13.2'
    androidTestImplementation 'androidx.test.ext:junit:1.1.5'
    androidTestImplementation 'androidx.test.espresso:espresso-core:3.5.1'
}
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile
\ No newline at end of file
package com.intergration.avm

import androidx.test.platform.app.InstrumentationRegistry
import androidx.test.ext.junit.runners.AndroidJUnit4
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.Assert.*

/**
 * Instrumented test that executes on an Android device or emulator.
 *
 * See [testing documentation](http://d.android.com/tools/testing).
 */
@RunWith(AndroidJUnit4::class)
class ExampleInstrumentedTest {

    @Test
    fun useAppContext() {
        // The package of the app under test must match its applicationId.
        val targetContext = InstrumentationRegistry.getInstrumentation().targetContext
        assertEquals("com.intergration.avm", targetContext.packageName)
    }
}
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools">
<uses-feature android:glEsVersion="0x00030000" android:required="true" />
<uses-feature android:name="android.hardware.camera.any" />
<uses-permission android:name= "android.permission.SYSTEM_ALERT_WINDOW"/>
<uses-permission android:name= "android.permission.SYSTEM_OVERLAY_WINDOW"/>
<uses-permission android:name="android.permission.FOREGROUND_SERVICE" />
<uses-permission android:name="android.permission.ACCESS_WIFI_STATE" />
<uses-permission android:name="android.permission.CHANGE_WIFI_STATE" />
<uses-permission android:name="android.permission.INTERNET" />
    <!-- duplicate android.permission.FOREGROUND_SERVICE removed; already declared above -->
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.READ_PHONE_STATE" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission
android:name="android.permission.READ_EXTERNAL_STORAGE"
tools:ignore="ScopedStorage" />
<uses-permission
android:name="android.permission.WRITE_EXTERNAL_STORAGE"
tools:ignore="ScopedStorage" />
<uses-permission
android:name="android.permission.MOUNT_UNMOUNT_FILESYSTEMS"
tools:ignore="ProtectedPermissions" />
<application
android:allowBackup="true"
android:dataExtractionRules="@xml/data_extraction_rules"
android:fullBackupContent="@xml/backup_rules"
android:requestLegacyExternalStorage="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/Theme.AvmIntergration"
tools:targetApi="31">
<activity
android:name=".MainActivity"
android:exported="true">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<service android:name=".presentation.MultiScreenService"
android:enabled="true"
android:exported="true"/>
</application>
</manifest>
\ No newline at end of file
package com.intergration.avm

import android.app.Application
import com.blankj.utilcode.util.CrashUtils
import com.blankj.utilcode.util.LogUtils
import com.blankj.utilcode.util.Utils

/**
 * Application entry point: bootstraps the blankj utility toolbox, installs the
 * global crash handler, and tags all log output with "avm".
 */
class AvmApp : Application() {

    override fun onCreate() {
        super.onCreate()
        // Utils must be initialised before any other blankj helper is used.
        Utils.init(this)
        // Install the global uncaught-exception handler.
        CrashUtils.init()
        // Every LogUtils message from this process carries the "avm" tag.
        LogUtils.getConfig().apply { globalTag = "avm" }
    }
}
package com.intergration.avm
import android.Manifest
import android.content.Intent
import android.content.pm.PackageManager
import android.net.Uri
import android.os.Bundle
import android.os.Environment
import android.provider.Settings
import android.widget.Toast
import androidx.activity.result.contract.ActivityResultContracts
import androidx.appcompat.app.AppCompatActivity
import androidx.core.app.ActivityCompat
import androidx.core.content.ContextCompat
import com.arcsoft.visdrive.avmsdk.ArcErrorInfo
import com.arcsoft.visdrive.avmsdk.ArcVisDriveAVMEngine
import com.arcsoft.visdrive.avmsdk.constant.avm.ArcAVMType
import com.arcsoft.visdrive.avmsdk.model.avm.ArcAVMOutputImage
import com.arcsoft.visdrive.avmsdk.model.common.ArcActiveEnvParam
import com.blankj.utilcode.util.FileUtils
import com.blankj.utilcode.util.LogUtils
import com.intergration.avm.databinding.ActivityMainBinding
import com.intergration.avm.utils.ArcAVMInputImageFactory
import com.intergration.avm.utils.CALIB_RESULT
import com.intergration.avm.utils.IS_ACTIVATED
import com.intergration.avm.utils.LOOKUP_TABLE
import com.intergration.avm.utils.closeAuxiliaryPresentation
import com.intergration.avm.utils.feedData
import com.intergration.avm.utils.openAuxiliaryPresentation
import com.intergration.avm.utils.spUtils
import com.mediatek.smartplatform.PictureSequenceSource
import com.mediatek.smartplatform.PreviewSource
import com.mediatek.smartplatform.SmartPlatformManager
import com.mediatek.smartplatform.SpmCameraDevice
import kotlinx.coroutines.CancellationException
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.Job
import kotlinx.coroutines.MainScope
import kotlinx.coroutines.cancel
import kotlinx.coroutines.delay
import kotlinx.coroutines.launch
import kotlinx.coroutines.withContext
import java.io.File
/**
 * Main screen of the AVM (around-view monitoring) demo.
 *
 * Responsibilities:
 *  - request runtime permissions and the overlay permission,
 *  - start the four per-camera previews,
 *  - drive SDK activation, auto/manual calibration, and the fused AVM drawing
 *    via button click handlers.
 */
class MainActivity : AppCompatActivity() {

    private lateinit var binding: ActivityMainBinding

    /** Platform camera-service facade; resolved lazily on first use. */
    private val smartPlatformManager by lazy {
        SmartPlatformManager.get()
    }

    /** Result launcher for the overlay-permission settings screen. */
    private val launchSettingsActivity =
        registerForActivityResult(ActivityResultContracts.StartActivityForResult()) {
            if (it.resultCode == RESULT_OK) {
                LogUtils.d("分屏权限设置成功!")
            }
        }

    /** Shared-storage directory that camera picture sequences are written to. */
    private val avmDir = File(Environment.getExternalStorageDirectory(), "avm/")

    /** Calibration result file produced by auto/manual calibration. */
    private val calibResultPath by lazy {
        File(getExternalFilesDir("avm"), "calibresult.dat")
    }

    /** Lookup-table file produced by auto/manual calibration. */
    private val lookupPath by lazy {
        File(getExternalFilesDir("avm"), "lookuptable.dat")
    }

    /** Preview-startup job; cancelled in [onDestroy]. */
    private var job: Job? = null

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        binding = ActivityMainBinding.inflate(layoutInflater)
        setContentView(binding.root)
        if (allPermissionsGranted()) {
            LogUtils.e("allPermissionsGranted...")
            startPreview()
            ensureAvmDirExists()
        } else {
            ActivityCompat.requestPermissions(
                this, REQUIRED_PERMISSIONS, REQUEST_CODE_PERMISSIONS)
        }
        if (!Settings.canDrawOverlays(this@MainActivity)) {
            val intent = Intent(
                Settings.ACTION_MANAGE_OVERLAY_PERMISSION,
                Uri.parse("package:$packageName")
            )
            launchSettingsActivity.launch(intent)
            Toast.makeText(this@MainActivity, "需要取得权限以使用悬浮窗", Toast.LENGTH_SHORT).show()
        }
        // SDK activation (online); persists the result so later runs can skip it.
        binding.activeAvm.setOnClickListener {
            avmCoroutineScope.launch {
                val result = ArcVisDriveAVMEngine.activate(
                    APP_ID, APP_SECRET, ArcAVMType.AVM_CAM_4,
                    ArcActiveEnvParam().apply {
                        this.IMEI = "d5e2f07694f674c4"
                        this.storagePath = getExternalFilesDir("")?.path
                    })
                if (result == ArcErrorInfo.ARC_ERROR_OK) {
                    LogUtils.d("激活成功!")
                    withContext(Dispatchers.Main) {
                        Toast.makeText(this@MainActivity, "激活成功", Toast.LENGTH_SHORT).show()
                    }
                    spUtils.put(IS_ACTIVATED, true)
                    // Engine init can only succeed after activation.
                    coroutineContext[AvmCoroutineContext]?.engine?.initializeAvmParams(calibResultPath, lookupPath)
                } else {
                    LogUtils.e("激活失败!")
                    withContext(Dispatchers.Main) {
                        Toast.makeText(this@MainActivity, "激活失败", Toast.LENGTH_SHORT).show()
                    }
                    spUtils.put(IS_ACTIVATED, false)
                }
            }
        }
        binding.autoCalib.setOnClickListener {
            avmCoroutineScope.launch {
                coroutineContext[AvmCoroutineContext]?.engine?.autoCalib(calibResultPath, lookupPath)
            }
        }
        binding.manualCalib.setOnClickListener {
            avmCoroutineScope.launch {
                coroutineContext[AvmCoroutineContext]?.engine?.manualCalib(calibResultPath, lookupPath)
            }
        }
        // Starts the four NV21 picture sequences, waits until every camera has
        // delivered a frame, then asks the engine to fuse them and feeds the
        // result to the presentation surface.
        binding.startAvm.setOnClickListener {
            openAuxiliaryPresentation()
            avmCoroutineScope.launch {
                launch(Dispatchers.Default) {
                    avmFrontCamera.startPictureSequence(
                        PictureSequenceSource.GENERAL_CAMERA,
                        configCameraSequence(avmDir.absolutePath) { _, _, status, data, _ ->
                            if (status == SpmCameraDevice.ImageDataCallback.IMAGE_STATUS_SUCCEEDED) {
                                ArcAVMInputImageFactory.ofFront(data).also {
                                    avmInputImages[0] = it
                                }
                            }
                        }
                    )
                }
                launch(Dispatchers.Default) {
                    avmRightCamera.startPictureSequence(
                        PictureSequenceSource.GENERAL_CAMERA,
                        configCameraSequence(avmDir.absolutePath) { _, _, status, data, _ ->
                            if (status == SpmCameraDevice.ImageDataCallback.IMAGE_STATUS_SUCCEEDED) {
                                ArcAVMInputImageFactory.ofRight(data).also {
                                    avmInputImages[1] = it
                                }
                            }
                        }
                    )
                }
                launch(Dispatchers.Default) {
                    avmBackCamera.startPictureSequence(
                        PictureSequenceSource.GENERAL_CAMERA,
                        configCameraSequence(avmDir.absolutePath) { _, _, status, data, _ ->
                            if (status == SpmCameraDevice.ImageDataCallback.IMAGE_STATUS_SUCCEEDED) {
                                ArcAVMInputImageFactory.ofBack(data).also {
                                    avmInputImages[2] = it
                                }
                            }
                        }
                    )
                }
                launch(Dispatchers.Default) {
                    avmLeftCamera.startPictureSequence(
                        PictureSequenceSource.GENERAL_CAMERA,
                        configCameraSequence(avmDir.absolutePath) { _, _, status, data, _ ->
                            if (status == SpmCameraDevice.ImageDataCallback.IMAGE_STATUS_SUCCEEDED) {
                                ArcAVMInputImageFactory.ofLeft(data).also {
                                    avmInputImages[3] = it
                                }
                            }
                        }
                    )
                }
                val outputImageList = arrayListOf<ArcAVMOutputImage>()
                // BUG FIX: avmInputImages is pre-sized to 4 null slots, so its
                // size is always 4 and the old `size != 4` check never waited.
                // Wait until every slot actually holds a frame.
                while (avmInputImages.any { img -> img == null }) {
                    delay(100)
                }
                LogUtils.d("准备播放融合图...")
                if (spUtils.getBoolean(CALIB_RESULT) && spUtils.getBoolean(LOOKUP_TABLE)) {
                    val result = coroutineContext[AvmCoroutineContext]?.engine?.drawAVM(
                        31,
                        avmInputImages,
                        outputImageList
                    )
                    if (result == ArcErrorInfo.ARC_ERROR_OK) {
                        if (outputImageList.isNotEmpty()) {
                            feedData(outputImageList[0].imageData)
                        }
                    }
                } else {
                    withContext(Dispatchers.Main) {
                        Toast.makeText(this@MainActivity, "请先进行标定", Toast.LENGTH_SHORT).show()
                    }
                }
            }
        }
        binding.stopAvm.setOnClickListener {
            avmCoroutineScope.launch {
                avmLeftCamera.stopPictureSequence(PictureSequenceSource.GENERAL_CAMERA)
                avmFrontCamera.stopPictureSequence(PictureSequenceSource.GENERAL_CAMERA)
                avmRightCamera.stopPictureSequence(PictureSequenceSource.GENERAL_CAMERA)
                avmBackCamera.stopPictureSequence(PictureSequenceSource.GENERAL_CAMERA)
            }
            // NOTE(review): cancelling the shared scope kills avmJob for good,
            // so no AVM button works again until the process restarts — confirm
            // this one-shot behaviour is intended.
            avmCoroutineScope.cancel(CancellationException("Click Stop Avm Button"))
            closeAuxiliaryPresentation()
        }
    }

    /** True when every entry of [REQUIRED_PERMISSIONS] is already granted. */
    private fun allPermissionsGranted() = REQUIRED_PERMISSIONS.all {
        ContextCompat.checkSelfPermission(
            baseContext, it) == PackageManager.PERMISSION_GRANTED
    }

    override fun onRequestPermissionsResult(
        requestCode: Int, permissions: Array<String>, grantResults:
        IntArray) {
        LogUtils.e("onRequestPermissionsResult")
        if (requestCode == REQUEST_CODE_PERMISSIONS) {
            if (allPermissionsGranted()) {
                startPreview()
                ensureAvmDirExists()
            } else {
                // Without the camera/storage permissions the app cannot work.
                Toast.makeText(this,
                    "Permissions not granted by the user.",
                    Toast.LENGTH_SHORT).show()
                finish()
            }
        }
        super.onRequestPermissionsResult(requestCode, permissions, grantResults)
    }

    override fun onDestroy() {
        super.onDestroy()
        SmartPlatformManager.get().run {
            closeCameraDevice(avmFrontCamera)
            closeCameraDevice(avmBackCamera)
            closeCameraDevice(avmLeftCamera)
            closeCameraDevice(avmRightCamera)
        }
        job?.cancel(CancellationException("MainActivity Destroyed"))
    }

    /** Creates [avmDir] on an IO dispatcher if it does not exist yet. */
    private fun ensureAvmDirExists() {
        CoroutineScope(Dispatchers.IO).launch {
            if (!FileUtils.isFileExists(avmDir)) {
                FileUtils.createOrExistsDir(avmDir)
            }
        }
    }

    /**
     * Waits for the platform camera service, then binds each camera to its
     * preview surface and starts previewing. On any failure all previews are
     * stopped again.
     */
    private fun startPreview() {
        job = MainScope().launch {
            while (!smartPlatformManager.isServiceAlive) {
                LogUtils.e("SmartPlatformService had not started,waiting for it starting")
                delay(500)
            }
            LogUtils.d("SmartPlatformService had started")
            try {
                withContext(Dispatchers.IO) {
                    avmFrontCamera.setPreviewSurface(binding.frontPreview.holder.surface,
                        PreviewSource.GENERAL_CAMERA_EX)
                    avmFrontCamera.startPreview()
                    LogUtils.d("avmFrontCamera id:${avmFrontCamera.cameraId}")
                }
                withContext(Dispatchers.IO) {
                    avmRightCamera.setPreviewSurface(binding.rightPreview.holder.surface,
                        PreviewSource.GENERAL_CAMERA_EX)
                    avmRightCamera.startPreview()
                    LogUtils.d("avmRightCamera id:${avmRightCamera.cameraId}")
                }
                withContext(Dispatchers.IO) {
                    avmBackCamera.setPreviewSurface(binding.backPreview.holder.surface,
                        PreviewSource.GENERAL_CAMERA_EX)
                    avmBackCamera.startPreview()
                    LogUtils.d("avmBackCamera id:${avmBackCamera.cameraId}")
                }
                withContext(Dispatchers.IO) {
                    avmLeftCamera.setPreviewSurface(binding.leftPreview.holder.surface,
                        PreviewSource.GENERAL_CAMERA_EX)
                    avmLeftCamera.startPreview()
                    LogUtils.d("avmLeftCamera id:${avmLeftCamera.cameraId}")
                }
            } catch (e: Exception) {
                LogUtils.e("${e.message}")
                avmLeftCamera.stopPreview()
                avmFrontCamera.stopPreview()
                avmRightCamera.stopPreview()
                avmBackCamera.stopPreview()
            }
        }
    }

    companion object {
        // NOTE(review): SDK credentials are hard-coded in the APK; consider
        // moving them out of source control.
        private const val APP_ID = "vdpoYwdwW15s8seKveftWJfsarrre9"
        private const val APP_SECRET = "dCdVvhH6tqgWuPMpsvuihPQDHkssWi"
        private const val REQUEST_CODE_PERMISSIONS = 10
        private val REQUIRED_PERMISSIONS = arrayOf(
            Manifest.permission.CAMERA,
            Manifest.permission.RECORD_AUDIO,
            Manifest.permission.READ_PHONE_STATE,
            Manifest.permission.ACCESS_WIFI_STATE,
            Manifest.permission.CHANGE_WIFI_STATE,
            Manifest.permission.WRITE_EXTERNAL_STORAGE,
            Manifest.permission.READ_EXTERNAL_STORAGE,
        )
    }
}
\ No newline at end of file
package com.intergration.avm
import com.arcsoft.imageutil.ArcSoftImageFormat
import com.arcsoft.imageutil.ArcSoftImageUtil
import com.arcsoft.visdrive.avmsdk.ArcErrorInfo
import com.arcsoft.visdrive.avmsdk.ArcVisDriveAVMEngine
import com.arcsoft.visdrive.avmsdk.constant.avm.ArcAVMCameraPosType
import com.arcsoft.visdrive.avmsdk.constant.avm.ArcAVMViewPortType
import com.arcsoft.visdrive.avmsdk.constant.common.ArcImageFormat
import com.arcsoft.visdrive.avmsdk.model.avm.ArcAVMCalibInfo
import com.arcsoft.visdrive.avmsdk.model.avm.ArcAVMCalibResult
import com.arcsoft.visdrive.avmsdk.model.avm.ArcAVMCarInfo
import com.arcsoft.visdrive.avmsdk.model.avm.ArcAVMChessInfo
import com.arcsoft.visdrive.avmsdk.model.avm.ArcAVMClothInfo
import com.arcsoft.visdrive.avmsdk.model.avm.ArcAVMInitParam
import com.arcsoft.visdrive.avmsdk.model.avm.ArcAVMInputImage
import com.arcsoft.visdrive.avmsdk.model.avm.ArcAVMIntrinsicParam
import com.arcsoft.visdrive.avmsdk.model.avm.ArcAVMLookupTable
import com.blankj.utilcode.util.FileIOUtils
import com.blankj.utilcode.util.FileUtils
import com.blankj.utilcode.util.ImageUtils
import com.blankj.utilcode.util.LogUtils
import com.intergration.avm.utils.CALIB_RESULT
import com.intergration.avm.utils.IMAGE_HEIGHT
import com.intergration.avm.utils.IMAGE_WIDTH
import com.intergration.avm.utils.LOOKUP_TABLE
import com.intergration.avm.utils.spUtils
import com.intergration.avm.utils.toByteBuffer
import com.mediatek.smartplatform.ImageReaderEx
import com.mediatek.smartplatform.PictureConfiguration
import com.mediatek.smartplatform.PictureSequenceSource
import com.mediatek.smartplatform.SmartPlatformManager
import com.mediatek.smartplatform.SpmCameraDevice
import com.mediatek.smartplatform.SpmCameraDevice.CamPictureCallback
import kotlinx.coroutines.CoroutineName
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.Job
import kotlinx.coroutines.cancel
import kotlinx.coroutines.coroutineScope
import kotlinx.coroutines.delay
import kotlinx.coroutines.launch
import kotlinx.coroutines.sync.Mutex
import java.io.Closeable
import java.io.File
import kotlin.coroutines.AbstractCoroutineContextElement
import kotlin.coroutines.CoroutineContext
// Coroutine name attached to all AVM coroutines (visible in debug dumps).
val COROUTINE_AVM = CoroutineName("avm")

// One slot per camera; MainActivity fills [0]=front, [1]=right, [2]=back,
// [3]=left.
// NOTE(review): the list is pre-sized to 4 nulls, so `size` is always 4 —
// readiness must be checked with null tests, never via the list size.
val avmInputImages by lazy {
    MutableList<ArcAVMInputImage?>(4){null}
}

// Root job of the shared AVM scope; cancelling it kills all AVM coroutines.
val avmJob = Job()

// NOTE(review): declared but not used in the visible code — presumably meant
// to guard calibration file access; verify against other callers.
val calibDataLock by lazy {
    Mutex()
}

// Single shared SDK engine instance, created on first use.
internal val avmEngine by lazy {
    ArcVisDriveAVMEngine()
}

// Coroutine-context element that carries the AVM engine, so coroutines can
// fetch it via `coroutineContext[AvmCoroutineContext]?.engine`.
class AvmCoroutineContext(val engine: ArcVisDriveAVMEngine): AbstractCoroutineContextElement(
    AvmCoroutineContext
){
    companion object Key: CoroutineContext.Key<AvmCoroutineContext>
}

// Shared scope for all AVM work: engine element + IO dispatcher + debug name.
val avmCoroutineScope by lazy {
    AvmCoroutineScope(AvmCoroutineContext(engine = avmEngine) + Dispatchers.IO + COROUTINE_AVM)
}
/**
 * Coroutine scope for AVM work. [close] releases every camera device,
 * de-initialises the engine carried in the context, and cancels all
 * coroutines launched in this scope.
 */
class AvmCoroutineScope(context: CoroutineContext) : Closeable, CoroutineScope {

    // All children hang off avmJob so one cancel tears everything down.
    override val coroutineContext: CoroutineContext = context + avmJob

    override fun close() {
        LogUtils.e("Avm关闭")
        SmartPlatformManager.get().run {
            closeCameraDevice(avmFrontCamera)
            closeCameraDevice(avmLeftCamera)
            closeCameraDevice(avmRightCamera)
            closeCameraDevice(avmBackCamera)
        }
        coroutineContext[AvmCoroutineContext]?.engine?.unInit()
        coroutineContext.cancel()
    }
}
/**
 * Builds the engine init parameters (intrinsics, calibration layout, car
 * geometry) and, when previously persisted, loads the calibration result and
 * lookup table from disk, then calls the SDK's init.
 *
 * @return the SDK init result code.
 */
fun ArcVisDriveAVMEngine.initializeAvmParams(calibResultPath: File, lookupPath: File): Int {
    // Fixed lens intrinsics; the same parameter set is used for all 4 cameras.
    val intrinsic = ArcAVMIntrinsicParam().apply {
        imageInputWidth = IMAGE_WIDTH
        imageInputHeight = IMAGE_HEIGHT
        width = IMAGE_WIDTH
        height = IMAGE_HEIGHT
        fx = 361.406950
        fy = 350.914546
        cx = 649.595275
        cy = 369.397888
        skew = 0.000000
        k1 = 1.000000
        k2 = -0.023704
        k3 = -0.003416
        p1 = 0.000000
        p2 = 0.000000
        fisheye = true
        checksum = 373328.753896
    }

    val initParam = ArcAVMInitParam().apply {
        avmIntrinsicParamArray = arrayOf(intrinsic, intrinsic, intrinsic, intrinsic)
        avmCalibInfo = ArcAVMCalibInfo().apply {
            avm2DImageWidth = IMAGE_WIDTH
            avm2DImageHeight = IMAGE_HEIGHT
            singleImageWidth = 480
            singleImageHeight = 456
            // Vehicle footprint in millimetres, no blind-area margins.
            avmCarInfo = ArcAVMCarInfo().apply {
                width = 2500
                length = 6000
                blinkAreaFront = 0
                blinkAreaRight = 0
                blinkAreaBack = 0
                blinkAreaLeft = 0
            }
            viewPortType = ArcAVMViewPortType.LARGE.value
        }
    }

    // Reuse a previously persisted calibration result, if flagged present.
    if (spUtils.getBoolean(CALIB_RESULT)) {
        val calibBytes = FileIOUtils.readFile2BytesByStream(calibResultPath)
        initParam.avmCalibResult = ArcAVMCalibResult().apply {
            dataSize = calibBytes.size
            data = calibBytes
        }
    }

    // Reuse a previously persisted lookup table, if flagged present.
    if (spUtils.getBoolean(LOOKUP_TABLE)) {
        val lookupBytes = FileIOUtils.readFile2BytesByStream(lookupPath)
        initParam.avmLookupTable = ArcAVMLookupTable().apply {
            data = lookupBytes
            dataSize = lookupBytes.size
        }
    }

    return init(initParam)
}
/**
 * Runs SDK auto-calibration over the current [avmInputImages], persists the
 * resulting calibration data and lookup table to the given files on success,
 * then reads the stored calibration back for verification logging.
 */
fun ArcVisDriveAVMEngine.autoCalib(calibResultPath:File,lookupPath:File){
    // Calibration-cloth distances (d1..d6) — presumably millimetres, matching
    // the car geometry elsewhere in this file; confirm against SDK docs.
    val clothInfo = ArcAVMClothInfo()
    clothInfo.d1 = 600
    clothInfo.d3 = 5000
    clothInfo.d4 = 4900
    clothInfo.d5 = 5000
    clothInfo.d6 = 4000
    // One chessboard descriptor per camera; corner detection is automatic.
    val chessInfoFront = ArcAVMChessInfo(ArcAVMCameraPosType.TYPE_FRONT)
    val chessInfoRight = ArcAVMChessInfo(ArcAVMCameraPosType.TYPE_RIGHT)
    val chessInfoBack = ArcAVMChessInfo(ArcAVMCameraPosType.TYPE_BACK)
    val chessInfoLeft = ArcAVMChessInfo(ArcAVMCameraPosType.TYPE_LEFT)
    val chessInfoList = arrayListOf(chessInfoFront, chessInfoRight, chessInfoBack, chessInfoLeft)
    val calibResult = ArcAVMCalibResult()
    val lookupTable = ArcAVMLookupTable()
    var result = autoCalibrate(avmInputImages, clothInfo, chessInfoList, calibResult, lookupTable)
    LogUtils.i("autoCalibrate:${result}")
    if (result == ArcErrorInfo.ARC_ERROR_OK) {
        LogUtils.i("autoCalibrate calibResult:${calibResult.data.size}")
        LogUtils.i("autoCalibrate lookupTable:${lookupTable.data.size}")
        // Persist to disk; the SP flags record whether each write succeeded.
        spUtils.put(CALIB_RESULT,FileIOUtils.writeFileFromBytesByChannel(calibResultPath, calibResult.data, true))
        spUtils.put(LOOKUP_TABLE,FileIOUtils.writeFileFromBytesByChannel(lookupPath, lookupTable.data, true))
    } else {
        // NOTE(review): each camera is logged with a different corner index
        // (0..3) — presumably sampling one corner per camera; verify intent.
        LogUtils.e("autoCalibrate failed, chessPoints1:${chessInfoList[0].leftChessPoints[0].x}_${chessInfoList[0].leftChessPoints[0].y}," +
        "chessPoints2:${chessInfoList[1].leftChessPoints[1].x}_${chessInfoList[1].leftChessPoints[1].y}," +
        "chessPoints3:${chessInfoList[2].leftChessPoints[2].x}_${chessInfoList[2].leftChessPoints[2].y}," +
        "chessPoints4:${chessInfoList[3].leftChessPoints[3].x}_${chessInfoList[3].leftChessPoints[3].y},")
    }
    // Read the calibration back from the engine for verification logging,
    // regardless of whether the calibrate call above succeeded.
    val getCalibInfo = ArcAVMCalibInfo()
    val getCalibResult = ArcAVMCalibResult()
    val getLookupTable = ArcAVMLookupTable()
    result = getCalibrateResults(getCalibInfo, getCalibResult, getLookupTable)
    if (result == ArcErrorInfo.ARC_ERROR_OK) {
        LogUtils.i("getCalibrateResults, calibInfo:${getCalibInfo}, result:${getCalibResult.data.size}, lookup:${getLookupTable.data.size}")
    }
}
/**
 * Builds an [ArcAVMChessInfo] for one camera from hand-measured corner
 * coordinates, each array flattened as [x0, y0, x1, y1, x2, y2, x3, y3].
 *
 * NOTE(review): the original code nested the right-point null check inside the
 * left-point check, so right points were silently skipped whenever only
 * `leftChessPoints` was null; the two arrays are independent and are now
 * guarded separately.
 */
private fun chessInfoOf(
    posType: ArcAVMCameraPosType,
    leftPoints: FloatArray,
    rightPoints: FloatArray,
): ArcAVMChessInfo {
    val info = ArcAVMChessInfo()
    info.imagePosType = posType
    info.leftChessPoints?.let { pts ->
        for (i in 0 until 4) {
            pts[i].x = leftPoints[2 * i]
            pts[i].y = leftPoints[2 * i + 1]
        }
    }
    info.rightChessPoints?.let { pts ->
        for (i in 0 until 4) {
            pts[i].x = rightPoints[2 * i]
            pts[i].y = rightPoints[2 * i + 1]
        }
    }
    return info
}

/**
 * Runs SDK manual calibration over the current [avmInputImages] using
 * hand-measured chessboard corner positions for each of the four cameras, and
 * persists the resulting calibration data and lookup table on success.
 */
fun ArcVisDriveAVMEngine.manualCalib(calibResultPath: File, lookupPath: File) {
    // Calibration-cloth distances (d1..d6), same values as autoCalib.
    val clothInfo = ArcAVMClothInfo()
    clothInfo.d1 = 600
    clothInfo.d3 = 5000
    clothInfo.d4 = 4900
    clothInfo.d5 = 5000
    clothInfo.d6 = 4000

    // Hand-measured pixel coordinates of the chessboard corners per camera.
    val chessInfoList = arrayListOf(
        chessInfoOf(
            ArcAVMCameraPosType.TYPE_FRONT,
            floatArrayOf(457f, 487f, 518f, 503f, 440f, 527f, 503f, 550f),
            floatArrayOf(871f, 533f, 931f, 526f, 876f, 581f, 938f, 570f),
        ),
        chessInfoOf(
            ArcAVMCameraPosType.TYPE_RIGHT,
            floatArrayOf(344f, 479f, 368f, 487f, 327f, 532f, 351f, 544f),
            floatArrayOf(896f, 507f, 931f, 499f, 911f, 574f, 947f, 562f),
        ),
        chessInfoOf(
            ArcAVMCameraPosType.TYPE_BACK,
            floatArrayOf(384f, 554f, 445f, 574f, 368f, 591f, 430f, 616f),
            floatArrayOf(797f, 595f, 860f, 582f, 804f, 639f, 871f, 620f),
        ),
        chessInfoOf(
            ArcAVMCameraPosType.TYPE_LEFT,
            floatArrayOf(375f, 475f, 410f, 482f, 361f, 538f, 397f, 550f),
            floatArrayOf(933f, 454f, 956f, 447f, 950f, 510f, 974f, 499f),
        ),
    )

    val calibResult = ArcAVMCalibResult()
    val lookupTable = ArcAVMLookupTable()
    val result = manualCalibrate(avmInputImages, clothInfo, chessInfoList, calibResult, lookupTable)
    LogUtils.i("manualCalibrate:${result}")
    if (result == ArcErrorInfo.ARC_ERROR_OK) {
        LogUtils.i("manualCalibrate calibResult:${calibResult.data.size}")
        LogUtils.i("manualCalibrate lookupTable:${lookupTable.data.size}")
        // Persist to disk; the SP flags record whether each write succeeded.
        spUtils.put(CALIB_RESULT, FileIOUtils.writeFileFromBytesByChannel(calibResultPath, calibResult.data, true))
        spUtils.put(LOOKUP_TABLE, FileIOUtils.writeFileFromBytesByChannel(lookupPath, lookupTable.data, true))
    }
}
// The four AVM camera devices, opened by their platform camera ids
// ("1"=front, "2"=left, "3"=right, "4"=back as wired below).
// NOTE(review): `.also { it.value.parameters }` is applied to the Lazy
// delegate itself, so `.value` forces the camera open (and reads its
// parameters) as soon as the delegate is created — this defeats the laziness
// of `by lazy`. Confirm the eager open is intentional.
val avmLeftCamera: SpmCameraDevice by lazy {
    SmartPlatformManager.get().openCameraDevice("2")
}.also {
    it.value.parameters
}
val avmFrontCamera: SpmCameraDevice by lazy {
    SmartPlatformManager.get().openCameraDevice("1")
}.also {
    it.value.parameters
}
val avmRightCamera: SpmCameraDevice by lazy {
    SmartPlatformManager.get().openCameraDevice("3")
}.also {
    it.value.parameters
}
val avmBackCamera: SpmCameraDevice by lazy {
    SmartPlatformManager.get().openCameraDevice("4")
}.also {
    it.value.parameters
}
/**
 * Takes a still picture with this camera, converts it to NV21 bytes on disk at
 * [imageDataPath], then wraps those bytes as an SDK input image for
 * [cameraType].
 *
 * Synchronisation is file-based: the picture callback writes the NV21 file
 * asynchronously while this coroutine polls for the file's existence.
 * NOTE(review): if the capture fails, the file never appears and the polling
 * loop below never terminates (relies on coroutine cancellation) — confirm.
 */
suspend fun SpmCameraDevice.createFrontArcAVMInputImage(picturePath:String,imageDataPath:String,cameraType: ArcAVMCameraPosType): ArcAVMInputImage = coroutineScope {
    takePicture(picturePath,null){status,_,fileName->
        // Callback arrives off this coroutine; hop back into the scope.
        launch{
            if (status == CamPictureCallback.PICTURE_TAKEN_SUCCESS) {
                // Decode the captured picture and re-encode it as NV21 bytes.
                val frontCalibBitmap = ImageUtils.getBitmap(fileName)
                val imageData = ArcSoftImageUtil.createImageData(frontCalibBitmap.width,frontCalibBitmap.height,ArcSoftImageFormat.NV21)
                ArcSoftImageUtil.bitmapToImageData(frontCalibBitmap, imageData, ArcSoftImageFormat.NV21)
                FileIOUtils.writeFileFromBytesByChannel(imageDataPath,imageData,true)
            }
        }
    }
    // Busy-wait until the callback above has written the NV21 file.
    while (!FileUtils.isFileExists(imageDataPath)) {
        LogUtils.e("nv21文件尚未生成请稍后...")
        delay(200)
    }
    val frontNV21 = FileIOUtils.readFile2BytesByStream(imageDataPath)
    // Wrap the raw bytes in the SDK's input-image container.
    ArcAVMInputImage().apply {
        width = IMAGE_WIDTH
        height = IMAGE_HEIGHT
        imageFormat = ArcImageFormat.ARC_IMAGE_FORMAT_NV21.value
        imagePosType = cameraType
        imageData = frontNV21.toByteBuffer()
    }
}
/**
 * Loads a previously dumped NV21 frame from [file] and wraps it as an SDK
 * input image tagged with [cameraType]. Dimensions are assumed to be
 * IMAGE_WIDTH x IMAGE_HEIGHT.
 */
fun getNv21ImageFromFile(file: File, cameraType: ArcAVMCameraPosType): ArcAVMInputImage {
    val nv21Bytes = FileIOUtils.readFile2BytesByStream(file)
    val image = ArcAVMInputImage()
    image.width = IMAGE_WIDTH
    image.height = IMAGE_HEIGHT
    image.imageFormat = ArcImageFormat.ARC_IMAGE_FORMAT_NV21.value
    image.imagePosType = cameraType
    image.imageData = nv21Bytes.toByteBuffer()
    return image
}
/**
 * Builds a 1280x720 NV21 picture-sequence configuration that writes frames to
 * [path] and delivers each frame to [callback].
 */
fun configCameraSequence(path: String, callback: ImageReaderEx.ImageCallback): PictureConfiguration {
    val config = PictureConfiguration.get(PictureSequenceSource.GENERAL_CAMERA)
    config.mPath = path
    config.mImageFormat = SpmCameraDevice.ImageDataCallback.IMAGE_FORMAT_NV21
    config.mPicWidth = 1280
    config.mPicHeight = 720
    config.mImageCallback = callback
    return config
}
package com.intergration.avm.glsurface
import android.opengl.GLES20
/**
 * Vertex shader source.
 * The vertex shader runs once per vertex; it maps each vertex to its final
 * position via the MVP matrix and passes the texture coordinate on to the
 * fragment shader.
 */
const val vertexShaderCode =
    "uniform mat4 uMVPMatrix;" +
    "attribute vec4 vPosition;" +
    "attribute vec2 texCoord;" +
    "varying vec2 tc;" +
    "void main() {" +
    "  gl_Position = uMVPMatrix * vPosition;" +
    "  tc = texCoord;" +
    "}"
/**
 * Fragment shader source.
 * The fragment shader runs once per pixel; it samples the YUV planes selected
 * by `yuvType` (0 = I420, 1 = NV12, otherwise NV21) and converts to RGB.
 */
const val fragmentShaderCode =
    "precision mediump float;" +
    "uniform sampler2D samplerY;" +
    "uniform sampler2D samplerU;" +
    "uniform sampler2D samplerV;" +
    "uniform sampler2D samplerUV;" +
    "uniform int yuvType;" +
    "varying vec2 tc;" +
    "void main() {" +
    "  vec4 c = vec4((texture2D(samplerY, tc).r - 16./255.) * 1.164);" +
    "  vec4 U; vec4 V;" +
    "  if (yuvType == 0){" +
    // I420: U and V are separate single-channel planes, so the sampled
    // r, g, b, a components all carry the same value.
    "    U = vec4(texture2D(samplerU, tc).r - 128./255.);" +
    "    V = vec4(texture2D(samplerV, tc).r - 128./255.);" +
    "  } else if (yuvType == 1){" +
    // NV12 has an interleaved UV plane; the texture upload format places the
    // U value in r/g/b and the V value in a.
    "    U = vec4(texture2D(samplerUV, tc).r - 128./255.);" +
    "    V = vec4(texture2D(samplerUV, tc).a - 128./255.);" +
    "  } else {" +
    // NV21 has an interleaved VU plane; the texture upload format places the
    // U value in a and the V value in r/g/b.
    "    U = vec4(texture2D(samplerUV, tc).a - 128./255.);" +
    "    V = vec4(texture2D(samplerUV, tc).r - 128./255.);" +
    "  } " +
    "  c += V * vec4(1.596, -0.813, 0, 0);" +
    "  c += U * vec4(0, -0.392, 2.017, 0);" +
    "  c.a = 1.0;" +
    "  gl_FragColor = c;" +
    "}"
/**
 * Compiles a shader of the given [type] from [shaderCode].
 *
 * @param type GLES20.GL_VERTEX_SHADER or GLES20.GL_FRAGMENT_SHADER
 * @param shaderCode GLSL source text
 * @return the GL shader handle (compile status is not checked here)
 */
fun loadShader(type: Int, shaderCode: String): Int {
    // Create an empty shader object of the requested kind, attach the
    // source, and compile it; the handle is returned either way.
    val shader = GLES20.glCreateShader(type)
    GLES20.glShaderSource(shader, shaderCode)
    GLES20.glCompileShader(shader)
    return shader
}
package com.intergration.avm.glsurface
import android.opengl.GLES20
import com.blankj.utilcode.util.LogUtils
import java.nio.ByteBuffer
import java.nio.ByteOrder
import java.nio.FloatBuffer
import java.nio.IntBuffer
/**
 * Wraps a GLES 2.0 shader program that renders YUV frames (I420 / NV12 / NV21).
 * Planes are uploaded as GL_LUMINANCE / GL_LUMINANCE_ALPHA textures and the
 * fragment shader converts them to RGB at draw time.
 *
 * All methods must be called on the GL thread.
 */
class MyGLProgram {
    companion object {
        private const val TAG = "MyGLProgram"
        var squareVertices = floatArrayOf(-1.0f, -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f) // full screen
    }
    private var mProgram: Int
    // One texture handle per plane (3 for I420, 2 used for NV12/NV21)
    private var mPlanarTextureHandles = IntBuffer.wrap(IntArray(3))
    // Sampler uniform locations, filled per-frame in drawTexture()
    private val mSampleHandle = IntArray(3)
    // handles
    private var mPositionHandle = -1
    private var mCoordHandle = -1
    private var mVPMatrixHandle: Int = -1
    // vertices buffer
    private var mVertexBuffer: FloatBuffer? = null
    private var mCoordBuffer: FloatBuffer? = null
    // whole-texture
    private val mCoordVertices = floatArrayOf(0.0f, 1.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f)
    init {
        // vertexShaderCode / fragmentShaderCode are defined elsewhere in this file.
        val vertexShader: Int = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode)
        val fragmentShader: Int = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode)
        LogUtils.dTag(TAG, "vertexShader = $vertexShader \n fragmentShader = $fragmentShader")
        // create empty OpenGL ES Program
        mProgram = GLES20.glCreateProgram().also {
            checkGlError("glCreateProgram")
            // add the vertex shader to program
            GLES20.glAttachShader(it, vertexShader)
            // add the fragment shader to program
            GLES20.glAttachShader(it, fragmentShader)
            // creates OpenGL ES program executables
            GLES20.glLinkProgram(it)
        }
        // Verify the link succeeded; on failure the program is deleted and
        // mProgram is left as 0 (subsequent draws become no-ops).
        val linkStatus = IntArray(1)
        GLES20.glGetProgramiv(mProgram, GLES20.GL_LINK_STATUS, linkStatus, 0)
        if (linkStatus[0] != GLES20.GL_TRUE) {
            LogUtils.wTag(TAG, "Could not link program: ${GLES20.glGetProgramInfoLog(mProgram)}")
            GLES20.glDeleteProgram(mProgram)
            mProgram = 0
        }
        LogUtils.dTag(TAG, "mProgram = $mProgram")
        checkGlError("glCreateProgram")
        // Generate the texture handles for the (up to three) planes
        GLES20.glGenTextures(3, mPlanarTextureHandles)
        checkGlError("glGenTextures")
    }
    /**
     * Draw the textured full-screen quad.
     * @param mvpMatrix vertex-coordinate (model-view-projection) transform matrix
     * @param type YUV data format: 0 -> I420, otherwise NV12/NV21
     */
    fun drawTexture(mvpMatrix: FloatArray, type: Int) {
        GLES20.glUseProgram(mProgram)
        checkGlError("glUseProgram")
        /*
         * get handle for "vPosition" and "a_texCoord"
         */
        mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition").also {
            GLES20.glVertexAttribPointer(it, 2, GLES20.GL_FLOAT, false, 8, mVertexBuffer)
            GLES20.glEnableVertexAttribArray(it)
        }
        // pass the texture coordinates to the fragment shader
        mCoordHandle = GLES20.glGetAttribLocation(mProgram, "texCoord").also {
            GLES20.glVertexAttribPointer(it, 2, GLES20.GL_FLOAT, false, 8, mCoordBuffer)
            GLES20.glEnableVertexAttribArray(it)
        }
        // get handle to shape's transformation matrix
        mVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix")
        // Pass the projection and view transformation to the shader
        GLES20.glUniformMatrix4fv(mVPMatrixHandle, 1, false, mvpMatrix, 0)
        // tell the fragment shader which pixel layout the textures use
        val yuvType = GLES20.glGetUniformLocation(mProgram, "yuvType")
        checkGlError("glGetUniformLocation yuvType")
        GLES20.glUniform1i(yuvType, type)
        // type: 0 is I420, 1 is NV12
        var planarCount = 0
        if (type == 0) {
            // I420 has 3 planes
            planarCount = 3
            mSampleHandle[0] = GLES20.glGetUniformLocation(mProgram, "samplerY")
            mSampleHandle[1] = GLES20.glGetUniformLocation(mProgram, "samplerU")
            mSampleHandle[2] = GLES20.glGetUniformLocation(mProgram, "samplerV")
        } else {
            // NV12 / NV21 have 2 planes
            planarCount = 2
            mSampleHandle[0] = GLES20.glGetUniformLocation(mProgram, "samplerY")
            mSampleHandle[1] = GLES20.glGetUniformLocation(mProgram, "samplerUV")
        }
        // Bind each plane's texture to its own texture unit and point the
        // matching sampler uniform at that unit.
        (0 until planarCount).forEach { i ->
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i)
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mPlanarTextureHandles[i])
            GLES20.glUniform1i(mSampleHandle[i], i)
        }
        // After this call the vertex shader runs once per vertex, then the
        // fragment shader once per pixel; the image lands in the render buffer.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4)
        GLES20.glFinish()
        GLES20.glDisableVertexAttribArray(mPositionHandle)
        GLES20.glDisableVertexAttribArray(mCoordHandle)
    }
    /**
     * Upload image data to the texture targets; for formats whose U and V
     * planes are stored separately (I420).
     * @param yPlane Y plane of the YUV data
     * @param uPlane U plane of the YUV data
     * @param vPlane V plane of the YUV data
     * @param width YUV image width
     * @param height YUV image height
     */
    fun feedTextureWithImageData(yPlane: ByteBuffer, uPlane: ByteBuffer, vPlane: ByteBuffer, width: Int, height: Int) {
        // Chroma planes are subsampled 2x2, hence half width and half height.
        textureYUV(yPlane, width, height, 0)
        textureYUV(uPlane, width / 2, height / 2, 1)
        textureYUV(vPlane, width / 2, height / 2, 2)
    }
    /**
     * Upload image data to the texture targets; for formats whose U and V
     * components are interleaved in one plane (NV12 / NV21).
     * @param yPlane Y plane of the YUV data
     * @param uvPlane interleaved UV plane of the YUV data
     * @param width YUV image width
     * @param height YUV image height
     */
    fun feedTextureWithImageData(yPlane: ByteBuffer, uvPlane: ByteBuffer, width: Int, height: Int) {
        // The UV plane has half the width/height but two bytes per texel.
        textureYUV(yPlane, width, height, 0)
        textureNV12(uvPlane, width / 2, height / 2, 1)
    }
    /**
     * Upload one single-component plane (Y, U or V of I420) to texture [index].
     * @param imageData one plane of the YUV data
     * @param width plane width
     * @param height plane height
     * @param index which of the three texture handles to fill
     */
    private fun textureYUV(imageData: ByteBuffer, width: Int, height: Int, index: Int) {
        // Bind the texture object to the 2D texture target
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mPlanarTextureHandles[index])
        // Use linear filtering for both minification and magnification
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR)
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR)
        // Clamp to edge on both S and T axes
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE)
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE)
        // Load the image data with GL_LUMINANCE as both the internal format (3rd
        // arg) and the pixel format (7th arg): the former describes the texture's
        // color components, the latter the layout of the incoming bytes. Each
        // texel's r,g,b,a then all equal the plane's single component value.
        GLES20.glTexImage2D(
            GLES20.GL_TEXTURE_2D, 0,
            GLES20.GL_LUMINANCE, width, height, 0,
            GLES20.GL_LUMINANCE,
            GLES20.GL_UNSIGNED_BYTE, imageData
        )
    }
    /**
     * Upload the interleaved UV plane (NV12 / NV21) to texture [index].
     * GL_LUMINANCE_ALPHA places the first byte of each texel in r,g,b and the
     * second byte in a, so the shader can separate the two chroma components.
     * @param imageData interleaved UV plane of the YUV data
     * @param width plane width
     * @param height plane height
     * @param index which texture handle to fill
     */
    private fun textureNV12(imageData: ByteBuffer, width: Int, height: Int, index: Int) {
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mPlanarTextureHandles[index])
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR)
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR)
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE)
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE)
        GLES20.glTexImage2D(
            GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE_ALPHA, width, height, 0,
            GLES20.GL_LUMINANCE_ALPHA, GLES20.GL_UNSIGNED_BYTE, imageData
        )
    }
    /**
     * Create the two vertex buffers: screen (position) vertices and texture
     * coordinates. The texture-coordinate buffer is created only once.
     * @param vert screen vertex data
     */
    fun createBuffers(vert: FloatArray) {
        mVertexBuffer = ByteBuffer.allocateDirect(vert.size * 4).run {
            // use the device hardware's native byte order
            order(ByteOrder.nativeOrder())
            // create a floating point buffer from the ByteBuffer
            asFloatBuffer().apply {
                // add the coordinates to the FloatBuffer
                put(vert)
                // set the buffer to read the first coordinate
                position(0)
            }
        }
        if (mCoordBuffer == null) {
            mCoordBuffer = ByteBuffer.allocateDirect(mCoordVertices.size * 4).run {
                // use the device hardware's native byte order
                order(ByteOrder.nativeOrder())
                // create a floating point buffer from the ByteBuffer
                asFloatBuffer().apply {
                    // add the coordinates to the FloatBuffer
                    put(mCoordVertices)
                    // set the buffer to read the first coordinate
                    position(0)
                }
            }
        }
        LogUtils.dTag(TAG, "createBuffers vertice_buffer $mVertexBuffer coord_buffer $mCoordBuffer")
    }
    /**
     * Drain and log any pending GL errors.
     * @param op label describing the operation just performed
     */
    private fun checkGlError(op: String) {
        var error: Int = GLES20.glGetError()
        while (error != GLES20.GL_NO_ERROR) {
            LogUtils.eTag(TAG, "***** $op: glError $error")
            error = GLES20.glGetError()
        }
    }
}
package com.waytous.avmdetect
import android.opengl.GLES20
import android.opengl.GLSurfaceView
import android.opengl.Matrix
import com.blankj.utilcode.util.LogUtils
import com.intergration.avm.glsurface.MyGLProgram
import java.nio.ByteBuffer
import javax.microedition.khronos.opengles.GL10
/**
 * GLSurfaceView.Renderer that receives raw YUV frames (I420 / NV12 / NV21),
 * uploads them through [MyGLProgram] and draws a textured quad.
 *
 * Frames arrive from another thread via [feedData]; access to the plane
 * buffers is guarded by synchronized(this).
 */
class MyGLRenderer : GLSurfaceView.Renderer {
    companion object {
        private const val TAG = "MyGLRenderer"
    }
    private lateinit var mProgram: MyGLProgram
    // GLSurfaceView width in pixels
    private var mScreenWidth: Int = 0
    // GLSurfaceView height in pixels
    private var mScreenHeight: Int = 0
    // width of the incoming YUV frames
    private var mVideoWidth: Int = 0
    // height of the incoming YUV frames
    private var mVideoHeight: Int = 0
    // vPMatrix is an abbreviation for "Model View Projection Matrix"
    private val vPMatrix = FloatArray(16)
    private val projectionMatrix = FloatArray(16)
    private val viewMatrix = FloatArray(16)
    // Y plane data
    private var y: ByteBuffer = ByteBuffer.allocate(0)
    // U plane data (I420 only)
    private var u: ByteBuffer = ByteBuffer.allocate(0)
    // V plane data (I420 only)
    private var v: ByteBuffer = ByteBuffer.allocate(0)
    // interleaved UV plane data (NV12 / NV21)
    private var uv: ByteBuffer = ByteBuffer.allocate(0)
    // YUV data format: 0 -> I420, 1 -> NV12, 2 -> NV21
    private var type: Int = 0
    // set once the surface is ready; frames fed before that are ignored
    private var hasVisibility = false

    override fun onSurfaceCreated(gl: GL10?, config: javax.microedition.khronos.egl.EGLConfig?) {
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f)
        // Set up the OpenGL ES program; must run on the GL thread.
        mProgram = MyGLProgram()
    }

    // Called if the geometry of the view changes, for example when the
    // device's screen orientation changes.
    override fun onSurfaceChanged(unused: GL10, width: Int, height: Int) {
        GLES20.glViewport(0, 0, width, height)
        // Fix: these assignments were previously duplicated.
        mScreenWidth = width
        mScreenHeight = height
        val ratio: Float = width.toFloat() / height.toFloat()
        // this projection matrix is applied to object coordinates
        // in the onDrawFrame() method
        Matrix.frustumM(projectionMatrix, 0, -ratio, ratio, -1f, 1f, 3f, 7f)
        // Set the camera position (View matrix).
        // NOTE(review): this resets any orientation previously applied via
        // setDisplayOrientation() — confirm that is intended.
        Matrix.setLookAtM(viewMatrix, 0, 0f, 0f, -3f, 0f, 0f, 0f, 1.0f, 0.0f, 0.0f)
        if (mVideoWidth > 0 && mVideoHeight > 0) {
            createBuffers(mVideoWidth, mVideoHeight)
        }
        hasVisibility = true
        LogUtils.dTag(TAG, "onSurfaceChanged width:$width * height:$height")
    }

    // Called for each redraw of the view.
    override fun onDrawFrame(unused: GL10) {
        synchronized(this) {
            // Only draw once a frame has actually been allocated/fed.
            if (y.capacity() > 0) {
                y.position(0)
                if (type == 0) {
                    // I420: three separate planes
                    u.position(0)
                    v.position(0)
                    mProgram.feedTextureWithImageData(y, u, v, mVideoWidth, mVideoHeight)
                } else {
                    // NV12 / NV21: Y plane plus interleaved UV plane
                    uv.position(0)
                    mProgram.feedTextureWithImageData(y, uv, mVideoWidth, mVideoHeight)
                }
                // Redraw background color
                GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT)
                // Calculate the projection and view transformation
                Matrix.multiplyMM(vPMatrix, 0, projectionMatrix, 0, viewMatrix, 0)
                try {
                    mProgram.drawTexture(vPMatrix, type)
                } catch (e: Exception) {
                    LogUtils.wTag(TAG, e.message)
                }
            }
        }
    }

    /**
     * Set the display orientation.
     * @param degrees rotation in degrees (counter-clockwise); valid values are 0, 90, 180, 270
     */
    fun setDisplayOrientation(degrees: Int) {
        // Set the camera position (View matrix)
        when (degrees) {
            0 -> {
                Matrix.setLookAtM(viewMatrix, 0, 0f, 0f, -3f, 0f, 0f, 0f, 1.0f, 0.0f, 0.0f)
            }
            90 -> {
                Matrix.setLookAtM(viewMatrix, 0, 0f, 0f, -3f, 0f, 0f, 0f, 0.0f, 1.0f, 0.0f)
            }
            180 -> {
                Matrix.setLookAtM(viewMatrix, 0, 0f, 0f, -3f, 0f, 0f, 0f, -1.0f, 0.0f, 0.0f)
            }
            270 -> {
                Matrix.setLookAtM(viewMatrix, 0, 0f, 0f, -3f, 0f, 0f, 0f, 0.0f, -1.0f, 0.0f)
            }
            else -> {
                // Fix: log message previously read "pram".
                LogUtils.eTag(TAG, "degrees param must be in (0, 90, 180, 270) ")
            }
        }
    }

    /**
     * Set the width/height of the YUV frames to render and (re)allocate the
     * plane containers accordingly.
     * @param width frame width
     * @param height frame height
     */
    fun setYuvDataSize(width: Int, height: Int) {
        if (width > 0 && height > 0) {
            // adjust the rendered quad's aspect ratio
            createBuffers(width, height)
            // Fix: was `&&`, which skipped reallocation when only one of the
            // two dimensions changed, leaving stale buffer sizes.
            if (width != mVideoWidth || height != mVideoHeight) {
                this.mVideoWidth = width
                this.mVideoHeight = height
                val yArraySize = width * height
                val uvArraySize = yArraySize / 4
                synchronized(this) {
                    y = ByteBuffer.allocate(yArraySize)
                    u = ByteBuffer.allocate(uvArraySize)
                    v = ByteBuffer.allocate(uvArraySize)
                    // Interleaved UV plane holds width*height/2 bytes.
                    // Fix: was over-allocated at uvArraySize * 4 (a full Y-plane's worth).
                    uv = ByteBuffer.allocate(uvArraySize * 2)
                }
            }
        }
    }

    /**
     * Adjust the scale of the rendered texture so the video's aspect ratio is
     * preserved inside the view (letterbox/pillarbox as needed).
     * @param width YUV data width
     * @param height YUV data height
     */
    private fun createBuffers(width: Int, height: Int) {
        // No-op until the surface size is known (and mProgram exists, since
        // onSurfaceChanged runs after onSurfaceCreated).
        if (mScreenWidth > 0 && mScreenHeight > 0) {
            val screenRatio = mScreenHeight.toFloat() / mScreenWidth.toFloat()
            val videoRatio = height.toFloat() / width.toFloat()
            if (screenRatio == videoRatio) {
                mProgram.createBuffers(MyGLProgram.squareVertices)
            } else if (screenRatio < videoRatio) {
                // video is relatively taller: shrink horizontally
                val widthScale = screenRatio / videoRatio
                mProgram.createBuffers(
                    floatArrayOf(-widthScale, -1.0f, widthScale, -1.0f, -widthScale, 1.0f, widthScale, 1.0f)
                )
            } else {
                // video is relatively wider: shrink vertically
                val heightScale = videoRatio / screenRatio
                mProgram.createBuffers(
                    floatArrayOf(-1.0f, -heightScale, 1.0f, -heightScale, -1.0f, heightScale, 1.0f, heightScale)
                )
            }
        }
    }

    /**
     * Feed one frame of YUV data for rendering.
     * @param yuvdata the raw YUV bytes (size must match setYuvDataSize)
     * @param type YUV format: 0 -> I420, 1 -> NV12, 2 -> NV21
     */
    fun feedData(yuvdata: ByteArray, type: Int = 0) {
        synchronized(this) {
            if (hasVisibility) {
                this.type = type
                if (type == 0) {
                    // I420 layout: Y (w*h) + U (w*h/4) + V (w*h/4)
                    y.clear()
                    u.clear()
                    v.clear()
                    y.put(yuvdata, 0, mVideoWidth * mVideoHeight)
                    u.put(yuvdata, mVideoWidth * mVideoHeight, mVideoWidth * mVideoHeight / 4)
                    v.put(yuvdata, mVideoWidth * mVideoHeight * 5 / 4, mVideoWidth * mVideoHeight / 4)
                } else {
                    // NV12/NV21 layout: Y (w*h) + interleaved UV (w*h/2)
                    y.clear()
                    uv.clear()
                    y.put(yuvdata, 0, mVideoWidth * mVideoHeight)
                    uv.put(yuvdata, mVideoWidth * mVideoHeight, mVideoWidth * mVideoHeight / 2)
                }
            }
        }
    }
}
package com.waytous.avmdetect
import android.content.Context
import android.opengl.GLSurfaceView
import android.util.AttributeSet
import com.blankj.utilcode.util.LogUtils
/**
 * GLSurfaceView that renders YUV camera frames through [MyGLRenderer].
 */
class MyGLSurfaceView(context: Context, attributeSet: AttributeSet?) : GLSurfaceView(context, attributeSet) {

    constructor(context: Context) : this(context, null)

    private val renderer: MyGLRenderer

    init {
        // Request an OpenGL ES 3.x context (GLES20 calls remain valid in it).
        setEGLContextClientVersion(3)
        renderer = MyGLRenderer()
        // Attach the renderer that draws on this GLSurfaceView.
        setRenderer(renderer)
        // Only redraw when new frame data arrives (see requestRender()).
        renderMode = RENDERMODE_WHEN_DIRTY
    }

    /**
     * Set the display orientation.
     * @param degrees rotation in degrees (counter-clockwise); valid values are 0, 90, 180, 270
     */
    fun setDisplayOrientation(degrees: Int) {
        renderer.setDisplayOrientation(degrees)
    }

    /**
     * Set the width/height of the YUV frames to render.
     * @param width frame width
     * @param height frame height
     */
    fun setYuvDataSize(width: Int, height: Int) {
        LogUtils.dTag(TAG, "setYuvDataSize $width * $height")
        renderer.setYuvDataSize(width, height)
    }

    /**
     * Feed one frame of YUV data and schedule a redraw.
     * @param yuvData the raw YUV bytes; ignored when null
     * @param type YUV format: 0 -> I420, 1 -> NV12, 2 -> NV21
     */
    fun feedData(yuvData: ByteArray?, type: Int = 0) {
        val frame = yuvData ?: return
        renderer.feedData(frame, type)
        // Ask the GL thread to render the freshly fed frame.
        requestRender()
    }

    companion object {
        private const val TAG = "MyGLSurfaceView"
    }
}
package com.intergration.avm.presentation
import android.app.Presentation
import android.content.Context
import android.os.Bundle
import android.view.Display
import android.widget.FrameLayout
import com.intergration.avm.R
import com.intergration.avm.utils.IMAGE_HEIGHT
import com.intergration.avm.utils.IMAGE_WIDTH
import com.mediatek.smartplatform.PictureConfiguration
import com.waytous.avmdetect.MyGLSurfaceView
import org.greenrobot.eventbus.EventBus
/**
 * Presentation shown on the auxiliary (second) display; hosts a
 * [MyGLSurfaceView] that renders incoming YUV frames.
 */
class AuxiliaryScreenPresentation(outerContext: Context, display: Display) :
    Presentation(outerContext, display) {

    // NOTE(review): never initialized anywhere in this class — confirm before use.
    private lateinit var pictureConfig: PictureConfiguration
    lateinit var frameLayout: FrameLayout
    private lateinit var rendererView: MyGLSurfaceView

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        // Inflate the presentation layout and install it as the content view.
        frameLayout = layoutInflater.inflate(R.layout.layout_presentation, null) as FrameLayout
        setContentView(frameLayout)
        // Configure the renderer view with a fixed 90° rotation and the
        // expected frame dimensions.
        rendererView = frameLayout.findViewById(R.id.rendererView)
        rendererView.setDisplayOrientation(90)
        rendererView.setYuvDataSize(IMAGE_WIDTH, IMAGE_HEIGHT)
    }

    /**
     * Debug helper: reads the auxiliary screen's width/height.
     * NOTE(review): the values are computed but never used or logged.
     */
    private fun windowWidthAndHeightTest() {
        val height = context.resources.displayMetrics.heightPixels
        val width = context.resources.displayMetrics.widthPixels
    }

    /**
     * Show the presentation (EventBus registration currently disabled).
     */
    override fun show() {
        super.show()
        // if (!EventBus.getDefault().isRegistered(this)) {
        //     EventBus.getDefault().register(this)
        // }
    }

    /**
     * Dismiss the presentation (EventBus unregistration currently disabled).
     */
    override fun dismiss() {
        // if (EventBus.getDefault().isRegistered(this)) {
        //     EventBus.getDefault().unregister(this)
        // }
        super.dismiss()
    }

    /** Forward one NV21 frame (type 2) to the renderer view. */
    fun feedData(data: ByteArray) {
        rendererView.feedData(data, 2)
    }

    @Synchronized
    fun setDisplayOrientation(degree: Int) {
        rendererView.setDisplayOrientation(degree)
    }

    @Synchronized
    fun setYuvDataSize(width: Int, height: Int) {
        rendererView.setYuvDataSize(width, height)
    }
}
\ No newline at end of file
package com.intergration.avm.presentation
import android.app.Service
import android.content.Context
import android.content.Intent
import android.hardware.display.DisplayManager
import android.os.Binder
import android.os.Build
import android.os.IBinder
import android.view.Display
import android.view.WindowManager
import com.blankj.utilcode.util.LogUtils
/**
 * Bound service that owns the auxiliary-screen [AuxiliaryScreenPresentation]
 * and exposes show/dismiss/feed operations to clients via [MultiScreenBinder].
 */
class MultiScreenService : Service() {
    /** Display manager used to enumerate attached screens. */
    private var mDisplayManager: DisplayManager? = null
    /** All displays attached at creation time. */
    private lateinit var displays: Array<Display>
    /** Presentation on the auxiliary screen; null when no second display exists. */
    private var presentation: AuxiliaryScreenPresentation? = null

    override fun onBind(intent: Intent): IBinder {
        return MultiScreenBinder()
    }

    override fun onCreate() {
        super.onCreate()
        initPresentation()
    }

    /** Create the Presentation for the second display, if one is attached. */
    private fun initPresentation() {
        if (null == presentation) {
            // Fix: avoid the unsafe `mDisplayManager?.displays as Array<Display>`
            // cast through a nullable receiver; use a local non-null reference.
            val displayManager = getSystemService(Context.DISPLAY_SERVICE) as DisplayManager
            mDisplayManager = displayManager
            displays = displayManager.displays
            if (displays.size > 1) {
                // displays[1] is the auxiliary screen
                presentation = AuxiliaryScreenPresentation(this, displays[1])
                // An overlay window type is required to show UI from a Service.
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { // Android 8.0+
                    presentation?.window?.setType(WindowManager.LayoutParams.TYPE_APPLICATION_OVERLAY)
                } else {
                    presentation?.window?.setType(WindowManager.LayoutParams.TYPE_SYSTEM_OVERLAY)
                }
            }
        }
    }

    /** Show the auxiliary screen's presentation. */
    fun showSecondPresentation() {
        LogUtils.d("showSecondPresentation")
        presentation?.show()
    }

    /** Dismiss the auxiliary screen's presentation. */
    fun dismissSecondPresentation() {
        presentation?.dismiss()
    }

    /** Replace the presentation's content view with the given layout. */
    fun setSecondPresentationContentView(layoutID: Int) {
        presentation?.setContentView(layoutID)
    }

    /** Forward one NV21 frame to the presentation's renderer. */
    fun feedNv21Data(data: ByteArray) {
        presentation?.feedData(data)
    }

    /**
     * Set the renderer's display orientation.
     * Fix: previously this ignored [degree] and always passed a hard-coded 270.
     */
    fun setDisplayOrientation(degree: Int) {
        presentation?.setDisplayOrientation(degree)
    }

    /** Set the expected YUV frame dimensions on the presentation's renderer. */
    fun setYuvDataSize(width: Int, height: Int) {
        presentation?.setYuvDataSize(width, height)
    }

    /** Binder handing clients a direct reference to this service. */
    inner class MultiScreenBinder : Binder() {
        fun getService(): MultiScreenService {
            return this@MultiScreenService
        }
    }
}
\ No newline at end of file
package com.intergration.avm.utils
import com.arcsoft.visdrive.avmsdk.constant.avm.ArcAVMCameraPosType
import com.arcsoft.visdrive.avmsdk.constant.common.ArcImageFormat
import com.arcsoft.visdrive.avmsdk.model.avm.ArcAVMInputImage
/**
 * Factory producing [ArcAVMInputImage] instances for the four camera
 * positions. One pre-configured holder per position is created once and
 * reused; each of*() call only replaces its image buffer.
 *
 * NOTE(review): the holders are shared mutable state — a later of*() call for
 * the same position overwrites the buffer of the image returned earlier, and
 * the factory is not thread-safe. Confirm callers consume each image before
 * requesting the next frame.
 */
class ArcAVMInputImageFactory {
    companion object {
        /** Fill this holder with a fresh direct-buffer copy of [data] and return it. */
        private fun ArcAVMInputImage.withData(data: ByteArray): ArcAVMInputImage = apply {
            imageData = data.toByteBuffer()
        }

        private val frontImageData = ArcAVMInputImage().apply {
            width = IMAGE_WIDTH
            height = IMAGE_HEIGHT
            imageFormat = ArcImageFormat.ARC_IMAGE_FORMAT_NV21.value
            imagePosType = ArcAVMCameraPosType.TYPE_FRONT
            imageData = null
        }

        /** Wrap one NV21 frame from the front camera. */
        fun ofFront(data: ByteArray): ArcAVMInputImage = frontImageData.withData(data)

        private val backImageData = ArcAVMInputImage().apply {
            width = IMAGE_WIDTH
            height = IMAGE_HEIGHT
            imageFormat = ArcImageFormat.ARC_IMAGE_FORMAT_NV21.value
            imagePosType = ArcAVMCameraPosType.TYPE_BACK
            imageData = null
        }

        /** Wrap one NV21 frame from the back camera. */
        fun ofBack(data: ByteArray): ArcAVMInputImage = backImageData.withData(data)

        private val leftImageData = ArcAVMInputImage().apply {
            width = IMAGE_WIDTH
            height = IMAGE_HEIGHT
            imageFormat = ArcImageFormat.ARC_IMAGE_FORMAT_NV21.value
            imagePosType = ArcAVMCameraPosType.TYPE_LEFT
            imageData = null
        }

        /** Wrap one NV21 frame from the left camera. */
        fun ofLeft(data: ByteArray): ArcAVMInputImage = leftImageData.withData(data)

        private val rightImageData = ArcAVMInputImage().apply {
            width = IMAGE_WIDTH
            height = IMAGE_HEIGHT
            imageFormat = ArcImageFormat.ARC_IMAGE_FORMAT_NV21.value
            imagePosType = ArcAVMCameraPosType.TYPE_RIGHT
            imageData = null
        }

        /** Wrap one NV21 frame from the right camera. */
        fun ofRight(data: ByteArray): ArcAVMInputImage = rightImageData.withData(data)
    }
}
\ No newline at end of file
package com.intergration.avm.utils
import android.content.ComponentName
import android.content.Context
import android.content.Intent
import android.content.ServiceConnection
import android.os.IBinder
import com.blankj.utilcode.util.LogUtils
import com.blankj.utilcode.util.SPUtils
import com.intergration.avm.presentation.MultiScreenService
import java.nio.ByteBuffer
// Resolution (in pixels) of each camera's YUV frames fed through the app.
const val IMAGE_WIDTH = 1280
const val IMAGE_HEIGHT = 720
/**
 * Copy this array into a freshly allocated direct [ByteBuffer].
 * The returned buffer is flipped: position 0, limit = array size, ready to read.
 */
fun ByteArray.toByteBuffer(): ByteBuffer {
    val direct = ByteBuffer.allocateDirect(size)
    direct.put(this)
    // Switch from write mode to read mode.
    direct.flip()
    return direct
}
// Lazily created SharedPreferences wrapper holding AVM settings.
val spUtils: SPUtils by lazy {
    SPUtils.getInstance("avm_settings")
}
// SharedPreferences key names used with spUtils.
const val IS_ACTIVATED = "is_activated"
const val CALIB_RESULT = "calib_result"
const val LOOKUP_TABLE = "lookup_table"
// Bound MultiScreenService instance; null while not connected.
private var multiScreenService: MultiScreenService? = null
// Connection callbacks for the MultiScreenService binding.
private val serviceConnection: ServiceConnection = object : ServiceConnection {
    override fun onServiceConnected(name: ComponentName?, service: IBinder) {
        // (log text: "starting auxiliary-screen service...")
        LogUtils.d("启动副屏服务...")
        multiScreenService = (service as MultiScreenService.MultiScreenBinder).getService()
        // Show the second screen as soon as the service is bound.
        multiScreenService?.showSecondPresentation()
    }
    override fun onServiceDisconnected(name: ComponentName?) {
        // Drop the stale reference when the connection is lost.
        multiScreenService = null
    }
}
/**
 * Bind (and auto-create) [MultiScreenService]; the auxiliary screen is shown
 * from the connection callback once binding completes.
 */
fun Context.openAuxiliaryPresentation() {
    bindService(Intent(this, MultiScreenService::class.java), serviceConnection, Context.BIND_AUTO_CREATE)
}
/**
 * Dismiss the auxiliary-screen presentation and unbind from the service.
 *
 * Fix: also clears the cached service reference — unbindService() does NOT
 * trigger onServiceDisconnected(), so previously the stale Service object
 * (and its Context) stayed reachable after closing, and later feedData()
 * calls would still target the dead binding.
 */
fun Context.closeAuxiliaryPresentation() {
    multiScreenService?.dismissSecondPresentation()
    unbindService(serviceConnection)
    multiScreenService = null
}
/**
 * Forward one NV21 frame to the auxiliary screen.
 * No-op while the service is not bound.
 */
fun feedData(data: ByteArray) {
    multiScreenService?.feedNv21Data(data)
}
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path android:pathData="M31,63.928c0,0 6.4,-11 12.1,-13.1c7.2,-2.6 26,-1.4 26,-1.4l38.1,38.1L107,108.928l-32,-1L31,63.928z">
<aapt:attr name="android:fillColor">
<gradient
android:endX="85.84757"
android:endY="92.4963"
android:startX="42.9492"
android:startY="49.59793"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0" />
<item
android:color="#00000000"
android:offset="1.0" />
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M65.3,45.828l3.8,-6.6c0.2,-0.4 0.1,-0.9 -0.3,-1.1c-0.4,-0.2 -0.9,-0.1 -1.1,0.3l-3.9,6.7c-6.3,-2.8 -13.4,-2.8 -19.7,0l-3.9,-6.7c-0.2,-0.4 -0.7,-0.5 -1.1,-0.3C38.8,38.328 38.7,38.828 38.9,39.228l3.8,6.6C36.2,49.428 31.7,56.028 31,63.928h46C76.3,56.028 71.8,49.428 65.3,45.828zM43.4,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2c-0.3,-0.7 -0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C45.3,56.528 44.5,57.328 43.4,57.328L43.4,57.328zM64.6,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2s-0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C66.5,56.528 65.6,57.328 64.6,57.328L64.6,57.328z"
android:strokeWidth="1"
android:strokeColor="#00000000" />
</vector>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path
android:fillColor="#3DDC84"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
</vector>
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity">
<Button
android:id="@+id/activeAvm"
android:layout_width="110dp"
android:layout_height="50dp"
android:layout_marginTop="10dp"
android:elevation="2dp"
android:text="@string/active_avm"
app:layout_constraintEnd_toStartOf="@id/vertical_guideline1"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent"/>
<androidx.constraintlayout.widget.Guideline
android:id="@+id/vertical_guideline1"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:orientation="vertical"
app:layout_constraintGuide_percent=".20" />
<androidx.constraintlayout.widget.Guideline
android:id="@+id/vertical_guideline2"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:orientation="vertical"
app:layout_constraintGuide_percent=".60" />
<Button
android:id="@+id/autoCalib"
android:layout_width="110dp"
android:layout_height="50dp"
android:layout_marginBottom="20dp"
android:elevation="2dp"
android:text="@string/auto_calib"
app:layout_constraintEnd_toStartOf="@id/vertical_guideline1"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintBottom_toTopOf="@id/manualCalib"/>
<Button
android:id="@+id/manualCalib"
android:layout_width="110dp"
android:layout_height="50dp"
android:layout_marginBottom="20dp"
android:elevation="2dp"
android:text="@string/manual_calib"
app:layout_constraintEnd_toStartOf="@id/vertical_guideline1"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintBottom_toTopOf="@id/horizontal_guideline1"/>
<androidx.constraintlayout.widget.Guideline
android:id="@+id/horizontal_guideline1"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:orientation="horizontal"
app:layout_constraintGuide_percent=".50" />
<Button
android:id="@+id/startAvm"
android:layout_width="110dp"
android:layout_height="50dp"
android:layout_marginTop="20dp"
android:elevation="2dp"
android:text="@string/start_avm"
app:layout_constraintEnd_toStartOf="@id/vertical_guideline1"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@id/horizontal_guideline1"/>
<Button
android:id="@+id/stopAvm"
android:layout_width="110dp"
android:layout_height="50dp"
android:layout_marginTop="20dp"
android:elevation="2dp"
android:text="@string/stop_avm"
app:layout_constraintEnd_toStartOf="@id/vertical_guideline1"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@id/startAvm"/>
<TextView
android:id="@+id/frontCameraTxt"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="15dp"
android:text="@string/front_camera"
android:textSize="24sp"
android:textColor="@android:color/holo_blue_dark"
app:layout_constraintStart_toStartOf="@id/frontPreview"
app:layout_constraintEnd_toEndOf="@id/frontPreview"
app:layout_constraintTop_toTopOf="@id/frontPreview"/>
<SurfaceView
android:id="@+id/frontPreview"
android:layout_width="0dp"
android:layout_height="0dp"
app:layout_constraintTop_toTopOf="parent"
app:layout_constraintStart_toEndOf="@id/vertical_guideline1"
app:layout_constraintEnd_toStartOf="@id/vertical_guideline2"
app:layout_constraintBottom_toTopOf="@id/horizontal_guideline1" />
<TextView
android:id="@+id/rightCameraTxt"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="15dp"
android:text="@string/right_camera"
android:textSize="24sp"
android:textColor="@android:color/holo_blue_dark"
app:layout_constraintStart_toStartOf="@id/rightPreview"
app:layout_constraintEnd_toEndOf="@id/rightPreview"
app:layout_constraintTop_toTopOf="@id/rightPreview"/>
<SurfaceView
android:id="@+id/rightPreview"
android:layout_width="0dp"
android:layout_height="0dp"
app:layout_constraintStart_toEndOf="@id/vertical_guideline2"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintTop_toTopOf="parent"
app:layout_constraintBottom_toTopOf="@id/horizontal_guideline1" />
<TextView
android:id="@+id/backCameraTxt"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="15dp"
android:text="@string/back_camera"
android:textSize="24sp"
android:textColor="@android:color/holo_blue_dark"
app:layout_constraintStart_toStartOf="@id/backPreview"
app:layout_constraintEnd_toEndOf="@id/backPreview"
app:layout_constraintTop_toTopOf="@id/backPreview"/>
<SurfaceView
android:id="@+id/backPreview"
android:layout_width="0dp"
android:layout_height="0dp"
app:layout_constraintStart_toEndOf="@id/vertical_guideline1"
app:layout_constraintEnd_toStartOf="@id/vertical_guideline2"
app:layout_constraintTop_toBottomOf="@id/horizontal_guideline1"
app:layout_constraintBottom_toBottomOf="parent" />
<TextView
android:id="@+id/leftCameraTxt"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="15dp"
android:text="@string/left_camera"
android:textSize="24sp"
android:textColor="@android:color/holo_blue_dark"
app:layout_constraintStart_toStartOf="@id/leftPreview"
app:layout_constraintEnd_toEndOf="@id/leftPreview"
app:layout_constraintTop_toTopOf="@id/leftPreview"/>
<SurfaceView
android:id="@+id/leftPreview"
android:layout_width="0dp"
android:layout_height="0dp"
app:layout_constraintStart_toEndOf="@id/vertical_guideline2"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintTop_toBottomOf="@id/horizontal_guideline1"
app:layout_constraintBottom_toBottomOf="parent"/>
</androidx.constraintlayout.widget.ConstraintLayout>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="分屏显示"
android:textSize="24sp"
android:textColor="@android:color/holo_blue_light"
android:layout_gravity="center_horizontal|top"/>
<com.waytous.avmdetect.MyGLSurfaceView
android:id="@+id/rendererView"
android:layout_width="match_parent"
android:layout_height="match_parent"/>
</FrameLayout>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
<monochrome android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
<monochrome android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>
\ No newline at end of file
<resources xmlns:tools="http://schemas.android.com/tools">
<!-- Base application theme. -->
<style name="Base.Theme.AvmIntergration" parent="Theme.Material3.DayNight.NoActionBar">
<!-- Customize your dark theme here. -->
<!-- <item name="colorPrimary">@color/my_dark_primary</item> -->
</style>
</resources>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="black">#FF000000</color>
<color name="white">#FFFFFFFF</color>
</resources>
\ No newline at end of file
<resources>
<string name="app_name">avmIntegration</string>
<string name="active_avm">激活AVM</string>
<string name="start_avm">开启AVM</string>
<string name="stop_avm">停止AVM</string>
<string name="auto_calib">自动标定</string>
<string name="manual_calib">手动标定</string>
<string name="left_camera">左摄像头</string>
<string name="front_camera">前摄像头</string>
<string name="right_camera">右摄像头</string>
<string name="back_camera">后摄像头</string>
</resources>
\ No newline at end of file
<resources xmlns:tools="http://schemas.android.com/tools">
<!-- Base application theme. -->
<style name="Base.Theme.AvmIntergration" parent="Theme.Material3.DayNight.NoActionBar">
<!-- Customize your light theme here. -->
<!-- <item name="colorPrimary">@color/my_light_primary</item> -->
</style>
<style name="Theme.AvmIntergration" parent="Base.Theme.AvmIntergration" />
</resources>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?><!--
Sample backup rules file; uncomment and customize as necessary.
See https://developer.android.com/guide/topics/data/autobackup
for details.
Note: This file is ignored for devices older that API 31
See https://developer.android.com/about/versions/12/backup-restore
-->
<full-backup-content>
<!--
<include domain="sharedpref" path="."/>
<exclude domain="sharedpref" path="device.xml"/>
-->
</full-backup-content>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?><!--
Sample data extraction rules file; uncomment and customize as necessary.
See https://developer.android.com/about/versions/12/backup-restore#xml-changes
for details.
-->
<data-extraction-rules>
<cloud-backup>
<!-- TODO: Use <include> and <exclude> to control what is backed up.
<include .../>
<exclude .../>
-->
</cloud-backup>
<!--
<device-transfer>
<include .../>
<exclude .../>
</device-transfer>
-->
</data-extraction-rules>
\ No newline at end of file
package com.intergration.avm
import org.junit.Test
import org.junit.Assert.*
/**
 * Example local unit test that executes on the development machine's JVM
 * (no emulator or device required).
 *
 * See [testing documentation](http://d.android.com/tools/testing).
 */
class ExampleUnitTest {
    @Test
    fun addition_isCorrect() {
        // Name the computed value so the assertion reads expected-vs-actual.
        val actual = 2 + 2
        assertEquals(4, actual)
    }
}
\ No newline at end of file
// Top-level build file where you can add configuration options common to all sub-projects/modules.
// Plugin versions are declared once here with `apply false`; module build files
// apply the plugins by id only, so every module uses the same version.
plugins {
id 'com.android.application' version '8.0.2' apply false
id 'com.android.library' version '8.0.2' apply false
id 'org.jetbrains.kotlin.android' version '1.8.20' apply false
}
\ No newline at end of file
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx2048m -Dfile.encoding=UTF-8
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
# AndroidX package structure to make it clearer which packages are bundled with the
# Android operating system, and which are packaged with your app's APK
# https://developer.android.com/topic/libraries/support-library/androidx-rn
android.useAndroidX=true
# Kotlin code style for this project: "official" or "obsolete":
kotlin.code.style=official
# Enables namespacing of each library's R class so that its R class includes only the
# resources declared in the library itself and none from the library's dependencies,
# thereby reducing the size of the R class for that library
android.nonTransitiveRClass=true
\ No newline at end of file
#Tue Sep 26 14:54:07 CST 2023
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.0-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
#!/usr/bin/env sh
#
# Copyright 2015 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
# Resolve $0 to the physical script location so APP_HOME is correct even
# when gradlew is invoked through a (chain of) symlinks.
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
# Temporarily cd into the script's directory to capture its absolute path,
# then return to where the user started.
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
# Print a message to stdout without aborting.
warn () {
echo "$*"
}
# Print a message and abort the script with a non-zero exit status.
die () {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
# Exactly one of these flags is set to true based on `uname`; they gate the
# platform-specific branches further down (ulimit, dock options, cygpath).
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
# The wrapper jar is the only classpath entry Gradle needs to bootstrap itself.
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
# No JAVA_HOME: fall back to whatever `java` is on the PATH, or abort.
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
# Skipped on Cygwin/Darwin/NonStop where the hard-limit query/raise below
# is not applicable; failures are warnings only, never fatal.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin or MSYS, switch paths to Windows format before running java
if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
# Each positional arg that looks like an absolute Unix path (and is not an
# option starting with '-') is rewritten to Windows form via cygpath, then
# the positional parameters are rebuilt with `set --` (max 9 args supported).
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=`expr $i + 1`
done
case $i in
0) set -- ;;
1) set -- "$args0" ;;
2) set -- "$args0" "$args1" ;;
3) set -- "$args0" "$args1" "$args2" ;;
4) set -- "$args0" "$args1" "$args2" "$args3" ;;
5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
# Wraps each argument in single quotes (escaping embedded quotes) so the
# final `eval set --` below re-splits them exactly as the user passed them.
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=`save "$@"`
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
# Replace this shell with the JVM so Gradle's exit status is the script's.
exec "$JAVACMD" "$@"
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
@rem APP_HOME is derived from this script's own directory (%~dp0).
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
@rem Prefer JAVA_HOME when defined; otherwise probe `java.exe` on the PATH.
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto execute
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:execute
@rem Setup the command line
@rem The wrapper jar is the only classpath entry needed to bootstrap Gradle.
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega
// Central repository configuration for the whole build.
// Plugin repositories used when resolving the plugins {} blocks.
pluginManagement {
repositories {
google()
mavenCentral()
gradlePluginPortal()
}
}
// Dependency repositories for all modules; FAIL_ON_PROJECT_REPOS forbids
// individual modules from declaring their own repositories.
dependencyResolutionManagement {
repositoriesMode.set(RepositoriesMode.FAIL_ON_PROJECT_REPOS)
repositories {
google()
mavenCentral()
}
}
rootProject.name = "avmIntergration"
include ':app'
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment