Commit b97bd49c authored by 马乐's avatar 马乐

融合图全屏

parent 59027733
...@@ -38,7 +38,8 @@ ...@@ -38,7 +38,8 @@
tools:targetApi="31"> tools:targetApi="31">
<activity <activity
android:name=".MainActivity" android:name=".MainActivity"
android:exported="true"> android:exported="true"
android:screenOrientation="landscape">
<intent-filter> <intent-filter>
<action android:name="android.intent.action.MAIN" /> <action android:name="android.intent.action.MAIN" />
...@@ -47,7 +48,8 @@ ...@@ -47,7 +48,8 @@
</activity> </activity>
<service android:name=".presentation.MultiScreenService" <service android:name=".presentation.MultiScreenService"
android:enabled="true" android:enabled="true"
android:exported="true"/> android:exported="true"
android:screenOrientation="landscape"/>
</application> </application>
</manifest> </manifest>
\ No newline at end of file
...@@ -12,6 +12,8 @@ import androidx.activity.result.contract.ActivityResultContracts ...@@ -12,6 +12,8 @@ import androidx.activity.result.contract.ActivityResultContracts
import androidx.appcompat.app.AppCompatActivity import androidx.appcompat.app.AppCompatActivity
import androidx.core.app.ActivityCompat import androidx.core.app.ActivityCompat
import androidx.core.content.ContextCompat import androidx.core.content.ContextCompat
import com.arcsoft.imageutil.ArcSoftImageFormat
import com.arcsoft.imageutil.ArcSoftImageUtil
import com.arcsoft.visdrive.avmsdk.ArcErrorInfo import com.arcsoft.visdrive.avmsdk.ArcErrorInfo
import com.arcsoft.visdrive.avmsdk.ArcVisDriveAVMEngine import com.arcsoft.visdrive.avmsdk.ArcVisDriveAVMEngine
import com.arcsoft.visdrive.avmsdk.constant.avm.ArcAVMType import com.arcsoft.visdrive.avmsdk.constant.avm.ArcAVMType
...@@ -21,6 +23,7 @@ import com.arcsoft.visdrive.avmsdk.model.avm.ArcAVMLookupTable ...@@ -21,6 +23,7 @@ import com.arcsoft.visdrive.avmsdk.model.avm.ArcAVMLookupTable
import com.arcsoft.visdrive.avmsdk.model.avm.ArcAVMOutputImage import com.arcsoft.visdrive.avmsdk.model.avm.ArcAVMOutputImage
import com.arcsoft.visdrive.avmsdk.model.common.ArcActiveEnvParam import com.arcsoft.visdrive.avmsdk.model.common.ArcActiveEnvParam
import com.blankj.utilcode.util.FileUtils import com.blankj.utilcode.util.FileUtils
import com.blankj.utilcode.util.ImageUtils
import com.blankj.utilcode.util.LogUtils import com.blankj.utilcode.util.LogUtils
import com.intergration.avm.databinding.ActivityMainBinding import com.intergration.avm.databinding.ActivityMainBinding
import com.intergration.avm.utils.ArcAVMInputImageFactory import com.intergration.avm.utils.ArcAVMInputImageFactory
...@@ -212,14 +215,13 @@ class MainActivity : AppCompatActivity() { ...@@ -212,14 +215,13 @@ class MainActivity : AppCompatActivity() {
} }
avmPlayJob = avmStartScope?.launch { avmPlayJob = avmStartScope?.launch {
try { try {
val frontScope = CoroutineScope(Dispatchers.Default)
avmFrontCamera.startPictureSequence( avmFrontCamera.startPictureSequence(
PictureSequenceSource.GENERAL_CAMERA, PictureSequenceSource.GENERAL_CAMERA,
configCameraSequence(avmDir.absolutePath) { _, _, status, data, _ -> configCameraSequence(avmDir.absolutePath) { _, _, status, data, _ ->
if (status == SpmCameraDevice.ImageDataCallback.IMAGE_STATUS_SUCCEEDED) { if (status == SpmCameraDevice.ImageDataCallback.IMAGE_STATUS_SUCCEEDED) {
ArcAVMInputImageFactory.ofFront(data).also { ArcAVMInputImageFactory.ofFront(data).also {
avmInputImages[0] = it avmInputImages[0] = it
frontScope.launch { launch {
ensureActive() ensureActive()
channel.send(true) channel.send(true)
} }
...@@ -227,14 +229,13 @@ class MainActivity : AppCompatActivity() { ...@@ -227,14 +229,13 @@ class MainActivity : AppCompatActivity() {
} }
} }
) )
val rightScope = CoroutineScope(Dispatchers.Default)
avmRightCamera.startPictureSequence( avmRightCamera.startPictureSequence(
PictureSequenceSource.GENERAL_CAMERA, PictureSequenceSource.GENERAL_CAMERA,
configCameraSequence(avmDir.absolutePath) { _, _, status, data, _ -> configCameraSequence(avmDir.absolutePath) { _, _, status, data, _ ->
if (status == SpmCameraDevice.ImageDataCallback.IMAGE_STATUS_SUCCEEDED) { if (status == SpmCameraDevice.ImageDataCallback.IMAGE_STATUS_SUCCEEDED) {
ArcAVMInputImageFactory.ofRight(data).also { ArcAVMInputImageFactory.ofRight(data).also {
avmInputImages[1] = it avmInputImages[1] = it
rightScope.launch { launch {
ensureActive() ensureActive()
channel.send(true) channel.send(true)
} }
...@@ -242,14 +243,13 @@ class MainActivity : AppCompatActivity() { ...@@ -242,14 +243,13 @@ class MainActivity : AppCompatActivity() {
} }
} }
) )
val backScope = CoroutineScope(Dispatchers.Default)
avmBackCamera.startPictureSequence( avmBackCamera.startPictureSequence(
PictureSequenceSource.GENERAL_CAMERA, PictureSequenceSource.GENERAL_CAMERA,
configCameraSequence(avmDir.absolutePath) { _, _, status, data, _ -> configCameraSequence(avmDir.absolutePath) { _, _, status, data, _ ->
if (status == SpmCameraDevice.ImageDataCallback.IMAGE_STATUS_SUCCEEDED) { if (status == SpmCameraDevice.ImageDataCallback.IMAGE_STATUS_SUCCEEDED) {
ArcAVMInputImageFactory.ofBack(data).also { ArcAVMInputImageFactory.ofBack(data).also {
avmInputImages[2] = it avmInputImages[2] = it
backScope.launch { launch {
ensureActive() ensureActive()
channel.send(true) channel.send(true)
} }
...@@ -257,14 +257,13 @@ class MainActivity : AppCompatActivity() { ...@@ -257,14 +257,13 @@ class MainActivity : AppCompatActivity() {
} }
} }
) )
val leftScope = CoroutineScope(Dispatchers.Default)
avmLeftCamera.startPictureSequence( avmLeftCamera.startPictureSequence(
PictureSequenceSource.GENERAL_CAMERA, PictureSequenceSource.GENERAL_CAMERA,
configCameraSequence(avmDir.absolutePath) { _, _, status, data, _ -> configCameraSequence(avmDir.absolutePath) { _, _, status, data, _ ->
if (status == SpmCameraDevice.ImageDataCallback.IMAGE_STATUS_SUCCEEDED) { if (status == SpmCameraDevice.ImageDataCallback.IMAGE_STATUS_SUCCEEDED) {
ArcAVMInputImageFactory.ofLeft(data).also { ArcAVMInputImageFactory.ofLeft(data).also {
avmInputImages[3] = it avmInputImages[3] = it
leftScope.launch { launch {
ensureActive() ensureActive()
channel.send(true) channel.send(true)
} }
...@@ -287,7 +286,7 @@ class MainActivity : AppCompatActivity() { ...@@ -287,7 +286,7 @@ class MainActivity : AppCompatActivity() {
) )
if (result == ArcErrorInfo.ARC_ERROR_OK) { if (result == ArcErrorInfo.ARC_ERROR_OK) {
if (outputImageList.isNotEmpty()) { if (outputImageList.isNotEmpty()) {
feedData(outputImageList[0].imageData) feedData(outputImageList[0].imageData,outputImageList[0].width,outputImageList[0].height)
} }
} }
} }
...@@ -351,12 +350,10 @@ class MainActivity : AppCompatActivity() { ...@@ -351,12 +350,10 @@ class MainActivity : AppCompatActivity() {
override fun onDestroy() { override fun onDestroy() {
super.onDestroy() super.onDestroy()
SmartPlatformManager.get().run { smartPlatformManager.closeCameraDevice(avmFrontCamera)
closeCameraDevice(avmFrontCamera) smartPlatformManager.closeCameraDevice(avmBackCamera)
closeCameraDevice(avmBackCamera) smartPlatformManager.closeCameraDevice(avmLeftCamera)
closeCameraDevice(avmLeftCamera) smartPlatformManager.closeCameraDevice(avmRightCamera)
closeCameraDevice(avmRightCamera)
}
job?.cancel(CancellationException("MainActivity Destroyed")) job?.cancel(CancellationException("MainActivity Destroyed"))
} }
......
...@@ -104,8 +104,8 @@ fun ArcVisDriveAVMEngine.initializeAvmParams(calibResultPath:File,lookupPath:Fil ...@@ -104,8 +104,8 @@ fun ArcVisDriveAVMEngine.initializeAvmParams(calibResultPath:File,lookupPath:Fil
calibInfo.singleImageWidth = IMAGE_WIDTH calibInfo.singleImageWidth = IMAGE_WIDTH
calibInfo.singleImageHeight = IMAGE_HEIGHT calibInfo.singleImageHeight = IMAGE_HEIGHT
val carInfo = ArcAVMCarInfo() val carInfo = ArcAVMCarInfo()
carInfo.width = 590 carInfo.width = 600
carInfo.length = 1800 carInfo.length = 1200
carInfo.blinkAreaFront = 0 carInfo.blinkAreaFront = 0
carInfo.blinkAreaRight = 0 carInfo.blinkAreaRight = 0
carInfo.blinkAreaBack = 0 carInfo.blinkAreaBack = 0
...@@ -133,10 +133,10 @@ fun ArcVisDriveAVMEngine.initializeAvmParams(calibResultPath:File,lookupPath:Fil ...@@ -133,10 +133,10 @@ fun ArcVisDriveAVMEngine.initializeAvmParams(calibResultPath:File,lookupPath:Fil
fun ArcVisDriveAVMEngine.autoCalib(calibResultPath:File,lookupPath:File):Int{ fun ArcVisDriveAVMEngine.autoCalib(calibResultPath:File,lookupPath:File):Int{
val clothInfo = ArcAVMClothInfo() val clothInfo = ArcAVMClothInfo()
clothInfo.d1 = 120 clothInfo.d1 = 120
clothInfo.d3 = 690 clothInfo.d3 = 590
clothInfo.d4 = 690 clothInfo.d4 = 590
clothInfo.d5 = 690 clothInfo.d5 = 550
clothInfo.d6 = 690 clothInfo.d6 = 550
val chessInfoFront = ArcAVMChessInfo(ArcAVMCameraPosType.TYPE_FRONT) val chessInfoFront = ArcAVMChessInfo(ArcAVMCameraPosType.TYPE_FRONT)
val chessInfoRight = ArcAVMChessInfo(ArcAVMCameraPosType.TYPE_RIGHT) val chessInfoRight = ArcAVMChessInfo(ArcAVMCameraPosType.TYPE_RIGHT)
val chessInfoBack = ArcAVMChessInfo(ArcAVMCameraPosType.TYPE_BACK) val chessInfoBack = ArcAVMChessInfo(ArcAVMCameraPosType.TYPE_BACK)
......
package com.intergration.avm.glsurface;
import android.opengl.GLES20;
import java.nio.IntBuffer;
/**
 * Helper for the GLES 2.0 pipeline that renders camera YUV frames:
 * shader sources, program compilation/linking, and the vertex /
 * texture-coordinate tables for each supported display orientation.
 */
public class GLUtils {
    private static final String TAG = "GLUtils";
    /**
     * Vertex shader: passes the position through unchanged and forwards
     * the texture coordinate to the fragment stage.
     */
    private static String VERTEX_SHADER =
            "    attribute vec4 attr_position;\n" +
                    "    attribute vec2 attr_tc;\n" +
                    "    varying vec2 tc;\n" +
                    "    void main() {\n" +
                    "        gl_Position = attr_position;\n" +
                    "        tc = attr_tc;\n" +
                    "    }";
    /**
     * Fragment shader, normal effect: samples the Y/U/V planes and converts
     * YUV to RGB with a constant conversion matrix.
     */
    public static String FRAG_SHADER_NORMAL =
            "precision mediump float;\n" +
                    "    varying vec2 tc;\n" +
                    "    uniform sampler2D ySampler;\n" +
                    "    uniform sampler2D uSampler;\n" +
                    "    uniform sampler2D vSampler;\n" +
                    "    const mat3 convertMat = mat3(1.0, 1.0, 1.0, 0, -0.344, 1.77, 1.403, -0.714,0);\n" +
                    "    void main()\n" +
                    "    {\n" +
                    "        vec3 yuv;\n" +
                    "        yuv.x = texture2D(ySampler, tc).r;\n" +
                    "        yuv.y = texture2D(vSampler, tc).r - 0.5;\n" +
                    "        yuv.z = texture2D(uSampler, tc).r - 0.5;\n" +
                    "        gl_FragColor = vec4(convertMat * yuv, 1.0);\n" +
                    "    }";
    /**
     * Fragment shader, grayscale effect. The U and V planes are not needed
     * (the Java side may be adapted to upload only the Y plane of NV21 data).
     */
    public static String FRAG_SHADER_GRAY =
            "precision mediump float;\n" +
                    "    varying vec2 tc;\n" +
                    "    uniform sampler2D ySampler;\n" +
                    "    void main()\n" +
                    "    {\n" +
                    "        vec3 yuv;\n" +
                    "        yuv.x = texture2D(ySampler, tc).r;\n" +
                    "        gl_FragColor = vec4(vec3(yuv.x), 1.0);\n" +
                    "    }";
    /**
     * Fragment shader, emboss (sculpture) effect. The U and V planes are not
     * needed (the Java side may be adapted to upload only the Y plane of NV21
     * data).
     */
    public static String FRAG_SHADER_SCULPTURE =
            "precision mediump float;\n" +
                    "varying vec2 tc;\n" +
                    "    uniform sampler2D ySampler;\n" +
                    "    const vec2 texSize = vec2(100.0, 100.0);\n" +
                    "    const vec4 sculptureColor = vec4(0.5, 0.5, 0.5, 1.0);\n" +
                    "\n" +
                    "void main()\n" +
                    "{\n" +
                    "    vec2 upLeftCoord = vec2(tc.x-1.0/texSize.x, tc.y-1.0/texSize.y);\n" +
                    "    vec4 curColor = texture2D(ySampler, tc);\n" +
                    "    vec4 upLeftColor = texture2D(ySampler, upLeftCoord);\n" +
                    "    vec4 delColor = curColor - upLeftColor;\n" +
                    "    gl_FragColor = vec4(vec3(delColor), 0.0) + sculptureColor;\n" +
                    "}";
    // Every pair of values in SQUARE_VERTICES is one vertex (x, y).
    static final int COUNT_PER_SQUARE_VERTICE = 2;
    // Every pair of values in the COORD_VERTICES tables is one texture coordinate (s, t).
    static final int COUNT_PER_COORD_VERTICES = 2;
    /**
     * Full-screen quad positions in NDC, in the order:
     * bottom-left, bottom-right, top-left, top-right (triangle strip).
     */
    static final float[] SQUARE_VERTICES = {
            -1.0f, -1.0f,
            1.0f, -1.0f,
            -1.0f, 1.0f,
            1.0f, 1.0f
    };
    /**
     * Texture coordinates for displaying the source data as-is
     * (the t axis is already flipped to map image rows to screen rows).
     */
    static final float[] COORD_VERTICES = {
            0.0f, 1.0f,
            1.0f, 1.0f,
            0.0f, 0.0f,
            1.0f, 0.0f
    };
    /**
     * Texture coordinates for displaying the source rotated 90 degrees
     * counter-clockwise.
     */
    static final float[] ROTATE_90_COORD_VERTICES = {
            1.0f, 1.0f,
            1.0f, 0.0f,
            0.0f, 1.0f,
            0.0f, 0.0f
    };
    /**
     * Texture coordinates for displaying the source rotated 180 degrees
     * (both axes of {@link #COORD_VERTICES} flipped).
     */
    static final float[] ROTATE_180_COORD_VERTICES = {
            1.0f, 0.0f,
            0.0f, 0.0f,
            1.0f, 1.0f,
            0.0f, 1.0f
    };
    /**
     * Texture coordinates for displaying the source rotated 270 degrees
     * counter-clockwise.
     */
    static final float[] ROTATE_270_COORD_VERTICES = {
            0.0f, 0.0f,
            0.0f, 1.0f,
            1.0f, 0.0f,
            1.0f, 1.0f
    };
    /**
     * Texture coordinates for mirrored (horizontally flipped) display:
     * the s axis of {@link #COORD_VERTICES} flipped.
     */
    static final float[] MIRROR_COORD_VERTICES = {
            1.0f, 1.0f,
            0.0f, 1.0f,
            1.0f, 0.0f,
            0.0f, 0.0f
    };
    /**
     * Texture coordinates for mirrored display rotated 90 degrees
     * counter-clockwise: the s axis of {@link #ROTATE_90_COORD_VERTICES}
     * flipped.
     */
    static final float[] ROTATE_90_MIRROR_COORD_VERTICES = {
            0.0f, 1.0f,
            0.0f, 0.0f,
            1.0f, 1.0f,
            1.0f, 0.0f
    };
    /**
     * Texture coordinates for mirrored display rotated 180 degrees: the
     * s axis of {@link #ROTATE_180_COORD_VERTICES} flipped.
     *
     * <p>Fixed: the previous table was byte-identical to
     * {@link #ROTATE_180_COORD_VERTICES}, i.e. the mirror was lost. Every
     * other {@code *_MIRROR_*} table here is the s-flip of its non-mirrored
     * counterpart; this table now follows the same rule.
     */
    static final float[] ROTATE_180_MIRROR_COORD_VERTICES = {
            0.0f, 0.0f,
            1.0f, 0.0f,
            0.0f, 1.0f,
            1.0f, 1.0f
    };
    /**
     * Texture coordinates for mirrored display rotated 270 degrees
     * counter-clockwise: the s axis of {@link #ROTATE_270_COORD_VERTICES}
     * flipped.
     */
    static final float[] ROTATE_270_MIRROR_COORD_VERTICES = {
            1.0f, 0.0f,
            1.0f, 1.0f,
            0.0f, 0.0f,
            0.0f, 1.0f
    };
    /**
     * Creates and links an OpenGL program from the built-in vertex shader and
     * the given fragment shader.
     *
     * @param fragmentShaderCode fragment shader source code
     * @return a valid program object reference, or -1 if compilation or
     *         linking failed
     */
    static int createShaderProgram(String fragmentShaderCode) {
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, VERTEX_SHADER);
        int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
        // Bail out early if either shader failed to compile (loadShader returns 0);
        // attaching an invalid shader would generate a GL error and fail at link time anyway.
        if (vertexShader == 0 || fragmentShader == 0) {
            return -1;
        }
        // Create an empty OpenGL ES program and attach both shaders.
        int mProgram = GLES20.glCreateProgram();
        GLES20.glAttachShader(mProgram, vertexShader);
        GLES20.glAttachShader(mProgram, fragmentShader);
        // Link the program.
        GLES20.glLinkProgram(mProgram);
        // The shader objects are no longer needed once the program is linked
        // (or has failed to link); delete them so the driver can reclaim them.
        // Previously they were leaked.
        GLES20.glDeleteShader(vertexShader);
        GLES20.glDeleteShader(fragmentShader);
        // Check the link status.
        IntBuffer linked = IntBuffer.allocate(1);
        GLES20.glGetProgramiv(mProgram, GLES20.GL_LINK_STATUS, linked);
        if (linked.get(0) == 0) {
            // Avoid leaking the failed program object (previously leaked).
            GLES20.glDeleteProgram(mProgram);
            return -1;
        }
        return mProgram;
    }
    /**
     * Compiles a shader.
     *
     * @param type       shader type, either {@link GLES20#GL_FRAGMENT_SHADER}
     *                   or {@link GLES20#GL_VERTEX_SHADER}
     * @param shaderCode shader source code
     * @return a valid shader object reference, or 0 if compilation failed
     */
    private static int loadShader(int type, String shaderCode) {
        // Create an empty shader, upload the source, and compile it.
        int shader = GLES20.glCreateShader(type);
        GLES20.glShaderSource(shader, shaderCode);
        GLES20.glCompileShader(shader);
        // Check the compile status.
        IntBuffer compiled = IntBuffer.allocate(1);
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled);
        if (compiled.get(0) == 0) {
            // Free the failed shader object (previously leaked).
            GLES20.glDeleteShader(shader);
            return 0;
        }
        return shader;
    }
}
...@@ -6,10 +6,12 @@ import android.os.Bundle ...@@ -6,10 +6,12 @@ import android.os.Bundle
import android.view.Display import android.view.Display
import android.widget.FrameLayout import android.widget.FrameLayout
import com.intergration.avm.R import com.intergration.avm.R
import com.intergration.avm.glsurface.CameraGLSurfaceView
import com.intergration.avm.glsurface.GLUtils
import com.intergration.avm.glsurface.MyGLSurfaceView
import com.intergration.avm.utils.IMAGE_HEIGHT import com.intergration.avm.utils.IMAGE_HEIGHT
import com.intergration.avm.utils.IMAGE_WIDTH import com.intergration.avm.utils.IMAGE_WIDTH
import com.mediatek.smartplatform.PictureConfiguration import com.mediatek.smartplatform.PictureConfiguration
import com.intergration.avm.glsurface.MyGLSurfaceView
class AuxiliaryScreenPresentation(outerContext: Context, display: Display) : class AuxiliaryScreenPresentation(outerContext: Context, display: Display) :
Presentation(outerContext, display) { Presentation(outerContext, display) {
...@@ -18,15 +20,15 @@ class AuxiliaryScreenPresentation(outerContext: Context, display: Display) : ...@@ -18,15 +20,15 @@ class AuxiliaryScreenPresentation(outerContext: Context, display: Display) :
lateinit var frameLayout: FrameLayout lateinit var frameLayout: FrameLayout
private lateinit var rendererView: MyGLSurfaceView private lateinit var rendererView: CameraGLSurfaceView
override fun onCreate(savedInstanceState: Bundle?) { override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState) super.onCreate(savedInstanceState)
frameLayout = layoutInflater.inflate(R.layout.layout_presentation,null) as FrameLayout frameLayout = layoutInflater.inflate(R.layout.layout_presentation,null) as FrameLayout
setContentView(frameLayout) setContentView(frameLayout)
rendererView = frameLayout.findViewById<MyGLSurfaceView?>(R.id.rendererView).also { rendererView = frameLayout.findViewById(R.id.rendererView)
it.setYuvDataSize(IMAGE_HEIGHT,IMAGE_WIDTH) rendererView.setFragmentShaderCode(GLUtils.FRAG_SHADER_NORMAL)
} rendererView.init(true,0,IMAGE_HEIGHT,IMAGE_WIDTH)
} }
/** /**
...@@ -59,17 +61,17 @@ class AuxiliaryScreenPresentation(outerContext: Context, display: Display) : ...@@ -59,17 +61,17 @@ class AuxiliaryScreenPresentation(outerContext: Context, display: Display) :
fun feedData(data:ByteArray){ fun feedData(data:ByteArray){
rendererView.feedData(data,2) rendererView.refreshFrameNV21(data)
} }
@Synchronized // @Synchronized
fun setDisplayOrientation(degree:Int){ // fun setDisplayOrientation(degree:Int){
rendererView.setDisplayOrientation(degree) // rendererView.setDisplayOrientation(degree)
} // }
@Synchronized // @Synchronized
fun setYuvDataSize(width:Int,height:Int){ // fun setYuvDataSize(width:Int,height:Int){
rendererView.setYuvDataSize(width,height) // rendererView.setYuvDataSize(width,height)
} // }
} }
\ No newline at end of file
...@@ -69,13 +69,13 @@ class MultiScreenService : Service() { ...@@ -69,13 +69,13 @@ class MultiScreenService : Service() {
presentation?.feedData(data) presentation?.feedData(data)
} }
fun setDisplayOrientation(degree:Int){ // fun setDisplayOrientation(degree:Int){
presentation?.setDisplayOrientation(270) // presentation?.setDisplayOrientation(270)
} // }
//
fun setYuvDataSize(width:Int,height:Int){ // fun setYuvDataSize(width:Int,height:Int){
presentation?.setYuvDataSize(width,height) // presentation?.setYuvDataSize(width,height)
} // }
inner class MultiScreenBinder : Binder() { inner class MultiScreenBinder : Binder() {
fun getService(): MultiScreenService { fun getService(): MultiScreenService {
......
...@@ -5,6 +5,7 @@ import android.content.Context ...@@ -5,6 +5,7 @@ import android.content.Context
import android.content.Intent import android.content.Intent
import android.content.ServiceConnection import android.content.ServiceConnection
import android.os.IBinder import android.os.IBinder
import android.text.style.LineHeightSpan
import com.blankj.utilcode.util.LogUtils import com.blankj.utilcode.util.LogUtils
import com.blankj.utilcode.util.SPUtils import com.blankj.utilcode.util.SPUtils
import com.intergration.avm.presentation.MultiScreenService import com.intergration.avm.presentation.MultiScreenService
...@@ -61,7 +62,8 @@ fun Context.closeAuxiliaryPresentation(){ ...@@ -61,7 +62,8 @@ fun Context.closeAuxiliaryPresentation(){
} }
} }
fun feedData(data: ByteArray){ fun feedData(data: ByteArray,width:Int = 0,height:Int = 0){
// multiScreenService?.setYuvDataSize(width = width,height = height)
multiScreenService?.feedNv21Data(data) multiScreenService?.feedNv21Data(data)
} }
...@@ -9,8 +9,8 @@ ...@@ -9,8 +9,8 @@
android:textSize="24sp" android:textSize="24sp"
android:textColor="@android:color/holo_blue_light" android:textColor="@android:color/holo_blue_light"
android:layout_gravity="center_horizontal|top"/> android:layout_gravity="center_horizontal|top"/>
<com.intergration.avm.glsurface.MyGLSurfaceView <com.intergration.avm.glsurface.CameraGLSurfaceView
android:id="@+id/rendererView" android:id="@+id/rendererView"
android:layout_width="match_parent" android:layout_width="match_parent"
android:layout_height="match_parent"/> android:layout_height="match_parent" />
</FrameLayout> </FrameLayout>
\ No newline at end of file
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment