Camera preview on Android is usually done with a SurfaceView or a TextureView, but it can also be rendered through OpenGL ES with a GLSurfaceView. Building on that, the full power of OpenGL ES becomes available for effects such as AR overlays and filters.
Implementation Approach
An earlier article covered 2D texture mapping, and previewing the camera on a GLSurfaceView follows the same idea: draw a full-screen rectangle on the GLSurfaceView, feed the camera preview data into a texture, and map that texture onto the rectangle.
For OpenGL ES texture mapping, see the earlier article:
https://www.jianshu.com/p/4c178360d229
1. Quad-Split Camera Preview
Vertex shader
uniform mat4 textureTransform; // declared for the MVP matrix, but not used below; the quad is already given in NDC
attribute vec2 inputTextureCoordinate;
attribute vec4 position;        // vertex position in NDC
varying vec2 textureCoordinate; // texture coordinate passed on to the fragment shader
void main() {
    gl_Position = position;
    textureCoordinate = inputTextureCoordinate;
}
Fragment shader
#extension GL_OES_EGL_image_external : require
precision mediump float;
uniform samplerExternalOES videoTex;
varying vec2 textureCoordinate;
void main() {
    vec2 uv = textureCoordinate;
    // Stretch each half of the x axis back to [0, 1], so the frame repeats horizontally.
    if (uv.x <= 0.5) {
        uv.x = uv.x * 2.0;
    } else {
        uv.x = (uv.x - 0.5) * 2.0;
    }
    // Same for the y axis, giving four copies of the frame in total.
    if (uv.y <= 0.5) {
        uv.y = uv.y * 2.0;
    } else {
        uv.y = (uv.y - 0.5) * 2.0;
    }
    vec4 tc = texture2D(videoTex, fract(uv));
    // Luminance value; computed here but not used below (it would be handy for a grayscale variant).
    float color = tc.r * 0.3 + tc.g * 0.59 + tc.b * 0.11;
    gl_FragColor = vec4(tc.r, tc.g, tc.b, 1.0);
}
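Each half of either axis is stretched back to the full [0, 1] range, so the camera frame is tiled into four identical quadrants. A tiny, purely illustrative Java helper (not part of the project) mirrors the per-axis mapping the shader applies to uv.x and uv.y:

// Illustrative only: the same mapping as the shader's if/else blocks.
// Examples: 0.3 -> 0.6, 0.5 -> 1.0, 0.75 -> 0.5, 1.0 -> 1.0
static float quarterMap(float v) {
    return v <= 0.5f ? v * 2.0f : (v - 0.5f) * 2.0f;
}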
Usage
public class CameraActivity extends AppCompatActivity {
    private static final String TAG = "CameraActivity";
    private CameraGLSurface glSurface;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_camera);
        glSurface = findViewById(R.id.glSurface);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        menu.add(1, 1, 1, "Color triangle");
        menu.add(1, 2, 2, "Textured triangle");
        return super.onCreateOptionsMenu(menu);
    }

    @Override
    public boolean onOptionsItemSelected(@NonNull MenuItem item) {
        Log.e(TAG, "onOptionsItemSelected: itemId=" + item.getItemId());
        switch (item.getItemId()) {
            case 1:
                glSurface.setObjectRender(new TrianCamColorRender());
                break;
            case 2:
                glSurface.setObjectRender(new TrianCamTextureRender());
                break;
        }
        return super.onOptionsItemSelected(item);
    }
}
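The CameraGLSurface used above is not listed in this post; the real class is in the linked repository. As a rough sketch only, such a GLSurfaceView subclass could host the camera renderer and forward setObjectRender to it; the render-on-demand wiring and the constructor details here are assumptions:

// Hypothetical reconstruction of CameraGLSurface (CameraQuarRender and BaseCameraRenderer
// are shown later in this post; the actual implementation may differ).
public class CameraGLSurface extends GLSurfaceView {
    private BaseCameraRenderer renderer;

    public CameraGLSurface(Context context, AttributeSet attrs) {
        super(context, attrs);
        setEGLContextClientVersion(3);
        // Request a redraw whenever the SurfaceTexture receives a new camera frame.
        renderer = new CameraQuarRender(context, new SurfaceTexture.OnFrameAvailableListener() {
            @Override
            public void onFrameAvailable(SurfaceTexture surfaceTexture) {
                requestRender();
            }
        });
        setRenderer(renderer);
        setRenderMode(RENDERMODE_WHEN_DIRTY);
    }

    // Forwarded to the renderer so the Activity can swap the overlay object.
    public void setObjectRender(AbsObjectRender objectRender) {
        renderer.setObjectRender(objectRender);
        requestRender();
    }
}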
The quad-split preview renderer
/**
 * Quad-split camera preview renderer.
 **/
public class CameraQuarRender extends BaseCameraRenderer {
    private static final String TAG = "CameraQuarRender";
    private Context mContext;
    private CameraManeger mCameraManeger;
    private SurfaceTexture mCameraTexture;
    private SurfaceTexture.OnFrameAvailableListener listener;
    private int mProgram;
    private int uPosHandle;
    private int aTexHandle;
    private int mMVPMatrixHandle;
    // The projection and view matrices are defined in the base class so they can be
    // shared with the other render objects.
    // private float[] mProjectMatrix = new float[16];
    // private float[] mCameraMatrix = new float[16];
    private float[] mMVPMatrix = new float[16];
    private float[] mTempMatrix = new float[16];
    private float[] mPosCoordinate = {
            -1, -1,
            -1, 1,
            1, -1,
            1, 1};
    private float[] mTexCoordinate = {
            0, 1,
            1, 1,
            0, 0,
            1, 0};
    private FloatBuffer mPosBuffer;
    private FloatBuffer mTexBuffer;

    public CameraQuarRender(Context mContext, SurfaceTexture.OnFrameAvailableListener listener) {
        this.mContext = mContext;
        Matrix.setIdentityM(mProjectMatrix, 0);
        Matrix.setIdentityM(mCameraMatrix, 0);
        Matrix.setIdentityM(mMVPMatrix, 0);
        Matrix.setIdentityM(mTempMatrix, 0);
        this.listener = listener;
        mCameraManeger = new CameraManeger();
        mPosBuffer = GLDataUtil.createFloatBuffer(mPosCoordinate);
        mTexBuffer = GLDataUtil.createFloatBuffer(mTexCoordinate);
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        // Compile the vertex shader.
        String vertexShaderStr = ResReadUtils.readResource(R.raw.vertex_camera_texture);
        int vertexShaderId = ShaderUtils.compileVertexShader(vertexShaderStr);
        // Compile the fragment shader.
        String fragmentShaderStr = ResReadUtils.readResource(R.raw.fragment_camera_quarter_mirror_shade);
        int fragmentShaderId = ShaderUtils.compileFragmentShader(fragmentShaderStr);
        // Link the program.
        mProgram = ShaderUtils.linkProgram(vertexShaderId, fragmentShaderId);
        createAndBindVideoTexture();
        mCameraManeger.OpenCamera(mCameraTexture);
        // Let the base class initialize any additional render objects.
        super.onSurfaceCreated(gl, config);
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        GLES30.glViewport(0, 0, width, height);
        float ratio = (float) width / height;
        Matrix.orthoM(mProjectMatrix, 0, -ratio, ratio, -1, 1, 1, 7);
        Matrix.setLookAtM(mCameraMatrix, 0, 0, 0, 3, // eye position at z = 3
                0f, 0f, 0f,
                0f, 1.0f, 0.0f);
        // triangleRender.setProjAndCamMatrix(mProjectMatrix, mCameraMatrix);
        Matrix.multiplyMM(mMVPMatrix, 0, mProjectMatrix, 0, mCameraMatrix, 0);
        // Let the base class pass the projection/view matrices to the additional render objects.
        super.onSurfaceChanged(gl, width, height);
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
        /********** Draw the camera frame ****************/
        // Activate the program in the OpenGL ES context.
        GLES30.glUseProgram(mProgram);
        uPosHandle = GLES20.glGetAttribLocation(mProgram, "position");
        aTexHandle = GLES20.glGetAttribLocation(mProgram, "inputTextureCoordinate");
        mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "textureTransform");
        // Upload mMVPMatrix (orthoM and setLookAtM combined via multiplyMM) to textureTransform.
        GLES30.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
        mCameraTexture.updateTexImage(); // pull the latest camera preview frame into the texture
        GLES30.glVertexAttribPointer(uPosHandle, 2, GLES30.GL_FLOAT, false, 0, mPosBuffer);
        GLES30.glVertexAttribPointer(aTexHandle, 2, GLES30.GL_FLOAT, false, 0, mTexBuffer);
        GLES30.glEnableVertexAttribArray(uPosHandle);
        GLES30.glEnableVertexAttribArray(aTexHandle);
        // 4 vertices: mPosCoordinate.length / 2, since each vertex has two coordinates (x, y).
        GLES30.glDrawArrays(GLES30.GL_TRIANGLE_STRIP, 0, mPosCoordinate.length / 2);
        GLES30.glDisableVertexAttribArray(uPosHandle);
        GLES30.glDisableVertexAttribArray(aTexHandle);
        GLES30.glUseProgram(0);
        GLES20.glDepthFunc(GLES20.GL_LESS);
        /********* Start drawing the triangles *********/
        GLES20.glDepthFunc(GLES20.GL_LEQUAL);
        // Let the base class draw the additional render objects.
        super.onDrawFrame(gl);
    }

    private void createAndBindVideoTexture() {
        int[] texture = new int[1];
        GLES30.glGenTextures(1, texture, 0); // generate an OpenGL texture name
        GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture[0]); // bind it as an external OES texture and set its parameters
        GLES30.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
        GLES30.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
        GLES30.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
        GLES30.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
        // Wrap the texture in a SurfaceTexture; camera frames delivered to it end up in this texture.
        mCameraTexture = new SurfaceTexture(texture[0]);
        // Register the callback that fires whenever a new camera preview frame is available.
        mCameraTexture.setOnFrameAvailableListener(listener);
    }
}
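CameraManeger is the project's small wrapper around the camera API and is not listed here; the real class is in the linked repository and may be implemented differently (for example with Camera2). A minimal sketch using the legacy android.hardware.Camera API, only to show what OpenCamera needs to do with the SurfaceTexture:

// Hypothetical sketch of CameraManeger; requires the CAMERA permission to be granted.
public class CameraManeger {
    private static final String TAG = "CameraManeger";
    private Camera mCamera;

    public void OpenCamera(SurfaceTexture surfaceTexture) {
        try {
            mCamera = Camera.open(); // back-facing camera by default
            // Route the preview frames into the SurfaceTexture that backs the OES texture.
            mCamera.setPreviewTexture(surfaceTexture);
            mCamera.startPreview();
        } catch (IOException e) {
            Log.e(TAG, "OpenCamera failed", e);
        }
    }

    public void release() {
        if (mCamera != null) {
            mCamera.stopPreview();
            mCamera.release();
            mCamera = null;
        }
    }
}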
CameraQuarRender extends the base class BaseCameraRenderer. While previewing the camera we may want to add further drawing elements, such as props; registering and drawing those elements is handled in BaseCameraRenderer. The subclass CameraQuarRender therefore calls the base-class implementation from its onSurfaceCreated, onSurfaceChanged and onDrawFrame methods.
BaseCameraRenderer is implemented as follows:
/**
 * Base renderer for camera preview; it makes it easy to stack additional render objects
 * on top of the preview image.
 *
 * @author daijun
 * @version 2.0
 * @date 2020/6/30 15:32
 */
public abstract class BaseCameraRenderer implements GLSurfaceView.Renderer {
    private static final String TAG = "BaseCameraRenderer";
    private List<AbsObjectRender> objectRenders = new ArrayList<>();
    protected float[] mProjectMatrix = new float[16];
    protected float[] mCameraMatrix = new float[16];

    public void setObjectRender(AbsObjectRender absObjectRender) {
        objectRenders.clear();
        objectRenders.add(absObjectRender);
    }

    public void setObjectRenders(List<AbsObjectRender> absObjectRenders) {
        objectRenders.clear();
        objectRenders.addAll(absObjectRenders);
    }

    @Override
    public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {
        for (AbsObjectRender objRender : objectRenders) {
            objRender.initProgram();
        }
    }

    @Override
    public void onSurfaceChanged(GL10 gl10, int i, int i1) {
        for (AbsObjectRender objRender : objectRenders) {
            objRender.setProjAndCamMatrix(mProjectMatrix, mCameraMatrix);
        }
    }

    @Override
    public void onDrawFrame(GL10 gl10) {
        // Log.e(TAG, "onDrawFrame: onDrawFrame");
        for (AbsObjectRender objRender : objectRenders) {
            if (!objRender.isAlreadyInited()) { // not initialized yet (e.g. added after onSurfaceCreated), so initialize it here
                Log.e(TAG, "onDrawFrame: not initialized yet, initializing now");
                objRender.initProgram();
                objRender.setProjAndCamMatrix(mProjectMatrix, mCameraMatrix);
            }
            objRender.onDrawFrame();
        }
    }
}
The main purpose of BaseCameraRenderer is to keep the logic that displays the camera preview separate from the logic that adds other render elements on top of it.
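As a usage sketch (assuming a reference to the renderer instance is at hand, for example through the GLSurfaceView subclass), several overlays can be installed in one call via setObjectRenders; the renderer classes used here are the ones referenced in the Activity and shown in the next section:

// Hypothetical usage: register two overlay objects at once.
// cameraRenderer is the BaseCameraRenderer subclass driving the GLSurfaceView.
List<AbsObjectRender> overlays = new ArrayList<>();
overlays.add(new TrianCamColorRender());   // the rotating colored triangle shown below
overlays.add(new TrianCamTextureRender()); // the textured triangle used in the Activity menu
cameraRenderer.setObjectRenders(overlays);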
2. Adding Other Render Objects on Top of the Camera Preview
Render objects need to follow a common contract, so we define an abstract class:
/**
 * Abstract base class for additional drawing elements rendered on top of the camera preview.
 *
 * @author daijun
 * @version 2.0
 * @date 2020/6/30 14:04
 */
public abstract class AbsObjectRender {
    // Projection matrix.
    protected float[] projectMatrix = new float[16];
    // View (camera) matrix.
    protected float[] cameraMatrix = new float[16];
    // Shader program.
    public int mProgram = 0;

    /**
     * Called from onSurfaceCreated; the program can only be created successfully there,
     * because a current GL context is required.
     *
     * @author daijun
     * @date 2020/6/30 13:57
     */
    abstract public void initProgram();

    /**
     * Called from onSurfaceChanged; stores the projection and view matrices.
     *
     * @author daijun
     * @date 2020/6/30 13:57
     */
    public void setProjAndCamMatrix(float[] projectMatrix, float[] cameraMatrix) {
        this.projectMatrix = projectMatrix;
        this.cameraMatrix = cameraMatrix;
    }

    public boolean isAlreadyInited() {
        return !(mProgram == 0);
    }

    /**
     * Called from onDrawFrame.
     *
     * @author daijun
     * @date 2020/6/30 14:23
     */
    abstract public void onDrawFrame();
}
The projection and view matrices are the same ones used by the camera preview renderer.
One implementation of AbsObjectRender is TrianCamColorRender:
public class TrianCamColorRender extends AbsObjectRender {
    private static final String TAG = "TrianCamColorRender";
    // Three vertices forming an isosceles right triangle.
    private float vertexCoords[] = {
            0.5f, 0.5f, 0.0f,   // top
            -0.5f, -0.5f, 0.0f, // bottom left
            0.5f, -0.5f, 0.0f   // bottom right
    };
    private float colorCoords[] = {
            0.0f, 1.0f, 0.0f, 1.0f,
            1.0f, 0.0f, 0.0f, 1.0f,
            0.0f, 0.0f, 1.0f, 1.0f
    };
    // Vertex buffer.
    private FloatBuffer vertexBuffer;
    // Color buffer.
    private FloatBuffer colorBuffer;
    // Shader program (inherited from AbsObjectRender).
    // public int mProgram;
    // Temporary matrices for the triangle's transform.
    private final float[] rotationMatrix = new float[16];
    private final float[] mTriangleTempMatrix = new float[16];
    private float[] mvpMatrix = new float[16];
    // Rotation angle.
    private float angle = 0;

    public TrianCamColorRender() {
    }

    /**
     * Called from onSurfaceCreated; the program can only be created successfully there.
     *
     * @author daijun
     * @date 2020/6/30 13:57
     */
    @Override
    public void initProgram() {
        // Set up the buffers used to draw the triangle.
        vertexBuffer = GLDataUtil.createFloatBuffer(vertexCoords);
        colorBuffer = GLDataUtil.createFloatBuffer(colorCoords);
        // Compile the vertex shader.
        String verTriShaderStr = ResReadUtils.readResource(R.raw.vertex_base_matrix_shader);
        int verTriShaderId = ShaderUtils.compileVertexShader(verTriShaderStr);
        // Compile the fragment shader.
        String fragTriShaderStr = ResReadUtils.readResource(R.raw.fragment_base_common_shader);
        int fragTriShaderId = ShaderUtils.compileFragmentShader(fragTriShaderStr);
        // Link the program.
        mProgram = ShaderUtils.linkProgram(verTriShaderId, fragTriShaderId);
        if (mProgram == 0) {
            Log.e(TAG, "initProgram: initialization failed");
        } else {
            Log.e(TAG, "initProgram: initialization succeeded, program=" + mProgram);
        }
    }

    @Override
    public void onDrawFrame() {
        // Log.e(TAG, "start: drawing the triangle " + mProgram);
        GLES30.glUseProgram(mProgram);
        Matrix.setIdentityM(rotationMatrix, 0);
        Matrix.multiplyMM(mTriangleTempMatrix, 0, projectMatrix, 0, cameraMatrix, 0);
        Matrix.rotateM(rotationMatrix, 0, angle, 0, 0, 1);
        Matrix.multiplyMM(mvpMatrix, 0, mTriangleTempMatrix, 0, rotationMatrix, 0);
        // Upload the combined MVP matrix (projection x view x rotation) to vMatrix,
        // where the vertex shader multiplies it with each vertex.
        int uMatrixLocation = GLES30.glGetUniformLocation(mProgram, "vMatrix");
        GLES30.glUniformMatrix4fv(uMatrixLocation, 1, false, mvpMatrix, 0);
        int aPositionLocation = GLES30.glGetAttribLocation(mProgram, "vPosition");
        GLES30.glEnableVertexAttribArray(aPositionLocation);
        // Each vertex has x, y, z, so the component size is 3.
        GLES30.glVertexAttribPointer(aPositionLocation, 3, GLES30.GL_FLOAT, false, 0, vertexBuffer);
        int aColorLocation = GLES20.glGetAttribLocation(mProgram, "aColor");
        // Each color is r, g, b, a, so the component size is 4.
        GLES30.glVertexAttribPointer(aColorLocation, 4, GLES30.GL_FLOAT, false, 0, colorBuffer);
        // Enable the color attribute.
        GLES30.glEnableVertexAttribArray(aColorLocation);
        GLES30.glDrawArrays(GLES30.GL_TRIANGLES, 0, 3);
        // Disable the vertex attributes again.
        GLES30.glDisableVertexAttribArray(aPositionLocation);
        GLES30.glDisableVertexAttribArray(aColorLocation);
        GLES30.glUseProgram(0);
        angle += 1;
    }
}
3. Results
(Screenshots: Screenshot_2020-06-30-23-26-55-55.png, Screenshot_2020-06-30-23-27-03-54.png) As the screenshots show, render elements can be added to the camera preview dynamically.
Code:
https://github.com/godtrace12/DOpenglTest.git
Reference:
https://blog.csdn.net/qq_38261174/article/details/83140543