一.布局文件
<?xml version="1.0" encoding="utf-8"?>
<!-- Main screen: full-screen camera preview with a speed selector row and a
     press-and-hold record button stacked above the bottom edge. -->
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity">
<!-- GLSurfaceView subclass that renders the camera preview (fills the screen). -->
<com.luisliuyi.demo.opengl.CameraView
android:id="@+id/cameraView"
android:layout_width="match_parent"
android:layout_height="match_parent" />
<!-- Playback/record speed selector; sits just above the record button.
     Each RadioButton hides the default circle (button="@null") and relies on
     drawable selectors for checked-state background/text color. -->
<RadioGroup
android:id="@+id/rg_speed"
android:layout_width="wrap_content"
android:layout_height="35dp"
android:layout_above="@+id/btn_record"
android:layout_centerHorizontal="true"
android:layout_marginBottom="20dp"
android:background="@drawable/record_speed_background"
android:orientation="horizontal">
<!-- "extra slow" -->
<RadioButton
android:id="@+id/btn_extra_slow"
android:layout_width="50dp"
android:layout_height="match_parent"
android:background="@drawable/record_speed_button_background"
android:button="@null"
android:gravity="center"
android:text="极慢"
android:textColor="@drawable/record_speed_button_text" />
<!-- "slow" -->
<RadioButton
android:id="@+id/btn_slow"
android:layout_width="50dp"
android:layout_height="match_parent"
android:background="@drawable/record_speed_button_background"
android:button="@null"
android:gravity="center"
android:text="慢"
android:textColor="@drawable/record_speed_button_text" />
<!-- "normal" — checked by default, matching CameraView's Speed.MODE_NORMAL default. -->
<RadioButton
android:id="@+id/btn_normal"
android:layout_width="50dp"
android:layout_height="match_parent"
android:background="@drawable/record_speed_button_background"
android:button="@null"
android:checked="true"
android:gravity="center"
android:text="标准"
android:textColor="@drawable/record_speed_button_text" />
<!-- "fast" -->
<RadioButton
android:id="@+id/btn_fast"
android:layout_width="50dp"
android:layout_height="match_parent"
android:background="@drawable/record_speed_button_background"
android:button="@null"
android:gravity="center"
android:text="快"
android:textColor="@drawable/record_speed_button_text" />
<!-- "extra fast" -->
<RadioButton
android:id="@+id/btn_extra_fast"
android:layout_width="50dp"
android:layout_height="match_parent"
android:background="@drawable/record_speed_button_background"
android:button="@null"
android:gravity="center"
android:text="极快"
android:textColor="@drawable/record_speed_button_text" />
</RadioGroup>
<!-- Press-and-hold record button ("按住拍" = "hold to record"). -->
<com.luisliuyi.demo.opengl.widget.RecordButton
android:id="@+id/btn_record"
android:layout_width="80dp"
android:layout_height="80dp"
android:layout_alignParentBottom="true"
android:layout_centerHorizontal="true"
android:layout_marginBottom="20dp"
android:background="@drawable/record_button_background"
android:gravity="center"
android:text="按住拍"/>
</RelativeLayout>
二.MainActivity
/**
 * Entry activity: hosts the camera preview, wires the record button to
 * start/stop recording, and maps the speed RadioGroup onto CameraView speeds.
 */
public class MainActivity extends AppCompatActivity implements RecordButton.OnRecordListener, RadioGroup.OnCheckedChangeListener {

    /** All runtime permissions needed before recording can work. */
    private static final String[] REQUIRED_PERMISSIONS = {
            Manifest.permission.READ_EXTERNAL_STORAGE,
            Manifest.permission.WRITE_EXTERNAL_STORAGE,
            Manifest.permission.CAMERA
    };
    private static final int PERMISSION_REQUEST_CODE = 1;

    private CameraView cameraView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        checkPermission();
        cameraView = findViewById(R.id.cameraView);
        RecordButton btn_record = findViewById(R.id.btn_record);
        btn_record.setOnRecordListener(this);
        // Speed selector.
        RadioGroup rgSpeed = findViewById(R.id.rg_speed);
        rgSpeed.setOnCheckedChangeListener(this);
    }

    /**
     * Checks that all recording permissions are granted, requesting any that
     * are missing.
     *
     * Fix: the original only checked WRITE_EXTERNAL_STORAGE (while requesting
     * three permissions) and unconditionally returned false.
     *
     * @return true if every permission in {@link #REQUIRED_PERMISSIONS} is
     *         already granted, false if a request was issued.
     */
    public boolean checkPermission() {
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
            // Pre-M permissions are granted at install time.
            return true;
        }
        for (String permission : REQUIRED_PERMISSIONS) {
            if (checkSelfPermission(permission) != PackageManager.PERMISSION_GRANTED) {
                requestPermissions(REQUIRED_PERMISSIONS, PERMISSION_REQUEST_CODE);
                return false;
            }
        }
        return true;
    }

    @Override
    public void onRecordStart() {
        cameraView.startRecord();
    }

    @Override
    public void onRecordStop() {
        cameraView.stopRecord();
    }

    /** Maps the checked speed button onto the corresponding CameraView speed mode. */
    @Override
    public void onCheckedChanged(RadioGroup group, int checkedId) {
        switch (checkedId) {
            case R.id.btn_extra_slow:
                cameraView.setSpeed(CameraView.Speed.MODE_EXTRA_SLOW);
                break;
            case R.id.btn_slow:
                cameraView.setSpeed(CameraView.Speed.MODE_SLOW);
                break;
            case R.id.btn_normal:
                cameraView.setSpeed(CameraView.Speed.MODE_NORMAL);
                break;
            case R.id.btn_fast:
                cameraView.setSpeed(CameraView.Speed.MODE_FAST);
                break;
            case R.id.btn_extra_fast:
                cameraView.setSpeed(CameraView.Speed.MODE_EXTRA_FAST);
                break;
        }
    }
}
三.CameraView
/**
 * GLSurfaceView that shows the camera preview through {@link CameraRender}
 * and exposes start/stop recording with a selectable speed multiplier.
 */
public class CameraView extends GLSurfaceView {

    private CameraRender renderer;

    public CameraView(Context context) {
        // Fix: the original one-arg constructor skipped all initialization,
        // leaving `renderer` null (NPE in startRecord/stopRecord) when the
        // view is created programmatically. Delegate to the full constructor.
        this(context, null);
    }

    public CameraView(Context context, AttributeSet attrs) {
        super(context, attrs);
        setEGLContextClientVersion(2);
        renderer = new CameraRender(this);
        setRenderer(renderer);
        // Only redraw when a new camera frame arrives (requestRender()).
        setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
    }

    /** Recording speed modes; MODE_NORMAL is real-time. */
    public enum Speed {
        MODE_EXTRA_SLOW, MODE_SLOW, MODE_NORMAL, MODE_FAST, MODE_EXTRA_FAST
    }

    private Speed mSpeed = Speed.MODE_NORMAL;

    public void setSpeed(Speed speed) {
        this.mSpeed = speed;
    }

    /** Starts recording, translating the current mode into a float multiplier. */
    public void startRecord() {
        float speed = 1.f;
        switch (mSpeed) {
            case MODE_EXTRA_SLOW:
                speed = 0.3f;
                break;
            case MODE_SLOW:
                speed = 0.5f;
                break;
            case MODE_NORMAL:
                speed = 1.f;
                break;
            case MODE_FAST:
                speed = 2.f;
                break;
            case MODE_EXTRA_FAST:
                speed = 3.f;
                break;
        }
        renderer.startRecord(speed);
    }

    public void stopRecord() {
        renderer.stopRecord();
    }
}
四.AbstractFilter
/**
 * Base OpenGL filter: compiles a shader program from raw resources and draws a
 * full-viewport quad sampling from a 2D texture.
 *
 * Subclasses hook {@link #beforeDraw()} to upload extra uniforms.
 */
public abstract class AbstractFilter {
    protected int program;
    private int vPosition;
    private FloatBuffer textureBuffer;
    private FloatBuffer vertexBuffer;
    private int vCoord;
    private int vTexture;
    protected int mWidth;
    protected int mHeight;
    // Full-screen quad in NDC, drawn as a triangle strip.
    private float[] VERTEX = {
            -1.0f, -1.0f,
            1.0f, -1.0f,
            -1.0f, 1.0f,
            1.0f, 1.0f
    };
    // Texture coordinates matching VERTEX vertex order.
    private float[] TEXTURE = {
            0.0f, 0.0f,
            1.0f, 0.0f,
            0.0f, 1.0f,
            1.0f, 1.0f
    };

    /**
     * Compiles and links the shader program and caches attribute/uniform
     * locations ("vPosition", "vCoord", "vTexture").
     */
    public AbstractFilter(Context context, int vertexShaderId, int fragmentShaderId) {
        // 4 vertices * 2 components * 4 bytes per float.
        vertexBuffer = ByteBuffer.allocateDirect(4 * 4 * 2).order(ByteOrder.nativeOrder()).asFloatBuffer();
        vertexBuffer.clear();
        vertexBuffer.put(VERTEX);
        textureBuffer = ByteBuffer.allocateDirect(4 * 4 * 2).order(ByteOrder.nativeOrder()).asFloatBuffer();
        textureBuffer.clear();
        textureBuffer.put(TEXTURE);
        String vertexSharder = OpenGLUtils.readRawTextFile(context, vertexShaderId);
        String fragSharder = OpenGLUtils.readRawTextFile(context, fragmentShaderId);
        program = OpenGLUtils.loadProgram(vertexSharder, fragSharder);
        vPosition = GLES20.glGetAttribLocation(program, "vPosition");
        vCoord = GLES20.glGetAttribLocation(program, "vCoord");
        vTexture = GLES20.glGetUniformLocation(program, "vTexture");
    }

    /** Records the viewport size used by {@link #onDraw(int)}. */
    public void setSize(int width, int height) {
        mWidth = width;
        mHeight = height;
    }

    /**
     * Draws the quad sampling from {@code texture} on texture unit 0.
     *
     * @return the input texture id (FBO subclasses override to return their
     *         render target instead).
     */
    public int onDraw(int texture) {
        GLES20.glViewport(0, 0, mWidth, mHeight);
        GLES20.glUseProgram(program);
        vertexBuffer.position(0);
        GLES20.glVertexAttribPointer(vPosition, 2, GLES20.GL_FLOAT, false, 0, vertexBuffer);
        GLES20.glEnableVertexAttribArray(vPosition);
        textureBuffer.position(0);
        GLES20.glVertexAttribPointer(vCoord, 2, GLES20.GL_FLOAT, false, 0, textureBuffer);
        GLES20.glEnableVertexAttribArray(vCoord);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture);
        GLES20.glUniform1i(vTexture, 0);
        beforeDraw();
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        // Unbind and disable to avoid leaking state into the next draw call.
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
        GLES20.glDisableVertexAttribArray(vPosition);
        GLES20.glDisableVertexAttribArray(vCoord);
        return texture;
    }

    /** Hook for subclasses to upload uniforms right before the draw call. */
    protected abstract void beforeDraw();

    /** Releases the GL program; call on the GL thread. */
    protected void release() {
        GLES20.glDeleteProgram(program);
    }
}
六.AbstractFboFilter
/**
 * Filter that renders into an off-screen framebuffer (FBO) instead of the
 * default framebuffer, returning its color-attachment texture so filters can
 * be chained.
 */
public class AbstractFboFilter extends AbstractFilter {
    int[] frameBuffer;
    int[] frameTextures;

    public AbstractFboFilter(Context context, int vertexShaderId, int fragmentShaderId) {
        super(context, vertexShaderId, fragmentShaderId);
    }

    /**
     * (Re)creates the FBO and its backing texture at the new size.
     * Safe to call repeatedly: previous GL objects are released first.
     */
    @Override
    public void setSize(int width, int height) {
        super.setSize(width, height);
        releaseFrame();
        frameBuffer = new int[1];
        GLES20.glGenFramebuffers(1, frameBuffer, 0);
        // Generate the texture that will back the FBO's color attachment.
        frameTextures = new int[1];
        GLES20.glGenTextures(frameTextures.length, frameTextures, 0);
        for (int i = 0; i < frameTextures.length; i++) {
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, frameTextures[i]);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST); // magnification filter
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);  // minification filter
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
        }
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, frameTextures[0]);
        // Allocate uninitialized RGBA storage (data == null); the FBO draw fills it.
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE,
                null);
        // Attach the texture as the FBO's color buffer.
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer[0]);
        GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER,
                GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D,
                frameTextures[0],
                0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
    }

    @Override
    protected void beforeDraw() {
    }

    /**
     * Renders into the FBO and returns the texture that now holds the result.
     */
    @Override
    public int onDraw(int texture) {
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer[0]);
        super.onDraw(texture);
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
        return frameTextures[0];
    }

    /**
     * Fix: also release FBO resources when the filter itself is released, and
     * null out frameBuffer after deletion (the original only nulled
     * frameTextures, risking a double-delete on repeated setSize calls).
     */
    @Override
    protected void release() {
        releaseFrame();
        super.release();
    }

    private void releaseFrame() {
        if (frameTextures != null) {
            GLES20.glDeleteTextures(1, frameTextures, 0);
            frameTextures = null;
        }
        if (frameBuffer != null) {
            GLES20.glDeleteFramebuffers(1, frameBuffer, 0);
            frameBuffer = null;
        }
    }
}
七.CameraRender
/**
 * GLSurfaceView.Renderer that pulls camera frames from a SurfaceTexture,
 * runs them through the camera + record filter chain, and feeds the resulting
 * texture into the MediaRecorder.
 */
public class CameraRender implements GLSurfaceView.Renderer, Preview.OnPreviewOutputUpdateListener, SurfaceTexture.OnFrameAvailableListener {
    private CameraView cameraView;
    private CameraHelper cameraHelper;
    private SurfaceTexture mCameraTexure;
    private int[] textures;
    float[] mtx = new float[16];
    private CameraFilter cameraFilter;
    private RecordFilter recordFilter;
    private MediaRecorder mRecorder;

    public CameraRender(CameraView cameraView) {
        this.cameraView = cameraView;
        LifecycleOwner lifecycleOwner = (LifecycleOwner) cameraView.getContext();
        // Open the camera; preview frames arrive via onUpdated().
        cameraHelper = new CameraHelper(lifecycleOwner, this);
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        // Fix: actually generate a texture name. The original allocated the
        // array but never called glGenTextures, attaching the camera stream
        // to texture id 0.
        textures = new int[1];
        GLES20.glGenTextures(1, textures, 0);
        // Share the camera's SurfaceTexture data with the GPU via this texture.
        // NOTE(review): mCameraTexure is set asynchronously in onUpdated();
        // this assumes the preview output arrives before surface creation —
        // confirm against CameraHelper's callback ordering.
        mCameraTexure.attachToGLContext(textures[0]);
        // Request a render whenever a new camera frame is available.
        mCameraTexure.setOnFrameAvailableListener(this);
        Context context = cameraView.getContext();
        cameraFilter = new CameraFilter(context);
        recordFilter = new RecordFilter(context);
        File file = new File(Environment.getExternalStorageDirectory(), "input.mp4");
        if (file.exists()) {
            file.delete();
        }
        String path = file.getAbsolutePath();
        mRecorder = new MediaRecorder(cameraView.getContext(), path,
                EGL14.eglGetCurrentContext(),
                480, 640);
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        cameraFilter.setSize(width, height);
        recordFilter.setSize(width, height);
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        // Latch the newest camera frame into the GL texture.
        mCameraTexure.updateTexImage();
        mCameraTexure.getTransformMatrix(mtx);
        cameraFilter.setTransformMatrix(mtx);
        // Filter chain: camera (OES -> FBO) then record filter; the final
        // texture id is handed to the recorder with the frame timestamp.
        int id = cameraFilter.onDraw(textures[0]);
        id = recordFilter.onDraw(id);
        mRecorder.fireFrame(id, mCameraTexure.getTimestamp());
    }

    @Override
    public void onUpdated(Preview.PreviewOutput output) {
        // Camera preview frames are delivered through this SurfaceTexture.
        mCameraTexure = output.getSurfaceTexture();
    }

    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        cameraView.requestRender();
    }

    public void startRecord(float speed) {
        try {
            mRecorder.start(speed);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public void stopRecord() {
        mRecorder.stop();
    }
}
八.CameraFilter
/**
 * FBO filter for the camera stage: uploads the SurfaceTexture transform
 * matrix as the "vMatrix" uniform before each draw.
 */
public class CameraFilter extends AbstractFboFilter {

    private int vMatrix;
    private float[] mtx;

    public CameraFilter(Context context) {
        super(context, R.raw.camera_vert, R.raw.camera_frag);
        vMatrix = GLES20.glGetUniformLocation(program, "vMatrix");
    }

    /** Stores the transform matrix to be uploaded on the next draw. */
    public void setTransformMatrix(float[] mtx) {
        this.mtx = mtx;
    }

    @Override
    public void beforeDraw() {
        super.beforeDraw();
        // Upload the camera's texture transform so sampling is oriented correctly.
        GLES20.glUniformMatrix4fv(vMatrix, 1, false, mtx, 0);
    }
}
九.RecordFilter
/**
 * Pass-through filter drawn to the default framebuffer using the base
 * shaders; no extra uniforms are needed.
 */
public class RecordFilter extends AbstractFilter {

    public RecordFilter(Context context) {
        super(context, R.raw.base_vert, R.raw.base_frag);
    }

    @Override
    protected void beforeDraw() {
        // Nothing to upload before drawing.
    }
}
十.MediaRecorder
/**
 * Encodes filtered GL frames to H.264 via MediaCodec. Frames are drawn into
 * the codec's input Surface on a dedicated GL thread (EGLEnv), then drained
 * and written out through FileUtils.
 */
public class MediaRecorder {
    private int mWidth;
    private int mHeight;
    private String mPath;
    private MediaCodec mMediaCodec;
    private Surface mSurface;
    private Handler mHandler;
    private EGLEnv eglEnv;
    private Context mContext;
    private EGLContext mGlContext;
    private boolean isStart;
    private long startTime;
    MediaMuxer mMediaMuxer;

    /**
     * @param glContext the renderer's EGL context, shared so the encoder's
     *                  GL thread can sample the filter-chain texture.
     */
    public MediaRecorder(Context context, String path, EGLContext glContext, int width, int
            height) {
        mContext = context.getApplicationContext();
        mGlContext = glContext;
        mPath = path;
        mWidth = width;
        mHeight = height;
    }

    /**
     * Configures and starts the encoder and spins up the GL thread.
     *
     * NOTE(review): {@code speed} is currently unused here — presumably it
     * should scale the presentation timestamps; confirm intended behavior.
     */
    public void start(float speed) throws IOException {
        Log.e("liuyi", "start");
        MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC,
                mWidth, mHeight);
        // Input comes from a Surface, not byte buffers.
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities
                .COLOR_FormatSurface);
        // Bit rate.
        format.setInteger(MediaFormat.KEY_BIT_RATE, 1500_000);
        // Frame rate.
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 25);
        // Keyframe interval (seconds).
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10);
        // Create and configure the encoder.
        mMediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
        mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mSurface = mMediaCodec.createInputSurface();
        // mMediaMuxer = new MediaMuxer(mPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        mMediaCodec.start();
        // Create the OpenGL environment on its own thread.
        HandlerThread handlerThread = new HandlerThread("codec-gl");
        handlerThread.start();
        mHandler = new Handler(handlerThread.getLooper());
        mHandler.post(new Runnable() {
            @Override
            public void run() {
                eglEnv = new EGLEnv(mContext, mGlContext, mSurface, mWidth, mHeight);
                isStart = true;
            }
        });
    }

    /** Draws one frame into the encoder surface and drains available output. */
    public void fireFrame(final int textureId, final long timestamp) {
        if (!isStart) {
            return;
        }
        mHandler.post(new Runnable() {
            public void run() {
                eglEnv.draw(textureId, timestamp);
                // Drain whatever the encoder has produced so far.
                codec(false);
            }
        });
    }

    /**
     * Drains encoded output from the codec.
     *
     * Fix: the original looped forever — dequeueOutputBuffer returning
     * INFO_TRY_AGAIN_LATER (timeout) never broke out of {@code while(true)},
     * and the end-of-stream flag was never checked.
     *
     * @param endOfStream true to signal EOS and drain until the codec emits
     *                    its BUFFER_FLAG_END_OF_STREAM buffer.
     */
    private void codec(boolean endOfStream) {
        if (endOfStream) {
            // Tell the encoder no more frames are coming.
            mMediaCodec.signalEndOfInputStream();
        }
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        while (true) {
            int index = mMediaCodec.dequeueOutputBuffer(bufferInfo, 10_000);
            if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
                if (!endOfStream) {
                    // No output ready and we are not flushing — come back later.
                    break;
                }
                // Flushing at EOS: keep waiting for the final buffers.
            } else if (index >= 0) {
                ByteBuffer buffer = mMediaCodec.getOutputBuffer(index);
                byte[] outData = new byte[bufferInfo.size];
                buffer.get(outData);
                if (startTime == 0) {
                    // Microseconds to milliseconds.
                    startTime = bufferInfo.presentationTimeUs / 1000;
                }
                FileUtils.writeContent(outData);
                // Output includes the H.264 start-code separators.
                FileUtils.writeBytes(outData);
                mMediaCodec.releaseOutputBuffer(index, false);
                if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    break;
                }
            }
            // Other negative codes (format/buffers changed) are ignored and retried.
        }
    }

    /** Stops recording: drains remaining output, then releases codec and EGL. */
    public void stop() {
        isStart = false;
        mHandler.post(new Runnable() {
            @Override
            public void run() {
                codec(true);
                mMediaCodec.stop();
                mMediaCodec.release();
                mMediaCodec = null;
                eglEnv.release();
                eglEnv = null;
                mSurface = null;
                mHandler.getLooper().quitSafely();
                mHandler = null;
            }
        });
    }
}
十一.EGLEnv
public class EGLEnv {
private EGLDisplay mEglDisplay;
private final EGLConfig mEglConfig;
private EGLContext mEglContext;
private final EGLSurface mEglSurface;
private ScreenFilter screenFilter;
public EGLEnv(Context context, EGLContext mGlContext, Surface surface, int width, int height) {
// 获得显示窗口
mEglDisplay= EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
if (mEglDisplay == EGL14.EGL_NO_DISPLAY) {
throw new RuntimeException("eglGetDisplay failed");
}
// 初始化顯示窗口
int[] version = new int[2];
if(!EGL14.eglInitialize(mEglDisplay, version,0,version,1)) {
throw new RuntimeException("eglInitialize failed");
}
// 配置 属性选项
int[] configAttribs = {
EGL14.EGL_RED_SIZE, 8, //颜色缓冲区中红色位数
EGL14.EGL_GREEN_SIZE, 8,//颜色缓冲区中绿色位数
EGL14.EGL_BLUE_SIZE, 8, //
EGL14.EGL_ALPHA_SIZE, 8,//
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT, //opengl es 2.0
EGL14.EGL_NONE
};
int[] numConfigs = new int[1];
EGLConfig[] configs = new EGLConfig[1];
//EGL 根据属性选择一个配置
if (!EGL14.eglChooseConfig(mEglDisplay, configAttribs, 0, configs, 0, configs.length,
numConfigs, 0)) {
throw new RuntimeException("EGL error " + EGL14.eglGetError());
}
mEglConfig = configs[0];
int[] context_attrib_list = {
EGL14.EGL_CONTEXT_CLIENT_VERSION,2,
EGL14.EGL_NONE
};
mEglContext = EGL14.eglCreateContext(mEglDisplay, mEglConfig, mGlContext, context_attrib_list,0);
if (mEglContext == EGL14.EGL_NO_CONTEXT){
throw new RuntimeException("EGL error " + EGL14.eglGetError());
}
//创建EGLSurface
int[] surface_attrib_list = {
EGL14.EGL_NONE
};
mEglSurface = EGL14.eglCreateWindowSurface(mEglDisplay, mEglConfig, surface, surface_attrib_list, 0);
if (mEglSurface == null){
throw new RuntimeException("EGL error " + EGL14.eglGetError());
}
//绑定当前线程的显示器display
if (!EGL14.eglMakeCurrent(mEglDisplay,mEglSurface,mEglSurface,mEglContext)){
throw new RuntimeException("EGL error " + EGL14.eglGetError());
}
screenFilter = new ScreenFilter(context);
screenFilter.setSize(width,height);
}
public void draw(int textureId, long timestamp) {
screenFilter.onDraw(textureId);
// 给帧缓冲 时间戳
EGLExt.eglPresentationTimeANDROID(mEglDisplay,mEglSurface,timestamp);
EGL14.eglSwapBuffers(mEglDisplay,mEglSurface);
}
public void release(){
EGL14.eglDestroySurface(mEglDisplay,mEglSurface);
EGL14.eglMakeCurrent(mEglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
EGL14.EGL_NO_CONTEXT);
EGL14.eglDestroyContext(mEglDisplay, mEglContext);
EGL14.eglReleaseThread();
EGL14.eglTerminate(mEglDisplay);
screenFilter.release();
}
}
十二.分屏特效
precision mediump float;
// Split-screen fragment shader: shows the middle half of the source image
// duplicated in the top and bottom halves of the output.
varying vec2 aCoord; // interpolated texture coordinate from the vertex shader
uniform sampler2D vTexture;
void main() {
    float y = aCoord.y;
    // Remap each output half onto the source's central band [0.25, 0.75]:
    // bottom half samples y+0.25, top half samples y-0.25.
    if(y<0.5)
    {
        y+=0.25;
    }else{
        y -= 0.25;
    }
    // Sample with the remapped coordinate.
    gl_FragColor= texture2D(vTexture, vec2(aCoord.x, y));
}
public class SplitFilter extends AbstractFboFilter {
public SplitFilter(Context context) {
super(context, R.raw.base_vert, R.raw.split3_screen);
}
public int onDraw(int texture ) {
super.onDraw(texture );
return frameTextures[0];
}
}
十三.代码地址
https://gitee.com/luisliuyi/android-opengl03.git
网友评论