In the chapter Camera Filters 2: Dynamically Switching Image Filters, we used OpenGL ES to display an image and apply simple filter effects. In this chapter we reuse that implementation and build a basic camera preview with GLSurfaceView, Camera2, and OpenGL ES 2.0.
Defining and instantiating the GLSurfaceView
activity_camera2.xml
<?xml version="1.0" encoding="utf-8"?>
<com.jdf.common.widget.recycleview.EffectRelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:background="@android:color/black"
    android:orientation="vertical">

    <android.opengl.GLSurfaceView
        android:id="@+id/glsurfaceView"
        android:layout_width="match_parent"
        android:layout_height="match_parent" />

</com.jdf.common.widget.recycleview.EffectRelativeLayout>
Camera2Activity.java
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_camera2);
    glSurfaceView = findViewById(R.id.glsurfaceView);
    // Must be called before setRenderer, otherwise an IllegalStateException is thrown
    glSurfaceView.setEGLContextClientVersion(2);
    // Create the renderer built in the previous chapter
    render = new JGPUImageRenderer(new JGPUImageFilter());
    // Bind the renderer to the GLSurfaceView
    glSurfaceView.setRenderer(render);
    // Render continuously so the preview keeps refreshing
    glSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
}
Encapsulating camera operations in a JCamera2Loader class
Camera2Activity.java
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_camera2);
    glSurfaceView = findViewById(R.id.glsurfaceView);
    JLog.d("jiadongfeng1", "glSurfaceView: " + glSurfaceView);
    render = new JGPUImageRenderer(new JGPUImageFilter());
    glSurfaceView.setEGLContextClientVersion(2);
    glSurfaceView.setRenderer(render);
    glSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
    camera2Loader = new JCamera2Loader(this);
    // Called back whenever a preview frame is available
    camera2Loader.setOnPreviewFrameListener(new CameraLoader.OnPreviewFrameListener() {
        @Override
        public void onPreviewFrame(byte[] data, int width, int height) {
            render.onPreviewFrame(data, width, height);
        }
    });
}
JCamera2Loader.java
public JCamera2Loader(Activity activity) {
    mActivity = activity;
    mCameraManager = (CameraManager) mActivity.getSystemService(Context.CAMERA_SERVICE);
}

public void setOnPreviewFrameListener(OnPreviewFrameListener onPreviewFrameListener) {
    mOnPreviewFrameListener = onPreviewFrameListener;
}
Starting the camera when the UI becomes visible
Camera2Activity.onResume
@Override
protected void onResume() {
    super.onResume();
    camera2Loader.onResume(glSurfaceView.getWidth(), glSurfaceView.getHeight());
}
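Note that opening the camera requires the CAMERA runtime permission (setUpCamera below is annotated with @SuppressLint("MissingPermission")). A minimal sketch of checking and requesting it before onResume hands control to the loader; REQUEST_CAMERA and ensureCameraPermission are illustrative names, not part of the original sample, and the AndroidX imports android.Manifest, android.content.pm.PackageManager, androidx.core.app.ActivityCompat and androidx.core.content.ContextCompat are assumed:

// Sketch of a runtime permission check before the camera is opened.
// REQUEST_CAMERA is a hypothetical request code, not a field from the original sample.
private static final int REQUEST_CAMERA = 1;

private boolean ensureCameraPermission() {
    if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
            != PackageManager.PERMISSION_GRANTED) {
        // Ask the user for the permission; the result arrives in onRequestPermissionsResult.
        ActivityCompat.requestPermissions(this,
                new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA);
        return false;
    }
    return true;
}

A typical usage would be to call ensureCameraPermission() at the top of onResume and only invoke camera2Loader.onResume(...) once it returns true.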
JCamera2Loader.onResume
@Override
public void onResume(int width, int height) {
    mViewWidth = width;
    mViewHeight = height;
    JLog.d("jiadongfeng1", "onResume with WxH[%d, %d]: ", mViewWidth, mViewHeight);
    setUpCamera();
}
JCamera2Loader.setUpCamera (opens the camera)
@SuppressLint("MissingPermission")
private void setUpCamera() {
    try {
        mCameraId = getCameraId(mCameraFacing);
        mCharacteristics = mCameraManager.getCameraCharacteristics(mCameraId);
        // Determines the preview size and the orientation of the returned preview data
        setUpCameraOutputs();
        JLog.d(TAG, "Opening camera (ID: " + mCameraId + ") success.");
        mCameraManager.openCamera(mCameraId, mCameraDeviceCallback, null);
    } catch (CameraAccessException e) {
        Log.e(TAG, "Opening camera (ID: " + mCameraId + ") failed.");
        e.printStackTrace();
    }
}
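getCameraId and setUpCameraOutputs are not listed in this excerpt. A rough sketch of what they typically do follows; the field name mSensorOrientation and the size-selection logic are assumptions, and the project source chooses the size that best matches mViewWidth x mViewHeight rather than simply taking the first entry (imports android.hardware.camera2.params.StreamConfigurationMap and android.util.Size are assumed):

// Sketch: pick the camera ID whose lens facing matches the requested one.
private String getCameraId(int facing) throws CameraAccessException {
    for (String id : mCameraManager.getCameraIdList()) {
        CameraCharacteristics characteristics = mCameraManager.getCameraCharacteristics(id);
        Integer lensFacing = characteristics.get(CameraCharacteristics.LENS_FACING);
        if (lensFacing != null && lensFacing == facing) {
            return id;
        }
    }
    // Fall back to the first camera if no match is found.
    return mCameraManager.getCameraIdList()[0];
}

// Sketch: read the supported output sizes and the sensor orientation.
private void setUpCameraOutputs() {
    StreamConfigurationMap map =
            mCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    Size[] sizes = map.getOutputSizes(ImageFormat.YUV_420_888);
    // The real implementation picks the size closest to the view size;
    // here we simply take the first one for brevity.
    mPreviewSize = sizes[0];
    // mSensorOrientation is an assumed field holding the sensor orientation in degrees.
    mSensorOrientation = mCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
}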
Starting the preview
Once the camera has opened successfully, we request the camera preview:
private CameraDevice.StateCallback mCameraDeviceCallback = new CameraDevice.StateCallback() {
    @Override
    public void onOpened(@NonNull CameraDevice camera) {
        mCameraDevice = camera;
        startCaptureSession();
    }

    @Override
    public void onDisconnected(@NonNull CameraDevice camera) {
        mCameraDevice.close();
        mCameraDevice = null;
    }

    @Override
    public void onError(@NonNull CameraDevice camera, int error) {
        mCameraDevice.close();
        mCameraDevice = null;
    }
};
JCamera2Loader.startCaptureSession
private void startCaptureSession() {
    Log.d(TAG, "size: " + mPreviewSize);
    Log.d("jiadongfeng1", "Opening camera preview: " + mPreviewSize.getWidth() + "x" + mPreviewSize.getHeight());
    // Create an ImageReader to receive the preview frames
    mImageReader = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(), ImageFormat.YUV_420_888, 2);
    mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
        @Override
        public void onImageAvailable(ImageReader reader) {
            if (reader != null) {
                Image image = reader.acquireNextImage();
                if (image != null) {
                    Log.d("jiadongfeng1", "onImageAvailable: " + image.getWidth() + "x" + image.getHeight());
                    if (mOnPreviewFrameListener != null) {
                        byte[] data = ImageUtils.generateNV21Data(image);
                        // Pass the preview data to the listener registered by the Activity
                        mOnPreviewFrameListener.onPreviewFrame(data, image.getWidth(), image.getHeight());
                    }
                    // Release the image so the ImageReader can reuse its buffer
                    image.close();
                }
            }
        }
    }, null);
    try {
        // Create the capture session
        mCameraDevice.createCaptureSession(Arrays.asList(mImageReader.getSurface()), mCaptureStateCallback, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
        Log.e(TAG, "Failed to start camera session");
    }
}
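Both setOnImageAvailableListener and createCaptureSession are given a null Handler above, so their callbacks run on the calling thread's looper. A common variation is to push this work onto a background HandlerThread; the sketch below assumes the illustrative names mBackgroundThread and mBackgroundHandler, which are not fields of the original sample (imports android.os.Handler and android.os.HandlerThread assumed):

// Sketch: run ImageReader and capture-session callbacks on a background thread.
private HandlerThread mBackgroundThread;
private Handler mBackgroundHandler;

private void startBackgroundThread() {
    mBackgroundThread = new HandlerThread("CameraBackground");
    mBackgroundThread.start();
    mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}

// Then pass the handler instead of null:
// mImageReader.setOnImageAvailableListener(listener, mBackgroundHandler);
// mCameraDevice.createCaptureSession(..., mCaptureStateCallback, mBackgroundHandler);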
private CameraCaptureSession.StateCallback mCaptureStateCallback = new CameraCaptureSession.StateCallback() {
    @Override
    public void onConfigured(@NonNull CameraCaptureSession session) {
        if (mCameraDevice == null) {
            return;
        }
        mCaptureSession = session;
        try {
            // Build a capture request using the preview template
            CaptureRequest.Builder builder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            builder.addTarget(mImageReader.getSurface());
            // Enable continuous autofocus
            builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
            // Enable auto exposure with auto flash
            builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
            // Start the preview with a repeating request
            session.setRepeatingRequest(builder.build(), null, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void onConfigureFailed(@NonNull CameraCaptureSession session) {
        Log.e(TAG, "Failed to configure capture session.");
    }
};
When a preview frame becomes available,
mOnPreviewFrameListener.onPreviewFrame(data, image.getWidth(), image.getHeight());
is invoked, which calls back into the listener registered in the Activity.
Handling the preview callback
- As shown in the code above, when the callback fires, the following code is invoked:
// Camera2Activity
camera2Loader.setOnPreviewFrameListener(new CameraLoader.OnPreviewFrameListener() {
    @Override
    public void onPreviewFrame(byte[] data, int width, int height) {
        render.onPreviewFrame(data, width, height);
    }
});
- JGPUImageRenderer.onPreviewFrame is then called. It mainly enqueues a task that turns the preview data into a texture ID; when onDrawFrame executes, the texture ID is passed to the shader for processing, and once processing is done the result is displayed on the GLSurfaceView bound to this renderer.
public void onPreviewFrame(final byte[] data, final int width, final int height) {
    if (glRgbBuffer == null) {
        glRgbBuffer = IntBuffer.allocate(width * height);
    }
    if (runOnDraw.isEmpty()) {
        runOnDraw(new Runnable() {
            @Override
            public void run() {
                // Convert the NV21 preview frame to RGBA
                GPUImageNativeLibrary.YUVtoRBGA(data, width, height, glRgbBuffer.array());
                // Upload the RGBA data as an OpenGL texture
                glTextureId = OpenGlUtils.loadTexture(glRgbBuffer, width, height, glTextureId);
                if (imageWidth != width) {
                    imageWidth = width;
                    imageHeight = height;
                    adjustImageScaling();
                }
            }
        });
    }
}
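The runOnDraw queue used above is drained on the GL thread. Roughly, onDrawFrame looks like the simplified sketch below; the field names filter, glCubeBuffer, and glTextureBuffer follow the GPUImage convention and may differ in the project source:

// Simplified sketch of how the queued task is consumed on the GL thread.
@Override
public void onDrawFrame(GL10 gl) {
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
    // Run the queued tasks (e.g. the YUV-to-texture upload enqueued in onPreviewFrame).
    synchronized (runOnDraw) {
        while (!runOnDraw.isEmpty()) {
            runOnDraw.poll().run();
        }
    }
    // Draw the latest texture through the current filter.
    filter.onDraw(glTextureId, glCubeBuffer, glTextureBuffer);
}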
Preview display result
Because we have not handled image rotation yet, the preview appears rotated by 90 degrees.
- Handling image rotation
private void updateGPUImageRotate() {
    Rotation rotation = getRotation(camera2Loader.getCameraOrientation());
    boolean flipHorizontal = false;
    boolean flipVertical = false;
    if (camera2Loader.isFrontCamera()) { // The front camera needs mirroring
        if (rotation == Rotation.NORMAL || rotation == Rotation.ROTATION_180) {
            flipHorizontal = true;
        } else {
            flipVertical = true;
        }
    }
    // Based on the rotation of the returned preview frames, pick the matching
    // texture coordinates, i.e. adjust the orientation at display time
    render.setRotation(rotation, flipHorizontal, flipVertical);
}
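getRotation, used above, is not listed in this excerpt; it presumably maps the camera orientation in degrees (from the sensor orientation) to the Rotation enum used by the renderer, roughly as follows (a sketch):

// Sketch: map the camera orientation in degrees to the renderer's Rotation enum.
private Rotation getRotation(int orientation) {
    switch (orientation) {
        case 90:
            return Rotation.ROTATION_90;
        case 180:
            return Rotation.ROTATION_180;
        case 270:
            return Rotation.ROTATION_270;
        default:
            return Rotation.NORMAL;
    }
}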
Preview image
Summary of the preview flow
(Image: 相机简单预览.png) The main steps are as follows:
- Define the GLSurfaceView and the renderer and bind them together; whenever the renderer has new data, it is drawn directly onto the GLSurfaceView.
- Open the camera.
- Start the preview and bind the preview output to an ImageReader.
- When the ImageReader receives each frame (in YUV format), the YUV frame is first converted to RGB, a texture ID is then generated from the converted RGB data, the texture is passed to the renderer for rendering, and the result is finally displayed on the GLSurfaceView.
Notes on the flow above:
- The preview callback data is in YUV format and has to be converted to RGB before it can become a texture ID that OpenGL can use; the helper ImageUtils.generateNV21Data, which packs the YUV_420_888 Image into an NV21 byte array, is sketched after this list.
- The GLSurfaceView is not bound to the preview stream directly. We grab every frame ourselves, convert its format (YUV to RGB), hand it to the renderer for processing, and finally display it on the GLSurfaceView bound to that renderer. The point of this separation is that we can apply filters, beautification, or any other processing to each frame before it is shown on the GLSurfaceView.
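ImageUtils.generateNV21Data itself is not listed in this chapter. A minimal sketch of what such a helper does, assuming even frame dimensions and identical row/pixel strides for the U and V planes (requires android.media.Image and java.nio.ByteBuffer); the real helper may handle more edge cases:

// Minimal sketch: pack a YUV_420_888 Image into an NV21 (Y plane + interleaved VU) byte array.
public static byte[] yuv420ToNv21(Image image) {
    int width = image.getWidth();
    int height = image.getHeight();
    byte[] nv21 = new byte[width * height * 3 / 2];
    Image.Plane yPlane = image.getPlanes()[0];
    Image.Plane uPlane = image.getPlanes()[1];
    Image.Plane vPlane = image.getPlanes()[2];

    // Copy the Y plane row by row (rowStride may be larger than width).
    ByteBuffer yBuffer = yPlane.getBuffer();
    int yRowStride = yPlane.getRowStride();
    int pos = 0;
    for (int row = 0; row < height; row++) {
        yBuffer.position(row * yRowStride);
        yBuffer.get(nv21, pos, width);
        pos += width;
    }

    // Interleave V and U samples (NV21 expects V first); assumes U and V share strides.
    ByteBuffer uBuffer = uPlane.getBuffer();
    ByteBuffer vBuffer = vPlane.getBuffer();
    int chromaRowStride = uPlane.getRowStride();
    int chromaPixelStride = uPlane.getPixelStride();
    for (int row = 0; row < height / 2; row++) {
        for (int col = 0; col < width / 2; col++) {
            int index = row * chromaRowStride + col * chromaPixelStride;
            nv21[pos++] = vBuffer.get(index);
            nv21[pos++] = uBuffer.get(index);
        }
    }
    return nv21;
}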
Code project
For the project code, see the Camera2Activity.java file in the open-source project below.