SurfaceHolder holder = source.getHolder();
holder.addCallback(new SurfaceHolder.Callback() {
    @Override
    public void surfaceCreated(final SurfaceHolder holder) {
        // Open the front camera. CAMERA_FACING_FRONT happens to equal 1, which is the
        // front camera's id on most devices; strictly, the id should be looked up via
        // Camera.getCameraInfo().
        Camera camera = Camera.open(Camera.CameraInfo.CAMERA_FACING_FRONT);
        try {
            // Bind the preview output to this SurfaceView's surface.
            camera.setPreviewDisplay(holder);
        } catch (IOException e) {
            e.printStackTrace();
        }
        // Rotate the on-screen preview to portrait.
        camera.setDisplayOrientation(90);
        Camera.Size previewSize = camera.getParameters().getPreviewSize();
        final int width = previewSize.width;
        final int height = previewSize.height;
        System.out.println(width + ":" + height);
        // Every preview frame (NV21 by default) is delivered to this callback.
        camera.setPreviewCallback(new Camera.PreviewCallback() {
            @Override
            public void onPreviewFrame(byte[] data, Camera camera) {
            }
        });
        camera.startPreview();
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
    }
});
Here, source is the SurfaceView.
The main steps are:
- Register a callback with SurfaceHolder.addCallback.
- In android.view.SurfaceHolder.Callback#surfaceCreated, do the camera initialization.
- Bind the SurfaceHolder via android.hardware.Camera#setPreviewDisplay.
- Call android.hardware.Camera#startPreview to start the preview.
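The surfaceDestroyed callback is left empty above; in practice the camera should be released there so that other apps can open it again. A minimal sketch, assuming the Camera instance opened in surfaceCreated is kept in a field named camera:

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        if (camera != null) {
            // Stop frame delivery first to avoid callbacks on a released camera.
            camera.setPreviewCallback(null);
            camera.stopPreview();
            camera.release();
            camera = null;
        }
    }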
In this setup the video is rendered directly onto the SurfaceView. If you need to process the frames yourself:
- Listen for every preview frame with android.hardware.Camera#setPreviewCallback.
- In that callback, draw the frame onto the SurfaceView by hand:
// Lock the SurfaceView's canvas, clear it, and draw the converted frame.
Canvas canvas = surfaceHolder.lockCanvas();
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
Bitmap cacheBitmap = nv21ToBitmap(data, width, height);
canvas.drawBitmap(cacheBitmap, 0, 0, null);
surfaceHolder.unlockCanvasAndPost(canvas);
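Note that a surface which is already the camera's preview target generally cannot also be locked with lockCanvas, so this drawing code assumes the camera is not bound to the same SurfaceHolder. One common workaround is to point the camera at an off-screen SurfaceTexture and use the SurfaceView purely for drawing. A minimal sketch of how the pieces might fit together (the texture name 10 is arbitrary, and nv21ToBitmap is the helper shown next):

    @Override
    public void surfaceCreated(final SurfaceHolder holder) {
        final Camera camera = Camera.open(Camera.CameraInfo.CAMERA_FACING_FRONT);
        try {
            // Send the camera's own output to an off-screen SurfaceTexture so the
            // SurfaceView's canvas stays available for manual drawing.
            camera.setPreviewTexture(new SurfaceTexture(10));
        } catch (IOException e) {
            e.printStackTrace();
        }
        final Camera.Size previewSize = camera.getParameters().getPreviewSize();
        camera.setPreviewCallback(new Camera.PreviewCallback() {
            @Override
            public void onPreviewFrame(byte[] data, Camera camera) {
                Canvas canvas = holder.lockCanvas();
                if (canvas == null) {
                    return; // surface not ready yet
                }
                canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
                Bitmap frame = nv21ToBitmap(data, previewSize.width, previewSize.height);
                canvas.drawBitmap(frame, 0, 0, null);
                holder.unlockCanvasAndPost(canvas);
            }
        });
        camera.startPreview();
    }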
private static Bitmap nv21ToBitmap(byte[] nv21, int width, int height) {
    Bitmap bitmap = null;
    try {
        // Wrap the raw NV21 buffer, compress it to an in-memory JPEG,
        // then decode that JPEG back into a Bitmap.
        YuvImage image = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
        ByteArrayOutputStream stream = new ByteArrayOutputStream();
        image.compressToJpeg(new Rect(0, 0, width, height), 80, stream);
        bitmap = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
        stream.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
    return bitmap;
}
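One caveat: setDisplayOrientation only rotates the preview the camera renders itself; the byte[] passed to onPreviewFrame stays in the sensor's landscape orientation. When drawing by hand, the decoded bitmap therefore usually needs its own rotation. A small helper sketch (rotateBitmap is a hypothetical name, not part of the code above):

    private static Bitmap rotateBitmap(Bitmap source, float degrees) {
        // createBitmap applies the rotation matrix while copying the pixels.
        Matrix matrix = new Matrix();
        matrix.postRotate(degrees);
        return Bitmap.createBitmap(source, 0, 0, source.getWidth(), source.getHeight(), matrix, true);
    }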