import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.SurfaceTexture;
import android.graphics.YuvImage;
import android.hardware.Camera;
import android.media.FaceDetector;
import android.os.Build;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Surface;
import android.view.TextureView;
import android.view.WindowManager;
import java.io.ByteArrayOutputStream;
import java.util.List;
public class MyFaceDetectorView extends TextureView implements TextureView.SurfaceTextureListener {
private static final String TAG = MyFaceDetectorView.class.getName();
// Deprecated android.hardware.Camera instance; accessed under synchronized(this)
// in initializeCamera()/releaseCamera().
protected Camera mCamera;
// Preview pixel format actually accepted by the camera (NV21, or YV12 on emulators).
private int mPreviewFormat = ImageFormat.NV21;
/** Creates the view programmatically. */
public MyFaceDetectorView(Context context) {
super(context);
init();
}
/** Creates the view from XML layout inflation. */
public MyFaceDetectorView(Context context,AttributeSet attrs) {
super(context,attrs);
init();
}
/** Creates the view from XML layout inflation with a default style attribute. */
public MyFaceDetectorView(Context context,AttributeSet attrs,int defStyleAttr) {
super(context,attrs,defStyleAttr);
init();
}
/**
 * Returns the preview sizes supported by the currently open camera,
 * or {@code null} when no camera has been acquired yet.
 */
public List<Camera.Size> getSupportedSizes() {
    final Camera camera = mCamera;
    return (camera == null) ? null : camera.getParameters().getSupportedPreviewSizes();
}
// Which camera to open: -1 = "any camera"; otherwise treated as a CameraInfo
// facing constant (CAMERA_FACING_BACK=0 / CAMERA_FACING_FRONT=1) and resolved
// to a concrete camera id in initializeCamera().
private int mCameraIndex = 0;
/** Shared constructor body: register for SurfaceTexture lifecycle callbacks. */
private void init() {
this.setSurfaceTextureListener(this);
}
/**
 * Opens a camera (selected by mCameraIndex), configures the preview
 * (size, format, recording hint, focus mode), binds it to this view's
 * SurfaceTexture, installs the face-detect preview callback and starts
 * the preview.
 *
 * @param width  surface width used when choosing the preview size
 * @param height surface height used when choosing the preview size
 * @return true when a camera was opened and the preview started, false otherwise
 */
protected boolean initializeCamera(int width,int height) {
// NOTE(review): Log.e is used throughout for informational messages; Log.d/Log.i
// would be more appropriate, but the log text itself is left untouched here.
Log.e(TAG,"Initialize java camera wh = " + width + "x" + height);
boolean result = true;
synchronized (this) {
mCamera = null;
// mCameraIndex == -1 means "any camera": try the legacy open(), then iterate ids.
if (mCameraIndex == -1) {
Log.e(TAG,"Trying to open camera with old open()");
try {
mCamera = Camera.open();
} catch (Exception e) {
Log.e(TAG,"Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
}
if (mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
boolean connected = false;
for (int camIdx = 0;camIdx < Camera.getNumberOfCameras();++camIdx) {
Log.e(TAG,"Trying to open camera with new open(" + Integer.valueOf(camIdx) + ")");
try {
mCamera = Camera.open(camIdx);
connected = true;
} catch (RuntimeException e) {
// NOTE(review): message is missing a space before "failed" — cosmetic log typo.
Log.e(TAG,"Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage());
}
if (connected) break;
}
}
} else {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
// Resolve the requested facing (back/front) to a concrete camera id.
int localCameraIndex = mCameraIndex;
if (mCameraIndex == Camera.CameraInfo.CAMERA_FACING_BACK) {
Log.e(TAG,"Trying to open back camera");
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int camIdx = 0;camIdx < Camera.getNumberOfCameras();++camIdx) {
Camera.getCameraInfo(camIdx,cameraInfo);
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
localCameraIndex = camIdx;
break;
}
}
} else if (mCameraIndex == Camera.CameraInfo.CAMERA_FACING_FRONT) {
Log.e(TAG,"Trying to open front camera");
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int camIdx = 0;camIdx < Camera.getNumberOfCameras();++camIdx) {
Camera.getCameraInfo(camIdx,cameraInfo);
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
localCameraIndex = camIdx;
break;
}
}
}
// NOTE(review): nothing in this method ever sets localCameraIndex to 99 or 98,
// so both branches below are unreachable. They look vestigial from OpenCV's
// JavaCameraView, which uses sentinel ids for "facing not found" — confirm
// before removing.
if (localCameraIndex == 99) {
Log.e(TAG,"Back camera not found!");
} else if (localCameraIndex == 98) {
Log.e(TAG,"Front camera not found!");
} else {
Log.e(TAG,"Trying to open camera with new open(" + Integer.valueOf(localCameraIndex) + ")");
try {
mCamera = Camera.open(localCameraIndex);
} catch (RuntimeException e) {
Log.e(TAG,"Camera #" + localCameraIndex + "failed to open: " + e.getLocalizedMessage());
}
}
}
}
if (mCamera == null)
return false;
/* Now set camera parameters */
try {
Camera.Parameters params = mCamera.getParameters();
Log.e(TAG,"getSupportedPreviewSizes()");
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
if (sizes != null) {
/* Select the size that fits surface considering maximum size allowed */
Size frameSize = calculateCameraFrameSize(sizes,new JavaCameraSizeAccessor(),width,height);
/* Image format NV21 causes issues in the Android emulators */
// Heuristic emulator detection via build fingerprint/model/brand strings.
if (Build.FINGERPRINT.startsWith("generic")
|| Build.FINGERPRINT.startsWith("unknown")
|| Build.MODEL.contains("google_sdk")
|| Build.MODEL.contains("Emulator")
|| Build.MODEL.contains("Android SDK built for x86")
|| Build.MANUFACTURER.contains("Genymotion")
|| (Build.BRAND.startsWith("generic") && Build.DEVICE.startsWith("generic"))
|| "google_sdk".equals(Build.PRODUCT))
params.setPreviewFormat(ImageFormat.YV12); // "generic" or "android" = android emulator
else
params.setPreviewFormat(ImageFormat.NV21);
// Remember what the camera actually accepted.
mPreviewFormat = params.getPreviewFormat();
Log.e(TAG,"Set preview size to " + Integer.valueOf((int)frameSize.width) + "x" + Integer.valueOf((int)frameSize.height));
params.setPreviewSize((int)frameSize.width,(int)frameSize.height);
// Recording hint can speed up preview startup; the GT-I9100 is excluded,
// presumably due to a device-specific bug — TODO confirm.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH && !Build.MODEL.equals("GT-I9100"))
params.setRecordingHint(true);
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes != null && FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
// mSurfaceTexture = new SurfaceTexture();
// Render the preview into this TextureView's SurfaceTexture.
mCamera.setPreviewTexture(MyFaceDetectorView.this.getSurfaceTexture());
} else
mCamera.setPreviewDisplay(null);
// Install the per-frame face-detection callback and fix display orientation.
setFaceDetectCallback();
/* Finally we are ready to start the preview */
Log.e(TAG,"startPreview");
mCamera.startPreview();
} else
result = false;
} catch (Exception e) {
result = false;
e.printStackTrace();
}
}
return result;
}
/**
 * Stops the preview, detaches the frame callback and releases the camera.
 * Safe to call when no camera is open; synchronized against initializeCamera().
 */
public void releaseCamera() {
synchronized (this) {
if (mCamera != null) {
mCamera.stopPreview();
// Drop the preview callback so no further frames are delivered after release.
mCamera.setPreviewCallback(null);
// NOTE(review): lock() re-acquires the camera after an unlock() (MediaRecorder
// use); nothing here ever calls unlock(), so this call looks redundant —
// confirm before removing.
mCamera.lock();
mCamera.release();
}
mCamera = null;
}
}
// Sentinel meaning "no explicit maximum frame dimension configured".
private static final int MAX_UNSPECIFIED = -1;
// Upper bounds applied when choosing the preview size in calculateCameraFrameSize().
protected int mMaxHeight = 1080;
protected int mMaxWidth = 1920;
/**
 * This helper method can be called by subclasses to select camera preview size.
 * It goes over the list of the supported preview sizes and selects the maximum one which
 * fits both values set via setMaxFrameSize() and surface frame allocated for this view.
 *
 * @param supportedSizes list of camera-specific size objects
 * @param accessor       adapter extracting width/height from a list element
 * @param surfaceWidth   width of the surface the preview renders into
 * @param surfaceHeight  height of that surface
 * @return optimal frame size (0x0 when no supported size fits the bounds)
 */
protected Size calculateCameraFrameSize(List<?> supportedSizes,ListItemAccessor accessor,int surfaceWidth,int surfaceHeight) {
    int calcWidth = 0;
    int calcHeight = 0;
    // MAX_UNSPECIFIED (-1) means "no cap": fall back to the surface dimension.
    int maxAllowedWidth = (mMaxWidth != MAX_UNSPECIFIED && mMaxWidth < surfaceWidth) ? mMaxWidth : surfaceWidth;
    int maxAllowedHeight = (mMaxHeight != MAX_UNSPECIFIED && mMaxHeight < surfaceHeight) ? mMaxHeight : surfaceHeight;
    for (Object size : supportedSizes) {
        int width = accessor.getWidth(size);
        int height = accessor.getHeight(size);
        if (width <= maxAllowedWidth && height <= maxAllowedHeight) {
            // Keep the largest candidate that fits in both dimensions.
            if (width >= calcWidth && height >= calcHeight) {
                // Redundant (int) casts removed: width/height are already ints.
                calcWidth = width;
                calcHeight = height;
            }
        }
    }
    return new Size(calcWidth,calcHeight);
}
/**
 * ListItemAccessor implementation for android.hardware.Camera.Size elements.
 */
public static class JavaCameraSizeAccessor implements ListItemAccessor {
    @Override
    public int getWidth(Object obj) {
        return ((Camera.Size) obj).width;
    }

    @Override
    public int getHeight(Object obj) {
        return ((Camera.Size) obj).height;
    }
}
/**
 * Adapter that reads width/height from an opaque, camera-specific size object,
 * letting calculateCameraFrameSize() work with any size-list element type.
 */
public interface ListItemAccessor {
int getWidth(Object obj);
int getHeight(Object obj);
}
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface,int width,int height) {
// Opens and starts the camera as soon as the surface exists. Uses the view's
// own dimensions rather than the callback's width/height — they normally
// match, but NOTE(review): confirm if this view is ever scaled.
initializeCamera(getWidth(),getHeight());
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface,int width,int height) {
// Intentionally empty: the preview size is chosen once in initializeCamera()
// and is not re-negotiated when the surface size changes.
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
    releaseCamera();
    // FIX: return true so TextureView releases the SurfaceTexture itself.
    // The original returned false, which per the SurfaceTextureListener contract
    // obliges the client to call surface.release() — nothing here did, leaking
    // the SurfaceTexture.
    return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
// Intentionally empty: frames are consumed via the Camera preview callback,
// not via texture updates.
}
// ---- Face detection: state ----------
/**
 * Maximum number of faces to look for in one frame.
 */
private int numberOfFace = 5;
/**
 * Android SDK face-detector instance.
 */
private FaceDetector myFaceDetect;
/**
 * Array receiving the detected faces.
 */
private FaceDetector.Face[] myFace;
/**
 * Distance between the eyes of a detected face.
 */
float myEyesDistance;
/**
 * Number of faces actually detected in the last frame.
 */
int numberOfFaceDetected;
private long mScanBeginTime = 0; // time the current scan cycle started
private long mScanEndTime = 0; // time the preview frame arrived
private long mSpecPreviewTime = 0; // elapsed time between the two
/**
 * Sensor mounting angle of the camera (cached by setCameraDisplayOrientation()).
 */
private int orientionOfCamera;
// Commented-out prototype from the original author: built a FaceDetector sized
// to the view, allocated the face array, forced RGB_565 decoding (required by
// FaceDetector) and ran findFaces on a member bitmap.
// private void initFace() {
// myFaceDetect = new FaceDetector(getWidth(),getHeight(),numberOfFace);
// myFace = new FaceDetector.Face[numberOfFace];
// BitmapFactory.Options BitmapFactoryOptionsbfo = new BitmapFactory.Options();
// BitmapFactoryOptionsbfo.inPreferredConfig = Bitmap.Config.RGB_565;
// numberOfFaceDetected = myFaceDetect.findFaces(myBitmap, myFace);
// Log.i(TAG,"numberOfFaceDetected is " + numberOfFaceDetected);
// }
// Listener notified whenever at least one face is found in a preview frame.
OnFaceDetectorListener mOnFaceDetectorListener;
/** Callback fired when face detection succeeds on a preview frame. */
public interface OnFaceDetectorListener {
void onDetectored();
}
/** Registers the face-detection listener (may be null to clear). */
public void setOnFaceDetectorListener(OnFaceDetectorListener mOnFaceDetectorListener) {
this.mOnFaceDetectorListener = mOnFaceDetectorListener;
}
/**
 * Installs the preview callback that feeds each camera frame into face
 * detection, after fixing the display orientation for camera id 0.
 */
private void setFaceDetectCallback() {
    // FIX: removed an unused local — the original called mCamera.getParameters()
    // into a `parameters` variable that was never read.
    setCameraDisplayOrientation(0,mCamera); // set the preview orientation
    mCamera.setPreviewCallback(new Camera.PreviewCallback() {
        public void onPreviewFrame(byte[] data,Camera camera) {
            cameraFrame2Img(data,camera);
        }
    });
}
/**
 * Converts one NV21 preview frame to a JPEG byte array and hands it to
 * storeByteImage() for face detection; also tracks per-frame timing.
 *
 * @param data   raw preview bytes in the camera's preview format
 * @param camera camera that produced the frame (queried for the preview size)
 */
private void cameraFrame2Img(byte[] data,Camera camera) {
    Log.i(TAG,"startFaceDetection");
    mScanEndTime = System.currentTimeMillis(); // time the camera delivered this frame
    mSpecPreviewTime = mScanEndTime - mScanBeginTime; // time since the previous scan began
    Log.i(TAG,"onPreviewFrame and mSpecPreviewTime = " + String.valueOf(mSpecPreviewTime));
    Camera.Size localSize = camera.getParameters().getPreviewSize(); // preview resolution
    // FIX: replaced the magic number 17 with the named constant it encodes
    // (ImageFormat.NV21 == 17), the format YuvImage expects here.
    YuvImage localYuvImage = new YuvImage(data,ImageFormat.NV21,localSize.width,localSize.height,null);
    ByteArrayOutputStream localByteArrayOutputStream = new ByteArrayOutputStream();
    // Compress the whole YUV frame to JPEG (quality 90) via the output stream.
    localYuvImage.compressToJpeg(new android.graphics.Rect(0,0,localSize.width,localSize.height)
            ,90,localByteArrayOutputStream);
    byte[] arrayOfByte = localByteArrayOutputStream.toByteArray();
    storeByteImage(arrayOfByte);
    mScanBeginTime = System.currentTimeMillis(); // start of the next scan cycle
}
/**
 * Computes and applies the preview display orientation for the given camera,
 * following the standard SDK sample for Camera.setDisplayOrientation():
 * combine the display rotation with the sensor mounting angle, mirroring the
 * result for front-facing cameras. Also caches the sensor angle in
 * orientionOfCamera, which storeByteImage() later uses to rotate bitmaps —
 * getting this wrong breaks face detection.
 *
 * @param cameraId    id of the camera to query (0 = first camera)
 * @param paramCamera the open camera the orientation is applied to
 */
public void setCameraDisplayOrientation(int cameraId,Camera paramCamera) {
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(cameraId,info);
int rotation = ((WindowManager)getContext().getSystemService(Context.WINDOW_SERVICE))
.getDefaultDisplay().getRotation(); // current display rotation
int degrees = 0;
Log.e(TAG,"getRotation's rotation is " + String.valueOf(rotation));
// Map the Surface.ROTATION_* constant to degrees.
switch (rotation) {
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
}
orientionOfCamera = info.orientation; // cache the sensor mounting angle
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (info.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate the mirror
} else { // back-facing
result = (info.orientation - degrees + 360) % 360;
}
// Front/back handled differently (front preview is mirrored) — this is the
// standard SDK documentation demo.
paramCamera.setDisplayOrientation(result);
}
long mSpecStopTime; // timestamp when storeByteImage() starts
long mSpecCameraTime ; // elapsed time from scan start to detection start
/**
 * Decodes one JPEG preview frame, rotates it according to the camera's sensor
 * mounting angle, runs the SDK FaceDetector on it, and fires the registered
 * listener when at least one face is found.
 *
 * @param paramArrayOfByte JPEG-encoded frame produced by cameraFrame2Img()
 */
public void storeByteImage(byte[] paramArrayOfByte) {
    mSpecStopTime = System.currentTimeMillis();
    mSpecCameraTime = mSpecStopTime - mScanBeginTime;
    Log.i(TAG,"StoreByteImage and mSpecCameraTime is " + String.valueOf(mSpecCameraTime));
    BitmapFactory.Options localOptions = new BitmapFactory.Options();
    Bitmap localBitmap1 = BitmapFactory.decodeByteArray(paramArrayOfByte,0,paramArrayOfByte.length,localOptions);
    if (localBitmap1 == null) {
        // FIX: decodeByteArray returns null on a corrupt frame; the original
        // would have thrown a NullPointerException at getWidth() below.
        Log.e(TAG,"storeByteImage: could not decode preview JPEG");
        return;
    }
    int i = localBitmap1.getWidth();
    int j = localBitmap1.getHeight(); // RAW -> JPEG -> Bitmap
    Matrix localMatrix = new Matrix();
    Bitmap localBitmap2 = null;
    FaceDetector localFaceDetector = null;
    Log.e(TAG,"StoreByteImage orientionOfCamera is " +orientionOfCamera);
    // Rebuild the bitmap (and size the detector) according to the sensor angle.
    switch (orientionOfCamera) {
        case 0:
            localFaceDetector = new FaceDetector(i,j,1);
            localMatrix.postRotate(0.0F,i / 2,j / 2);
            localBitmap2 = Bitmap.createBitmap(i,j,Bitmap.Config.RGB_565);
            break;
        case 90:
            localFaceDetector = new FaceDetector(j,i,1); // width/height swapped
            localMatrix.postRotate(-270.0F,j / 2,i / 2); // -270 degrees == +90 degrees
            localBitmap2 = Bitmap.createBitmap(j,i,Bitmap.Config.RGB_565);
            break;
        case 180:
            localFaceDetector = new FaceDetector(i,j,1);
            localMatrix.postRotate(-180.0F,i / 2,j / 2);
            localBitmap2 = Bitmap.createBitmap(i,j,Bitmap.Config.RGB_565);
            break;
        case 270:
            localFaceDetector = new FaceDetector(j,i,1);
            localMatrix.postRotate(-90.0F,j / 2,i / 2);
            localBitmap2 = Bitmap.createBitmap(j,i,Bitmap.Config.RGB_565); // blank target bitmap
            break;
    }
    if (localFaceDetector == null || localBitmap2 == null) {
        // FIX: sensor angles outside {0,90,180,270} left both locals null and
        // the original crashed at findFaces(); bail out cleanly instead.
        Log.e(TAG,"storeByteImage: unsupported camera orientation " + orientionOfCamera);
        localBitmap1.recycle();
        return;
    }
    FaceDetector.Face[] arrayOfFace = new FaceDetector.Face[1];
    Paint localPaint1 = new Paint();
    localPaint1.setDither(true);
    // FIX: removed an unused green stroke Paint the original configured but
    // never drew with.
    // Draw the decoded frame through the rotation matrix into the bitmap the
    // detector will scan.
    Canvas localCanvas = new Canvas();
    localCanvas.setBitmap(localBitmap2);
    localCanvas.setMatrix(localMatrix);
    localCanvas.drawBitmap(localBitmap1,0.0F,0.0F,localPaint1);
    numberOfFaceDetected = localFaceDetector.findFaces(localBitmap2,arrayOfFace); // run detection
    localBitmap2.recycle();
    localBitmap1.recycle(); // release bitmap memory promptly
    if (numberOfFaceDetected > 0 && null != mOnFaceDetectorListener) {
        mOnFaceDetectorListener.onDetectored();
    }
}
/**
 * Simple mutable width/height pair (doubles), modeled on org.opencv.core.Size,
 * used as the return type of calculateCameraFrameSize().
 */
public static class Size {
    public double width, height;

    public Size(double width, double height) {
        this.width = width;
        this.height = height;
    }

    public Size() {
        this(0, 0);
    }

    /** Builds a size from the first two entries of {@code vals} (missing => 0). */
    public Size(double[] vals) {
        set(vals);
    }

    /** Assigns width/height from {@code vals}; null or short arrays default to 0. */
    public void set(double[] vals) {
        if (vals != null) {
            width = vals.length > 0 ? vals[0] : 0;
            height = vals.length > 1 ? vals[1] : 0;
        } else {
            width = 0;
            height = 0;
        }
    }

    public double area() {
        return width * height;
    }

    public boolean empty() {
        return width <= 0 || height <= 0;
    }

    /** Returns an OpenCV copy of this size (return type kept for compatibility). */
    public org.opencv.core.Size clone() {
        return new org.opencv.core.Size(width, height);
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        long temp;
        temp = Double.doubleToLongBits(height);
        result = prime * result + (int) (temp ^ (temp >>> 32));
        temp = Double.doubleToLongBits(width);
        result = prime * result + (int) (temp ^ (temp >>> 32));
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        // FIX: the original only accepted org.opencv.core.Size here, so two equal
        // instances of THIS class always compared unequal — breaking the
        // equals/hashCode contract. Accept this class first; keep the old OpenCV
        // comparison for backward compatibility with existing callers.
        if (obj instanceof Size) {
            Size it = (Size) obj;
            return width == it.width && height == it.height;
        }
        if (obj instanceof org.opencv.core.Size) {
            org.opencv.core.Size it = (org.opencv.core.Size) obj;
            return width == it.width && height == it.height;
        }
        return false;
    }

    @Override
    public String toString() {
        return (int)width + "x" + (int)height;
    }
}
}
// (stray scraped-page text after the class — "网友评论" ("reader comments") — commented out so the file compiles)