一、框架视图
（框架视图见 Main.png）
二、关键代码
FaceMaskExample
using UnityEngine;
using System.Collections;
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
using UnityEngine.SceneManagement;
#endif
namespace FaceMaskExample
{
/// <summary>
/// Menu controller for the face mask example. Each button handler jumps to
/// the corresponding demo scene.
/// </summary>
public class FaceMaskExample : MonoBehaviour
{
// Use this for initialization
void Start ()
{
}

// Update is called once per frame
void Update ()
{
}

/// <summary>
/// Button handler; repurposed to jump to the AR scene instead of the license screen.
/// </summary>
public void OnShowLicenseButton ()
{
LoadSceneCompat ("WebCamTextureAR");
}

/// <summary>
/// Jumps to the Texture2D face mask demo.
/// </summary>
public void OnTexture2DFaceMaskExample ()
{
LoadSceneCompat ("Texture2DFaceMaskExample");
}

/// <summary>
/// Jumps to the video capture face mask demo.
/// </summary>
public void OnVideoCaptureFaceMaskExample ()
{
LoadSceneCompat ("VideoCaptureFaceMaskExample");
}

/// <summary>
/// Jumps to the WebCamTexture face mask demo.
/// </summary>
public void OnWebCamTextureFaceMaskExample ()
{
LoadSceneCompat ("WebCamTextureFaceMask");
}

// Scene loading shim: SceneManager on Unity 5.3+, legacy Application.LoadLevel otherwise.
static void LoadSceneCompat (string sceneName)
{
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
SceneManager.LoadScene (sceneName);
#else
Application.LoadLevel (sceneName);
#endif
}
}
}
WebCamTextureARSample
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using UnityEngine.UI;
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
using UnityEngine.SceneManagement;
#endif
using OpenCVForUnity;
using DlibFaceLandmarkDetector;
namespace DlibFaceLandmarkDetectorSample
{
/// <summary>
/// Face tracker AR from WebCamTexture Sample.
/// Detects a face with dlib, solves the head pose with OpenCV solvePnP, and
/// overlays AR objects (axes, head model, eye/mouth particle effects).
/// This sample was referring to http://www.morethantechnical.com/2012/10/17/head-pose-estimation-with-opencv-opengl-revisited-w-code/
/// and use effect asset from http://ktk-kumamoto.hatenablog.com/entry/2014/09/14/092400
/// </summary>
[RequireComponent(typeof(WebCamTextureToMatHelper))]
public class WebCamTextureARSample : MonoBehaviour
{
/// <summary>
/// Whether the detected landmark points are drawn onto the camera image.
/// </summary>
public bool isShowingFacePoints;
/// <summary>
/// The is showing face points toggle.
/// </summary>
public Toggle isShowingFacePointsToggle;
/// <summary>
/// Whether the pose axes object is shown.
/// </summary>
public bool isShowingAxes;
/// <summary>
/// The is showing axes toggle.
/// </summary>
public Toggle isShowingAxesToggle;
/// <summary>
/// Whether the head model is shown.
/// </summary>
public bool isShowingHead;
/// <summary>
/// The is showing head toggle.
/// </summary>
public Toggle isShowingHeadToggle;
/// <summary>
/// Whether the eye/mouth particle effects are shown.
/// </summary>
public bool isShowingEffects;
/// <summary>
/// The is showing effects toggle.
/// </summary>
public Toggle isShowingEffectsToggle;
/// <summary>
/// The axes object (visualizes the estimated head pose).
/// </summary>
public GameObject axes;
/// <summary>
/// The head model object.
/// </summary>
public GameObject head;
/// <summary>
/// The right eye effect object.
/// </summary>
public GameObject rightEye;
/// <summary>
/// The left eye effect object.
/// </summary>
public GameObject leftEye;
/// <summary>
/// The mouth effect object.
/// </summary>
public GameObject mouth;
/// <summary>
/// Particle systems under the mouth object (mouth-open effect).
/// </summary>
ParticleSystem[] mouthParticleSystem;
/// <summary>
/// The texture the camera frames are rendered into.
/// </summary>
Texture2D texture;
/// <summary>
/// The dlib face landmark detector.
/// </summary>
FaceLandmarkDetector faceLandmarkDetector;
/// <summary>
/// The AR camera.
/// </summary>
public Camera ARCamera;
/// <summary>
/// The camera intrinsic matrix (3x3) used by solvePnP.
/// </summary>
Mat camMatrix;
/// <summary>
/// The distortion coefficients (all zero here).
/// </summary>
MatOfDouble distCoeffs;
/// <summary>
/// Matrix that flips the Y axis (OpenCV image coords are Y-down, Unity is Y-up).
/// </summary>
Matrix4x4 invertYM;
/// <summary>
/// The pose transformation built from rvec/tvec each frame.
/// </summary>
Matrix4x4 transformationM = new Matrix4x4 ();
/// <summary>
/// Matrix that flips the Z axis (OpenCV is right-handed, Unity is left-handed).
/// </summary>
Matrix4x4 invertZM;
/// <summary>
/// The final AR transform applied to the camera or the AR object.
/// </summary>
Matrix4x4 ARM;
/// <summary>
/// The ar game object.
/// </summary>
public GameObject ARGameObject;
/// <summary>
/// If true, the AR camera is moved instead of the AR object.
/// </summary>
public bool shouldMoveARCamera;
/// <summary>
/// The 3d face object points (model-space anchor points, in millimetre-like units).
/// </summary>
MatOfPoint3f objectPoints;
/// <summary>
/// The corresponding 2d image points picked from the detected landmarks.
/// </summary>
MatOfPoint2f imagePoints;
/// <summary>
/// The rotation vector output of solvePnP.
/// </summary>
Mat rvec;
/// <summary>
/// The translation vector output of solvePnP.
/// </summary>
Mat tvec;
/// <summary>
/// The 3x3 rotation matrix converted from rvec via Rodrigues.
/// </summary>
Mat rotM;
/// <summary>
/// The web cam texture to mat helper.
/// </summary>
WebCamTextureToMatHelper webCamTextureToMatHelper;
/// <summary>
/// Resolved filesystem path of the 68-point dlib landmark model.
/// </summary>
private string shape_predictor_68_face_landmarks_dat_filepath;
// Use this for initialization
void Start ()
{
// Sync the UI toggles with the inspector-configured defaults.
isShowingFacePointsToggle.isOn = isShowingFacePoints;
isShowingAxesToggle.isOn = isShowingAxes;
isShowingHeadToggle.isOn = isShowingHead;
isShowingEffectsToggle.isOn = isShowingEffects;
// On WebGL the model file must be fetched asynchronously; elsewhere it resolves synchronously.
#if UNITY_WEBGL && !UNITY_EDITOR
StartCoroutine(DlibFaceLandmarkDetector.Utils.getFilePathAsync("shape_predictor_68_face_landmarks.dat", (result) => {
shape_predictor_68_face_landmarks_dat_filepath = result;
Run ();
}));
#else
shape_predictor_68_face_landmarks_dat_filepath = DlibFaceLandmarkDetector.Utils.getFilePath ("shape_predictor_68_face_landmarks.dat");
Run ();
#endif
}
/// <summary>
/// Sets up the 3D model points, the solvePnP buffers, the landmark detector,
/// and kicks off webcam initialization.
/// </summary>
private void Run ()
{
//set 3d face object points.
objectPoints = new MatOfPoint3f (
new Point3 (-31, 72, 86),//l eye
new Point3 (31, 72, 86),//r eye
new Point3 (0, 40, 114),//nose
new Point3 (-20, 15, 90),//l mouse
new Point3 (20, 15, 90),//r mouse
new Point3 (-69, 76, -2),//l ear
new Point3 (69, 76, -2)//r ear
);
imagePoints = new MatOfPoint2f ();
rvec = new Mat ();
tvec = new Mat ();
rotM = new Mat (3, 3, CvType.CV_64FC1);
faceLandmarkDetector = new FaceLandmarkDetector (shape_predictor_68_face_landmarks_dat_filepath);
webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper> ();
webCamTextureToMatHelper.Init ();
}
/// <summary>
/// Raises the web cam texture to mat helper inited event.
/// Builds the display texture, an approximate camera intrinsic matrix, and the
/// OpenCV-to-Unity coordinate conversion matrices.
/// </summary>
public void OnWebCamTextureToMatHelperInited ()
{
Debug.Log ("OnWebCamTextureToMatHelperInited");
Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();
texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);
gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
// Scale the display quad so one unit maps to one camera pixel.
gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);
Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
float width = webCamTextureMat.width ();
float height = webCamTextureMat.height ();
float imageSizeScale = 1.0f;
float widthScale = (float)Screen.width / width;
float heightScale = (float)Screen.height / height;
// Fit the orthographic background camera to the screen aspect ratio.
if (widthScale < heightScale) {
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
imageSizeScale = (float)Screen.height / (float)Screen.width;
} else {
Camera.main.orthographicSize = height / 2;
}
//set cameraparam
// Approximate intrinsics: focal length = max image dimension, principal point = image center.
int max_d = (int)Mathf.Max (width, height);
double fx = max_d;
double fy = max_d;
double cx = width / 2.0f;
double cy = height / 2.0f;
camMatrix = new Mat (3, 3, CvType.CV_64FC1);
camMatrix.put (0, 0, fx);
camMatrix.put (0, 1, 0);
camMatrix.put (0, 2, cx);
camMatrix.put (1, 0, 0);
camMatrix.put (1, 1, fy);
camMatrix.put (1, 2, cy);
camMatrix.put (2, 0, 0);
camMatrix.put (2, 1, 0);
camMatrix.put (2, 2, 1.0f);
Debug.Log ("camMatrix " + camMatrix.dump ());
distCoeffs = new MatOfDouble (0, 0, 0, 0);
Debug.Log ("distCoeffs " + distCoeffs.dump ());
//calibration camera
Size imageSize = new Size (width * imageSizeScale, height * imageSizeScale);
double apertureWidth = 0;
double apertureHeight = 0;
double[] fovx = new double[1];
double[] fovy = new double[1];
double[] focalLength = new double[1];
Point principalPoint = new Point (0, 0);
double[] aspectratio = new double[1];
Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
Debug.Log ("imageSize " + imageSize.ToString ());
Debug.Log ("apertureWidth " + apertureWidth);
Debug.Log ("apertureHeight " + apertureHeight);
Debug.Log ("fovx " + fovx [0]);
Debug.Log ("fovy " + fovy [0]);
Debug.Log ("focalLength " + focalLength [0]);
Debug.Log ("principalPoint " + principalPoint.ToString ());
Debug.Log ("aspectratio " + aspectratio [0]);
//To convert the difference of the FOV value of the OpenCV and Unity.
double fovXScale = (2.0 * Mathf.Atan ((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2 ((float)cx, (float)fx) + Mathf.Atan2 ((float)(imageSize.width - cx), (float)fx));
double fovYScale = (2.0 * Mathf.Atan ((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2 ((float)cy, (float)fy) + Mathf.Atan2 ((float)(imageSize.height - cy), (float)fy));
Debug.Log ("fovXScale " + fovXScale);
Debug.Log ("fovYScale " + fovYScale);
//Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
if (widthScale < heightScale) {
ARCamera.fieldOfView = (float)(fovx [0] * fovXScale);
} else {
ARCamera.fieldOfView = (float)(fovy [0] * fovYScale);
}
// OpenCV image coords are Y-down and right-handed; Unity is Y-up and left-handed.
// These two matrices sandwich the pose to convert between the conventions.
invertYM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, -1, 1));
Debug.Log ("invertYM " + invertYM.ToString ());
invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));
Debug.Log ("invertZM " + invertZM.ToString ());
// Hide all overlays until a face is actually tracked.
axes.SetActive (false);
head.SetActive (false);
rightEye.SetActive (false);
leftEye.SetActive (false);
mouth.SetActive (false);
mouthParticleSystem = mouth.GetComponentsInChildren<ParticleSystem> (true);
}
/// <summary>
/// Raises the web cam texture to mat helper disposed event.
/// NOTE(review): camMatrix/distCoeffs are only created in the inited callback;
/// if dispose fires before init completes this throws NullReferenceException — confirm.
/// </summary>
public void OnWebCamTextureToMatHelperDisposed ()
{
Debug.Log ("OnWebCamTextureToMatHelperDisposed");
camMatrix.Dispose ();
distCoeffs.Dispose ();
}
/// <summary>
/// Raises the web cam texture to mat helper error occurred event.
/// </summary>
/// <param name="errorCode">Error code.</param>
public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode){
Debug.Log ("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
}
// Update is called once per frame.
// Per frame: detect the first face, pick anchor landmarks (68-point dlib indexing),
// solve the head pose, toggle effects, and push the pose onto the AR transform.
void Update ()
{
if (webCamTextureToMatHelper.IsPlaying () && webCamTextureToMatHelper.DidUpdateThisFrame ()) {
Mat rgbaMat = webCamTextureToMatHelper.GetMat ();
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);
//detect face rects
List<UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect ();
if (detectResult.Count > 0) {
//detect landmark points (only the first detected face is used)
List<Vector2> points = faceLandmarkDetector.DetectLandmark (detectResult [0]);
if (points.Count > 0) {
if (isShowingFacePoints)
OpenCVForUnityUtils.DrawFaceLandmark (rgbaMat, points, new Scalar (0, 255, 0, 255), 2);
// 2D anchors matched 1:1 with objectPoints; eye centers are averaged from lid landmarks.
imagePoints.fromArray (
new Point ((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2),//l eye
new Point ((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2),//r eye
new Point (points [33].x, points [33].y),//nose
new Point (points [48].x, points [48].y),//l mouth
new Point (points [54].x, points [54].y) //r mouth
,
new Point (points [0].x, points [0].y),//l ear
new Point (points [16].x, points [16].y)//r ear
);
Calib3d.solvePnP (objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
//eye effects — only when the solved pose places the face in front of the camera (tvec z > 0)
if (tvec.get (2, 0) [0] > 0) {
// Eye considered "open enough" when lid gap exceeds 1/6 of the eye width.
if (Mathf.Abs ((float)(points [43].y - points [46].y)) > Mathf.Abs ((float)(points [42].x - points [45].x)) / 6.0) {
if (isShowingEffects)
rightEye.SetActive (true);
}
if (Mathf.Abs ((float)(points [38].y - points [41].y)) > Mathf.Abs ((float)(points [39].x - points [36].x)) / 6.0) {
if (isShowingEffects)
leftEye.SetActive (true);
}
if (isShowingHead)
head.SetActive (true);
if (isShowingAxes)
axes.SetActive (true);
//mouth effect: emit particles when the mouth opening exceeds 1/5 of the nose length
float noseDistance = Mathf.Abs ((float)(points [27].y - points [33].y));
float mouseDistance = Mathf.Abs ((float)(points [62].y - points [66].y));
if (mouseDistance > noseDistance / 5.0) {
if (isShowingEffects) {
mouth.SetActive (true);
foreach (ParticleSystem ps in mouthParticleSystem) {
ps.enableEmission = true;
// Particle size scales with how wide the mouth is open.
ps.startSize = 500 * (mouseDistance / noseDistance);
}
}
} else {
if (isShowingEffects) {
foreach (ParticleSystem ps in mouthParticleSystem) {
ps.enableEmission = false;
}
}
}
// Convert rvec to a rotation matrix and assemble the 4x4 pose.
Calib3d.Rodrigues (rvec, rotM);
transformationM .SetRow (0, new Vector4 ((float)rotM.get (0, 0) [0], (float)rotM.get (0, 1) [0], (float)rotM.get (0, 2) [0], (float)tvec.get (0, 0) [0]));
transformationM.SetRow (1, new Vector4 ((float)rotM.get (1, 0) [0], (float)rotM.get (1, 1) [0], (float)rotM.get (1, 2) [0], (float)tvec.get (1, 0) [0]));
transformationM.SetRow (2, new Vector4 ((float)rotM.get (2, 0) [0], (float)rotM.get (2, 1) [0], (float)rotM.get (2, 2) [0], (float)tvec.get (2, 0) [0]));
transformationM.SetRow (3, new Vector4 (0, 0, 0, 1));
// Either move the camera (inverse pose) or the AR object (forward pose),
// sandwiched between the axis-conversion matrices.
if (shouldMoveARCamera) {
if (ARGameObject != null) {
ARM = ARGameObject.transform.localToWorldMatrix * invertZM * transformationM.inverse * invertYM;
ARUtils.SetTransformFromMatrix (ARCamera.transform, ref ARM);
ARGameObject.SetActive (true);
}
} else {
ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;
if (ARGameObject != null) {
ARUtils.SetTransformFromMatrix (ARGameObject.transform, ref ARM);
ARGameObject.SetActive (true);
}
}
}
}
}
//(disabled) draw resolution/orientation debug text at the bottom of the frame
// Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
OpenCVForUnity.Utils.matToTexture2D (rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors ());
}
}
/// <summary>
/// Raises the disable event; releases the webcam helper and the dlib detector.
/// </summary>
void OnDisable ()
{
if (webCamTextureToMatHelper != null)
webCamTextureToMatHelper.Dispose ();
if (faceLandmarkDetector != null)
faceLandmarkDetector.Dispose ();
}
/// <summary>
/// Raises the back button event; returns to the FaceMask menu scene.
/// </summary>
public void OnBackButton ()
{
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
// SceneManager.LoadScene ("DlibFaceLandmarkDetectorSample");
SceneManager.LoadScene("FaceMask");
#else
Application.LoadLevel ("FaceMask");
//Application.LoadLevel ("DlibFaceLandmarkDetectorSample");
#endif
}
/// <summary>
/// Raises the play button event.
/// </summary>
public void OnPlayButton ()
{
webCamTextureToMatHelper.Play ();
}
/// <summary>
/// Raises the pause button event.
/// </summary>
public void OnPauseButton ()
{
webCamTextureToMatHelper.Pause ();
}
/// <summary>
/// Raises the stop button event.
/// </summary>
public void OnStopButton ()
{
webCamTextureToMatHelper.Stop ();
}
/// <summary>
/// Raises the change camera button event; re-inits with the opposite facing.
/// </summary>
public void OnChangeCameraButton ()
{
webCamTextureToMatHelper.Init (null, webCamTextureToMatHelper.requestWidth, webCamTextureToMatHelper.requestHeight, !webCamTextureToMatHelper.requestIsFrontFacing);
}
/// <summary>
/// Raises the is showing face points toggle event.
/// </summary>
public void OnIsShowingFacePointsToggle ()
{
if (isShowingFacePointsToggle.isOn) {
isShowingFacePoints = true;
} else {
isShowingFacePoints = false;
}
}
/// <summary>
/// Raises the is showing axes toggle event.
/// </summary>
public void OnIsShowingAxesToggle ()
{
if (isShowingAxesToggle.isOn) {
isShowingAxes = true;
} else {
isShowingAxes = false;
axes.SetActive (false);
}
}
/// <summary>
/// Raises the is showing head toggle event.
/// </summary>
public void OnIsShowingHeadToggle ()
{
if (isShowingHeadToggle.isOn) {
isShowingHead = true;
} else {
isShowingHead = false;
head.SetActive (false);
}
}
/// <summary>
/// Raises the is showin effects toggle event.
/// </summary>
public void OnIsShowinEffectsToggle ()
{
if (isShowingEffectsToggle.isOn) {
isShowingEffects = true;
} else {
isShowingEffects = false;
rightEye.SetActive (false);
leftEye.SetActive (false);
mouth.SetActive (false);
}
}
}
}
WebCamTextureToMatHelper
using OpenCVForUnity;
using System;
using System.Collections;
using UnityEngine;
using UnityEngine.Events;
namespace DlibFaceLandmarkDetectorSample
{
/// <summary>
/// Web cam texture to mat helper.
/// Initializes a WebCamTexture, converts each frame into an OpenCV RGBA Mat,
/// and applies per-platform flip/rotation corrections.
/// </summary>
public class WebCamTextureToMatHelper : MonoBehaviour
{
/// <summary>
/// The name of the device. Null/empty picks a device by facing instead.
/// </summary>
public string requestDeviceName = null;
/// <summary>
/// The requested capture width.
/// </summary>
public int requestWidth = 640;
/// <summary>
/// The requested capture height.
/// </summary>
public int requestHeight = 480;
/// <summary>
/// Should use front facing.
/// </summary>
public bool requestIsFrontFacing = false;
/// <summary>
/// The flip vertical.
/// </summary>
public bool flipVertical = false;
/// <summary>
/// The flip horizontal.
/// </summary>
public bool flipHorizontal = false;
/// <summary>
/// Frames to wait for the camera before giving up with a TIMEOUT error.
/// </summary>
public int timeoutFrameCount = 300;
/// <summary>
/// The on inited event.
/// </summary>
public UnityEvent OnInitedEvent;
/// <summary>
/// The on disposed event.
/// </summary>
public UnityEvent OnDisposedEvent;
/// <summary>
/// The on error occurred event.
/// </summary>
public ErrorUnityEvent OnErrorOccurredEvent;
/// <summary>
/// The web cam texture.
/// </summary>
WebCamTexture webCamTexture;
/// <summary>
/// The web cam device.
/// </summary>
WebCamDevice webCamDevice;
/// <summary>
/// The rgba mat (landscape orientation buffer).
/// </summary>
Mat rgbaMat;
/// <summary>
/// The rotated rgba mat, allocated only for portrait orientations on mobile.
/// </summary>
Mat rotatedRgbaMat;
/// <summary>
/// Pixel buffer reused when copying the WebCamTexture into the Mat.
/// </summary>
Color32[] colors;
/// <summary>
/// True while the init coroutine is running (guards re-entry from Init()).
/// </summary>
bool initWaiting = false;
/// <summary>
/// The init done.
/// </summary>
bool initDone = false;
/// <summary>
/// Orientation captured at init time; a change triggers re-initialization.
/// </summary>
ScreenOrientation screenOrientation = ScreenOrientation.Unknown;
[System.Serializable]
public enum ErrorCode :int
{
CAMERA_DEVICE_NOT_EXIST = 0,
TIMEOUT = 1,
}
[System.Serializable]
public class ErrorUnityEvent : UnityEngine.Events.UnityEvent<ErrorCode>
{
}
// Update is called once per frame.
// Re-initializes the camera when the screen orientation changes.
// NOTE(review): this starts init() directly without checking initWaiting,
// so several coroutines could overlap during a rotation — confirm.
void Update ()
{
if (initDone) {
if (screenOrientation != Screen.orientation) {
StartCoroutine (init ());
}
}
}
/// <summary>
/// Init this instance.
/// </summary>
public void Init ()
{
if (initWaiting)
return;
if (OnInitedEvent == null)
OnInitedEvent = new UnityEvent ();
if (OnDisposedEvent == null)
OnDisposedEvent = new UnityEvent ();
if (OnErrorOccurredEvent == null)
OnErrorOccurredEvent = new ErrorUnityEvent ();
StartCoroutine (init ());
}
/// <summary>
/// Init this instance with explicit capture parameters.
/// </summary>
/// <param name="deviceName">Device name.</param>
/// <param name="requestWidth">Request width.</param>
/// <param name="requestHeight">Request height.</param>
/// <param name="requestIsFrontFacing">If set to <c>true</c> request is front facing.</param>
/// <param name="OnInited">On inited.</param>
public void Init (string deviceName, int requestWidth, int requestHeight, bool requestIsFrontFacing)
{
if (initWaiting)
return;
this.requestDeviceName = deviceName;
this.requestWidth = requestWidth;
this.requestHeight = requestHeight;
this.requestIsFrontFacing = requestIsFrontFacing;
if (OnInitedEvent == null)
OnInitedEvent = new UnityEvent ();
if (OnDisposedEvent == null)
OnDisposedEvent = new UnityEvent ();
if (OnErrorOccurredEvent == null)
OnErrorOccurredEvent = new ErrorUnityEvent ();
StartCoroutine (init ());
}
/// <summary>
/// Init this instance by coroutine: selects a device, starts the camera,
/// waits for the first real frame (with timeout), then allocates the Mats.
/// Note the #if/#else around the "else if" below intentionally pairs braces
/// across preprocessor branches — edit with care.
/// </summary>
private IEnumerator init ()
{
if (initDone)
dispose ();
initWaiting = true;
if (!String.IsNullOrEmpty (requestDeviceName)) {
//Debug.Log ("deviceName is "+requestDeviceName);
webCamTexture = new WebCamTexture (requestDeviceName, requestWidth, requestHeight);
} else {
//Debug.Log ("deviceName is null");
// Checks how many and which cameras are available on the device
for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
if (WebCamTexture.devices [cameraIndex].isFrontFacing == requestIsFrontFacing) {
//Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);
webCamDevice = WebCamTexture.devices [cameraIndex];
webCamTexture = new WebCamTexture (webCamDevice.name, requestWidth, requestHeight);
break;
}
}
}
// Fallback: no device matched the requested facing — take the first one, or fail.
if (webCamTexture == null) {
if (WebCamTexture.devices.Length > 0) {
webCamDevice = WebCamTexture.devices [0];
webCamTexture = new WebCamTexture (webCamDevice.name, requestWidth, requestHeight);
} else {
//Debug.Log("Camera device does not exist.");
initWaiting = false;
if (OnErrorOccurredEvent != null)
OnErrorOccurredEvent.Invoke (ErrorCode.CAMERA_DEVICE_NOT_EXIST);
yield break;
}
}
//Debug.Log ("name " + webCamTexture.name + " width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
// Starts the camera
webCamTexture.Play ();
int initCount = 0;
bool isTimeout = false;
while (true) {
if (initCount > timeoutFrameCount) {
isTimeout = true;
break;
}
// If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
else if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
else if (webCamTexture.didUpdateThisFrame) {
#if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
while (webCamTexture.width <= 16) {
if (initCount > timeoutFrameCount) {
isTimeout = true;
break;
}else {
initCount++;
}
webCamTexture.GetPixels32 ();
yield return new WaitForEndOfFrame ();
}
if (isTimeout) break;
#endif
#endif
Debug.Log ("name " + webCamTexture.name + " width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);
// Reuse the pixel buffer when its size still matches the camera resolution.
if (colors == null || colors.Length != webCamTexture.width * webCamTexture.height)
colors = new Color32[webCamTexture.width * webCamTexture.height];
rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
//Debug.Log ("Screen.orientation " + Screen.orientation);
screenOrientation = Screen.orientation;
// On mobile in portrait, frames must be rotated; allocate the transposed buffer.
#if !UNITY_EDITOR && !(UNITY_STANDALONE || UNITY_WEBGL)
if (screenOrientation == ScreenOrientation.Portrait || screenOrientation == ScreenOrientation.PortraitUpsideDown) {
rotatedRgbaMat = new Mat (webCamTexture.width, webCamTexture.height, CvType.CV_8UC4);
}
#endif
initWaiting = false;
initDone = true;
if (OnInitedEvent != null)
OnInitedEvent.Invoke ();
break;
} else {
initCount++;
yield return 0;
}
}
if (isTimeout) {
//Debug.Log("Init time out.");
webCamTexture.Stop ();
webCamTexture = null;
initWaiting = false;
if (OnErrorOccurredEvent != null)
OnErrorOccurredEvent.Invoke (ErrorCode.TIMEOUT);
}
}
/// <summary>
/// Returns whether initialization has completed.
/// </summary>
/// <returns><c>true</c>, if inited was ised, <c>false</c> otherwise.</returns>
public bool IsInited ()
{
return initDone;
}
/// <summary>
/// Play this instance.
/// </summary>
public void Play ()
{
if (initDone)
webCamTexture.Play ();
}
/// <summary>
/// Pause this instance.
/// </summary>
public void Pause ()
{
if (initDone)
webCamTexture.Pause ();
}
/// <summary>
/// Stop this instance.
/// </summary>
public void Stop ()
{
if (initDone)
webCamTexture.Stop ();
}
/// <summary>
/// Returns whether the camera is currently playing.
/// </summary>
/// <returns><c>true</c>, if playing was ised, <c>false</c> otherwise.</returns>
public bool IsPlaying ()
{
if (!initDone)
return false;
return webCamTexture.isPlaying;
}
/// <summary>
/// Gets the web cam texture (null until init completes).
/// </summary>
/// <returns>The web cam texture.</returns>
public WebCamTexture GetWebCamTexture ()
{
return (initDone) ? webCamTexture : null;
}
/// <summary>
/// Gets the web cam device.
/// </summary>
/// <returns>The web cam device.</returns>
public WebCamDevice GetWebCamDevice ()
{
return webCamDevice;
}
/// <summary>
/// Returns whether the camera produced a new frame this Unity frame.
/// </summary>
/// <returns><c>true</c>, if update this frame was dided, <c>false</c> otherwise.</returns>
public bool DidUpdateThisFrame ()
{
if (!initDone)
return false;
// Old iOS/Unity builds report a bogus didUpdateThisFrame; use the 16x16 placeholder size instead.
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
if (webCamTexture.width > 16 && webCamTexture.height > 16) {
return true;
} else {
return false;
}
#else
return webCamTexture.didUpdateThisFrame;
#endif
}
/// <summary>
/// Gets the current frame as an RGBA Mat, applying device-rotation and
/// user-requested flips. int.MinValue means "no flip".
/// </summary>
/// <returns>The mat.</returns>
public Mat GetMat ()
{
// Not ready / not playing: return the last buffer unchanged.
if (!initDone || !webCamTexture.isPlaying) {
if (rotatedRgbaMat != null) {
return rotatedRgbaMat;
} else {
return rgbaMat;
}
}
Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);
// Pick a base flip code from camera facing and the device's video rotation angle.
int flipCode = int.MinValue;
if (webCamDevice.isFrontFacing) {
if (webCamTexture.videoRotationAngle == 0) {
flipCode = 1;
} else if (webCamTexture.videoRotationAngle == 90) {
flipCode = 0;
}
if (webCamTexture.videoRotationAngle == 180) {
flipCode = 0;
} else if (webCamTexture.videoRotationAngle == 270) {
flipCode = 1;
}
} else {
if (webCamTexture.videoRotationAngle == 180) {
flipCode = -1;
} else if (webCamTexture.videoRotationAngle == 270) {
flipCode = -1;
}
}
// Compose the user-requested vertical flip with the base flip code.
if (flipVertical) {
if (flipCode == int.MinValue) {
flipCode = 0;
} else if (flipCode == 0) {
flipCode = int.MinValue;
} else if (flipCode == 1) {
flipCode = -1;
} else if (flipCode == -1) {
flipCode = 1;
}
}
// Compose the user-requested horizontal flip likewise.
if (flipHorizontal) {
if (flipCode == int.MinValue) {
flipCode = 1;
} else if (flipCode == 0) {
flipCode = -1;
} else if (flipCode == 1) {
flipCode = int.MinValue;
} else if (flipCode == -1) {
flipCode = 0;
}
}
if (flipCode > int.MinValue) {
Core.flip (rgbaMat, rgbaMat, flipCode);
}
// Portrait mode: rotate 90 degrees clockwise via transpose + horizontal flip.
if (rotatedRgbaMat != null) {
using (Mat transposeRgbaMat = rgbaMat.t ()) {
Core.flip (transposeRgbaMat, rotatedRgbaMat, 1);
}
return rotatedRgbaMat;
} else {
return rgbaMat;
}
}
/// <summary>
/// Gets the buffer colors.
/// </summary>
/// <returns>The buffer colors.</returns>
public Color32[] GetBufferColors ()
{
return colors;
}
/// <summary>
/// To release the resources for the init method.
/// </summary>
private void dispose ()
{
initWaiting = false;
initDone = false;
if (webCamTexture != null) {
webCamTexture.Stop ();
webCamTexture = null;
}
if (rgbaMat != null) {
rgbaMat.Dispose ();
rgbaMat = null;
}
if (rotatedRgbaMat != null) {
rotatedRgbaMat.Dispose ();
rotatedRgbaMat = null;
}
if (OnDisposedEvent != null)
OnDisposedEvent.Invoke ();
}
/// <summary>
/// Releases all resource used by the <see cref="WebCamTextureToMatHelper"/> object.
/// </summary>
/// <remarks>Call <see cref="Dispose"/> when you are finished using the <see cref="WebCamTextureToMatHelper"/>. The
/// <see cref="Dispose"/> method leaves the <see cref="WebCamTextureToMatHelper"/> in an unusable state. After
/// calling <see cref="Dispose"/>, you must release all references to the <see cref="WebCamTextureToMatHelper"/> so
/// the garbage collector can reclaim the memory that the <see cref="WebCamTextureToMatHelper"/> was occupying.</remarks>
public void Dispose ()
{
if (initDone)
dispose ();
colors = null;
}
}
}
WebCamTextureToMatHelper
using OpenCVForUnity;
using System;
using System.Collections;
using UnityEngine;
using UnityEngine.Events;
namespace FaceMaskExample
{
/// <summary>
/// Web cam texture to mat helper.
/// </summary>
public class WebCamTextureToMatHelper : MonoBehaviour
{
/// <summary>
/// The name of the device.
/// </summary>
public string requestDeviceName = null;
/// <summary>
/// The width.
/// </summary>
public int requestWidth = 640;
/// <summary>
/// The height.
/// </summary>
public int requestHeight = 480;
/// <summary>
/// Should use front facing.
/// </summary>
public bool requestIsFrontFacing = false;
/// <summary>
/// The flip vertical.
/// </summary>
public bool flipVertical = false;
/// <summary>
/// The flip horizontal.
/// </summary>
public bool flipHorizontal = false;
/// <summary>
/// The timeout frame count.
/// </summary>
public int timeoutFrameCount = 300;
/// <summary>
/// The on inited event.
/// </summary>
public UnityEvent OnInitedEvent;
/// <summary>
/// The on disposed event.
/// </summary>
public UnityEvent OnDisposedEvent;
/// <summary>
/// The on error occurred event.
/// </summary>
public ErrorUnityEvent OnErrorOccurredEvent;
/// <summary>
/// The web cam texture.
/// </summary>
WebCamTexture webCamTexture;
/// <summary>
/// The web cam device.
/// </summary>
WebCamDevice webCamDevice;
/// <summary>
/// The rgba mat.
/// </summary>
Mat rgbaMat;
/// <summary>
/// The rotated rgba mat
/// </summary>
Mat rotatedRgbaMat;
/// <summary>
/// The colors.
/// </summary>
Color32[] colors;
/// <summary>
/// The init waiting.
/// </summary>
bool initWaiting = false;
/// <summary>
/// The init done.
/// </summary>
bool initDone = false;
/// <summary>
/// The screenOrientation.
/// </summary>
ScreenOrientation screenOrientation = ScreenOrientation.Unknown;
[System.Serializable]
public enum ErrorCode :int
{
CAMERA_DEVICE_NOT_EXIST = 0,
TIMEOUT = 1,
}
[System.Serializable]
public class ErrorUnityEvent : UnityEngine.Events.UnityEvent<ErrorCode>
{
}
// Update is called once per frame
void Update ()
{
if (initDone) {
if (screenOrientation != Screen.orientation) {
StartCoroutine (init ());
}
}
}
/// <summary>
/// Init this instance.
/// </summary>
public void Init ()
{
if (initWaiting)
return;
if (OnInitedEvent == null)
OnInitedEvent = new UnityEvent ();
if (OnDisposedEvent == null)
OnDisposedEvent = new UnityEvent ();
if (OnErrorOccurredEvent == null)
OnErrorOccurredEvent = new ErrorUnityEvent ();
StartCoroutine (init ());
}
/// <summary>
/// Init this instance.
/// </summary>
/// <param name="deviceName">Device name.</param>
/// <param name="requestWidth">Request width.</param>
/// <param name="requestHeight">Request height.</param>
/// <param name="requestIsFrontFacing">If set to <c>true</c> request is front facing.</param>
/// <param name="OnInited">On inited.</param>
public void Init (string deviceName, int requestWidth, int requestHeight, bool requestIsFrontFacing)
{
if (initWaiting)
return;
this.requestDeviceName = deviceName;
this.requestWidth = requestWidth;
this.requestHeight = requestHeight;
this.requestIsFrontFacing = requestIsFrontFacing;
if (OnInitedEvent == null)
OnInitedEvent = new UnityEvent ();
if (OnDisposedEvent == null)
OnDisposedEvent = new UnityEvent ();
if (OnErrorOccurredEvent == null)
OnErrorOccurredEvent = new ErrorUnityEvent ();
StartCoroutine (init ());
}
/// <summary>
/// Initializes the WebCamTexture by coroutine: selects a camera device,
/// starts capture, waits for the first real frame (bounded by
/// timeoutFrameCount frames) and allocates the Color32/Mat buffers.
/// Invokes OnInitedEvent on success, OnErrorOccurredEvent on failure.
/// </summary>
private IEnumerator init ()
{
    // Re-initialization: release resources from a previous run first.
    if (initDone)
        dispose ();

    initWaiting = true;

    if (!String.IsNullOrEmpty (requestDeviceName)) {
        //Debug.Log ("deviceName is "+requestDeviceName);
        webCamTexture = new WebCamTexture (requestDeviceName, requestWidth, requestHeight);
    } else {
        //Debug.Log ("deviceName is null");
        // Checks how many and which cameras are available on the device
        for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
            if (WebCamTexture.devices [cameraIndex].isFrontFacing == requestIsFrontFacing) {
                //Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);
                webCamDevice = WebCamTexture.devices [cameraIndex];
                webCamTexture = new WebCamTexture (webCamDevice.name, requestWidth, requestHeight);
                break;
            }
        }
    }

    // Fallback: nothing matched the requested name/facing — take the first camera, if any.
    if (webCamTexture == null) {
        if (WebCamTexture.devices.Length > 0) {
            webCamDevice = WebCamTexture.devices [0];
            webCamTexture = new WebCamTexture (webCamDevice.name, requestWidth, requestHeight);
        } else {
            //Debug.Log("Camera device does not exist.");
            initWaiting = false;
            if (OnErrorOccurredEvent != null)
                OnErrorOccurredEvent.Invoke (ErrorCode.CAMERA_DEVICE_NOT_EXIST);
            yield break;
        }
    }
    //Debug.Log ("name " + webCamTexture.name + " width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

    // Starts the camera
    webCamTexture.Play ();

    // Wait until the camera actually delivers a frame, or give up after
    // timeoutFrameCount frames.
    int initCount = 0;
    bool isTimeout = false;
    while (true) {
        if (initCount > timeoutFrameCount) {
            isTimeout = true;
            break;
        }
        // If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
        #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        else if (webCamTexture.width > 16 && webCamTexture.height > 16) {
        #else
        else if (webCamTexture.didUpdateThisFrame) {
            #if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
            // Unity 5.2 on iOS may still report the 16x16 placeholder size after
            // the first frame flag; keep pumping frames until the real size arrives.
            while (webCamTexture.width <= 16) {
                if (initCount > timeoutFrameCount) {
                    isTimeout = true;
                    break;
                } else {
                    initCount++;
                }
                webCamTexture.GetPixels32 ();
                yield return new WaitForEndOfFrame ();
            }
            if (isTimeout) break;
            #endif
        #endif
            Debug.Log ("name " + webCamTexture.name + " width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

            // Allocate the pixel buffer and RGBA Mat to the final frame size.
            if (colors == null || colors.Length != webCamTexture.width * webCamTexture.height)
                colors = new Color32[webCamTexture.width * webCamTexture.height];
            rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);

            //Debug.Log ("Screen.orientation " + Screen.orientation);
            screenOrientation = Screen.orientation;
            #if !UNITY_EDITOR && !(UNITY_STANDALONE || UNITY_WEBGL)
            // On mobile in portrait, frames must be rotated 90°, so a transposed
            // (width x height swapped) Mat is prepared as well.
            if (screenOrientation == ScreenOrientation.Portrait || screenOrientation == ScreenOrientation.PortraitUpsideDown) {
                rotatedRgbaMat = new Mat (webCamTexture.width, webCamTexture.height, CvType.CV_8UC4);
            }
            #endif

            initWaiting = false;
            initDone = true;
            if (OnInitedEvent != null)
                OnInitedEvent.Invoke ();
            break;
        } else {
            initCount++;
            yield return 0;
        }
    }

    if (isTimeout) {
        //Debug.Log("Init time out.");
        webCamTexture.Stop ();
        webCamTexture = null;
        initWaiting = false;
        if (OnErrorOccurredEvent != null)
            OnErrorOccurredEvent.Invoke (ErrorCode.TIMEOUT);
    }
}
/// <summary>
/// Indicates whether initialization has completed successfully.
/// </summary>
/// <returns><c>true</c> if the helper is initialized; otherwise, <c>false</c>.</returns>
public bool IsInited ()
{
    return initDone;
}
/// <summary>
/// Starts (or resumes) camera capture. No-op until the helper is initialized.
/// </summary>
public void Play ()
{
    if (!initDone)
        return;
    webCamTexture.Play ();
}
/// <summary>
/// Pauses camera capture. No-op until the helper is initialized.
/// </summary>
public void Pause ()
{
    if (!initDone)
        return;
    webCamTexture.Pause ();
}
/// <summary>
/// Stops camera capture. No-op until the helper is initialized.
/// </summary>
public void Stop ()
{
    if (!initDone)
        return;
    webCamTexture.Stop ();
}
/// <summary>
/// Indicates whether the camera is currently capturing.
/// </summary>
/// <returns><c>true</c> if initialized and the WebCamTexture is playing; otherwise, <c>false</c>.</returns>
public bool IsPlaying ()
{
    // Short-circuits before touching webCamTexture when not yet initialized.
    return initDone && webCamTexture.isPlaying;
}
/// <summary>
/// Gets the underlying WebCamTexture.
/// </summary>
/// <returns>The WebCamTexture, or null when the helper is not initialized.</returns>
public WebCamTexture GetWebCamTexture ()
{
    if (!initDone)
        return null;
    return webCamTexture;
}
/// <summary>
/// Gets the WebCamDevice selected during initialization.
/// (A default-valued struct is returned when init has not run yet.)
/// </summary>
/// <returns>The web cam device.</returns>
public WebCamDevice GetWebCamDevice ()
{
    return webCamDevice;
}
/// <summary>
/// Indicates whether the camera produced a new frame this frame.
/// </summary>
/// <returns><c>true</c> if a new frame is available; otherwise, <c>false</c>.</returns>
public bool DidUpdateThisFrame ()
{
    if (!initDone)
        return false;
    // On the listed old iOS/Unity combinations didUpdateThisFrame is unreliable:
    // the texture reports a 16x16 placeholder until real frames arrive, so the
    // reported size is used as the "has a frame" signal instead.
    #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
    if (webCamTexture.width > 16 && webCamTexture.height > 16) {
        return true;
    } else {
        return false;
    }
    #else
    return webCamTexture.didUpdateThisFrame;
    #endif
}
/// <summary>
/// Converts the current camera frame into the RGBA Mat and returns it.
/// When a rotated Mat was allocated (portrait mobile), the rotated Mat is
/// returned instead. If not initialized or not playing, the previous buffer
/// is returned unchanged.
/// </summary>
/// <returns>The RGBA Mat of the current frame.</returns>
public Mat GetMat ()
{
    if (!initDone || !webCamTexture.isPlaying) {
        // Not capturing: hand back whichever buffer callers last saw.
        if (rotatedRgbaMat != null) {
            return rotatedRgbaMat;
        } else {
            return rgbaMat;
        }
    }

    Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

    if (rotatedRgbaMat != null) {
        // Rotate 90° clockwise: transpose, then flip around the y-axis.
        using (Mat transposeRgbaMat = rgbaMat.t ()) {
            Core.flip (transposeRgbaMat, rotatedRgbaMat, 1);
        }
        flipMat (rotatedRgbaMat);
        return rotatedRgbaMat;
    } else {
        flipMat (rgbaMat);
        return rgbaMat;
    }
}
/// <summary>
/// Flips the Mat in place: first the flip implied by the camera's rotation
/// and facing, then the user-requested vertical/horizontal flips.
/// </summary>
/// <param name="mat">Image to flip in place.</param>
private void flipMat (Mat mat)
{
    // OpenCV flip codes: 0 = around the x-axis, 1 = around the y-axis,
    // -1 = both axes. int.MinValue is the sentinel for "no flip".
    int flipCode = int.MinValue;

    int angle = webCamTexture.videoRotationAngle;
    if (webCamDevice.isFrontFacing) {
        if (angle == 0 || angle == 90) {
            flipCode = 1;
        } else if (angle == 180 || angle == 270) {
            flipCode = 0;
        }
    } else {
        if (angle == 180 || angle == 270) {
            flipCode = -1;
        }
    }

    // Each user flag toggles one axis; composing a toggle with the current
    // code permutes the four states {none, 0, 1, -1}.
    if (flipVertical) {
        switch (flipCode) {
        case int.MinValue:
            flipCode = 0;
            break;
        case 0:
            flipCode = int.MinValue;
            break;
        case 1:
            flipCode = -1;
            break;
        case -1:
            flipCode = 1;
            break;
        }
    }
    if (flipHorizontal) {
        switch (flipCode) {
        case int.MinValue:
            flipCode = 1;
            break;
        case 0:
            flipCode = -1;
            break;
        case 1:
            flipCode = int.MinValue;
            break;
        case -1:
            flipCode = 0;
            break;
        }
    }

    if (flipCode > int.MinValue) {
        Core.flip (mat, mat, flipCode);
    }
}
/// <summary>
/// Gets the internal Color32 buffer used for WebCamTexture → Mat conversion.
/// </summary>
/// <returns>The buffer colors (null before initialization).</returns>
public Color32[] GetBufferColors ()
{
    return colors;
}
/// <summary>
/// Releases the resources created by the init coroutine (texture and Mats)
/// and notifies listeners via OnDisposedEvent.
/// </summary>
private void dispose ()
{
    initWaiting = false;
    initDone = false;
    if (webCamTexture != null) {
        webCamTexture.Stop ();
        webCamTexture = null;
    }
    if (rgbaMat != null) {
        rgbaMat.Dispose ();
        rgbaMat = null;
    }
    if (rotatedRgbaMat != null) {
        rotatedRgbaMat.Dispose ();
        rotatedRgbaMat = null;
    }
    // Fire the event only after everything has been released.
    if (OnDisposedEvent != null)
        OnDisposedEvent.Invoke ();
}
/// <summary>
/// Releases all resource used by the <see cref="WebCamTextureToMatHelper"/> object.
/// </summary>
/// <remarks>Call <see cref="Dispose"/> when you are finished using the <see cref="WebCamTextureToMatHelper"/>. The
/// <see cref="Dispose"/> method leaves the <see cref="WebCamTextureToMatHelper"/> in an unusable state. After
/// calling <see cref="Dispose"/>, you must release all references to the <see cref="WebCamTextureToMatHelper"/> so
/// the garbage collector can reclaim the memory that the <see cref="WebCamTextureToMatHelper"/> was occupying.</remarks>
public void Dispose ()
{
    if (initDone) {
        dispose ();
    }
    colors = null;
}
}
}
TrackedMeshOverlay
using System;
using System.Collections.Generic;
using UnityEngine;
using OpenCVForUnity.RectangleTrack;
namespace FaceMaskExample
{
/// <summary>
/// Manages a pool of TrackedMesh objects overlaid on a target transform,
/// keyed by tracked-face id.
/// </summary>
public class TrackedMeshOverlay : MonoBehaviour
{
    // Interval (in frames) handed to the object pool's prepare loop.
    public int Interval = 1;

    // Maximum number of pooled TrackedMesh instances.
    public int PoolSize = 10;

    // Template object; must carry a TrackedMesh component.
    [SerializeField]
    private GameObject baseObject;

    /// <summary>
    /// Gets or sets the template object. Setting it rebuilds the object pool.
    /// </summary>
    public GameObject BaseObject
    {
        get {
            return baseObject;
        }
        set {
            baseObject = value;
            setBaseObject(baseObject);
        }
    }

    /// <summary>
    /// Width of the overlay target (local scale x of the last target transform).
    /// </summary>
    public float Width
    {
        get {
            return targetWidth;
        }
    }

    /// <summary>
    /// Height of the overlay target (local scale y of the last target transform).
    /// </summary>
    public float Height
    {
        get {
            return targetHeight;
        }
    }

    protected Transform targetTransform;
    protected float targetWidth = 0;
    protected float targetHeight = 0;
    protected Transform overlayTransform;
    protected ObjectPool objectPool;
    // Currently visible meshes, keyed by tracked-rect id.
    protected Dictionary<int, TrackedMesh> showingObjects = new Dictionary<int, TrackedMesh>();

    void Awake()
    {
        init("MeshOverlay");
    }

    void OnDestroy()
    {
        overlayTransform = null;
        targetTransform = null;
        targetWidth = 0;
        targetHeight = 0;
        showingObjects.Clear();
        if (objectPool != null)
        {
            Destroy(objectPool.gameObject);
            objectPool = null;
        }
    }

    /// <summary>
    /// Fetches an instance from the pool and parents it under <paramref name="parent"/>.
    /// Returns null when the pool is missing or exhausted.
    /// </summary>
    protected GameObject getPoolObject(Transform parent)
    {
        if (objectPool == null) return null;
        GameObject newObj = objectPool.GetInstance(parent);
        if (newObj == null) return null;
        newObj.transform.SetParent(parent, false);
        return newObj;
    }

    /// <summary>
    /// Creates the overlay root object and, when a template is already assigned,
    /// builds the pool from it.
    /// </summary>
    protected virtual void init(String name)
    {
        GameObject obj = new GameObject(name);
        overlayTransform = obj.transform;
        overlayTransform.parent = gameObject.transform.parent;
        if (baseObject != null)
            setBaseObject(baseObject);
    }

    /// <summary>
    /// (Re)creates the object pool from the given template object.
    /// The template must have a TrackedMesh component.
    /// </summary>
    protected virtual void setBaseObject(GameObject obj)
    {
        if (obj.GetComponent<TrackedMesh>() == null)
        {
            Debug.LogWarning("Object is not TrackedMesh.");
            return;
        }
        if (objectPool != null)
        {
            Destroy(objectPool);
        }
        objectPool = overlayTransform.gameObject.AddComponent<ObjectPool>();
        objectPool.prefab = obj;
        objectPool.maxCount = PoolSize;
        objectPool.prepareCount = PoolSize / 2; // fix: "(int)PoolSize" cast was redundant (PoolSize is int)
        objectPool.Interval = Interval;
    }

    /// <summary>
    /// Aligns the overlay root with the given target transform and caches its size.
    /// Passing null detaches the overlay from any target.
    /// </summary>
    public virtual void UpdateOverlayTransform(Transform targetTransform)
    {
        if (targetTransform == null)
        {
            this.targetTransform = null;
            return;
        }
        targetWidth = targetTransform.localScale.x;
        targetHeight = targetTransform.localScale.y;
        this.targetTransform = targetTransform;
        overlayTransform.localPosition = targetTransform.localPosition;
        overlayTransform.localRotation = targetTransform.localRotation;
        overlayTransform.localScale = targetTransform.localScale;
    }

    /// <summary>
    /// Returns the visible mesh for the given id, or null when none exists.
    /// </summary>
    public virtual TrackedMesh GetObjectById(int id)
    {
        // Single dictionary lookup instead of ContainsKey + indexer.
        TrackedMesh tm;
        showingObjects.TryGetValue(id, out tm); // tm stays null when id is unknown
        return tm;
    }

    /// <summary>
    /// Creates (from the pool) a mesh for a new id. Returns null when the id is
    /// already shown or the pool is exhausted; otherwise the prepared TrackedMesh.
    /// </summary>
    public virtual TrackedMesh CreateObject(int id, Texture2D tex = null)
    {
        if (showingObjects.ContainsKey(id)) return null;
        GameObject obj = getPoolObject(overlayTransform);
        if (obj == null) return null;
        TrackedMesh tm = obj.GetComponent<TrackedMesh>();
        if (tm != null)
        {
            tm.Id = id;
            tm.transform.localPosition = Vector3.zero;
            tm.transform.localRotation = new Quaternion();
            tm.transform.localScale = Vector3.one;
            if (tex != null)
            {
                Renderer tmRenderer = tm.transform.GetComponent<Renderer>();
                tmRenderer.sharedMaterial.SetTexture("_MainTex", tex);
            }
            showingObjects.Add(id, tm);
        }
        return tm;
    }

    /// <summary>
    /// Updates the geometry of the mesh registered under <paramref name="id"/>.
    /// Triangles and UVs are written only when non-null.
    /// </summary>
    public virtual void UpdateObject(int id, Vector3[] vertices, int[] triangles = null, Vector2[] uv = null)
    {
        TrackedMesh tm;
        if (!showingObjects.TryGetValue(id, out tm)) return;
        // Cache the mesh: the MeshFilter.mesh property was re-read on every line.
        var mesh = tm.MeshFilter.mesh;
        if (vertices.Length != mesh.vertices.Length) Debug.LogError("The number of vertices does not match.");
        mesh.vertices = vertices;
        if (triangles != null)
        {
            mesh.triangles = triangles;
        }
        if (uv != null)
        {
            mesh.uv = uv;
        }
        mesh.RecalculateBounds();
        mesh.RecalculateNormals();
    }

    /// <summary>
    /// Hides and unregisters the mesh for the given id, if present.
    /// </summary>
    public virtual void DeleteObject(int id)
    {
        TrackedMesh tm;
        if (!showingObjects.TryGetValue(id, out tm)) return;
        if (tm != null)
            tm.gameObject.SetActive(false);
        showingObjects.Remove(id);
    }

    /// <summary>
    /// Hides all visible meshes and clears the registry.
    /// </summary>
    public virtual void Reset()
    {
        foreach (TrackedMesh tm in showingObjects.Values)
        {
            if (tm != null)
                tm.gameObject.SetActive(false);
        }
        showingObjects.Clear();
    }
}
}
WebCamTextureFaceMaskExample
using System.Collections;
using System.Collections.Generic;
using System.IO;
using UnityEngine;
using UnityEngine.UI;
using DlibFaceLandmarkDetector;
using OpenCVForUnity;
using OpenCVForUnity.RectangleTrack;
using WebGLFileUploader;
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
using UnityEngine.SceneManagement;
#endif
namespace FaceMaskExample
{
/// <summary>
/// WebCamTexture face mask example.
/// </summary>
[RequireComponent (typeof(WebCamTextureToMatHelper), typeof(TrackedMeshOverlay))]
public class WebCamTextureFaceMaskExample : MonoBehaviour
{
/// <summary>
/// The colors.
/// </summary>
Color32[] colors;
/// <summary>
/// The gray mat.
/// </summary>
Mat grayMat;
/// <summary>
/// The texture.
/// </summary>
Texture2D texture;
/// <summary>
/// The cascade.
/// </summary>
CascadeClassifier cascade;
/// <summary>
/// The web cam texture to mat helper.
/// </summary>
WebCamTextureToMatHelper webCamTextureToMatHelper;
/// <summary>
/// The face landmark detector.
/// </summary>
FaceLandmarkDetector faceLandmarkDetector;
/// <summary>
/// The detection based tracker.
/// </summary>
RectangleTracker rectangleTracker;
/// <summary>
/// The frontal face parameter.
/// </summary>
FrontalFaceParam frontalFaceParam;
/// <summary>
/// The is showing face rects.
/// </summary>
public bool isShowingFaceRects = false;
/// <summary>
/// The is showing face rects toggle.
/// </summary>
public Toggle isShowingFaceRectsToggle;
/// <summary>
/// The use Dlib face detector flag.
/// </summary>
public bool useDlibFaceDetecter = false;
/// <summary>
/// The use dlib face detecter toggle.
/// </summary>
public Toggle useDlibFaceDetecterToggle;
/// <summary>
/// The is filtering non frontal faces.
/// </summary>
public bool isFilteringNonFrontalFaces;
/// <summary>
/// The is filtering non frontal faces toggle.
/// </summary>
public Toggle isFilteringNonFrontalFacesToggle;
/// <summary>
/// The frontal face rate lower limit.
/// </summary>
[Range (0.0f, 1.0f)]
public float
frontalFaceRateLowerLimit = 0.85f;
/// <summary>
/// The is showing debug face points.
/// </summary>
public bool isShowingDebugFacePoints = false;
/// <summary>
/// The is showing debug face points toggle.
/// </summary>
public Toggle isShowingDebugFacePointsToggle;
/// <summary>
/// The is upload face mask button.
/// </summary>
public Button uploadFaceMaskButton;
/// <summary>
/// The mesh overlay.
/// </summary>
private TrackedMeshOverlay meshOverlay;
/// <summary>
/// The Shader.PropertyToID for "_Fade".
/// </summary>
private int shader_FadeID;
/// <summary>
/// The face mask texture.
/// </summary>
private Texture2D faceMaskTexture;
/// <summary>
/// The face mask mat.
/// </summary>
private Mat faceMaskMat;
/// <summary>
/// The detected face rect in mask mat.
/// </summary>
private UnityEngine.Rect faceRectInMask;
/// <summary>
/// The detected face landmark points in mask mat.
/// </summary>
private List<Vector2> faceLandmarkPointsInMask;
/// <summary>
/// The haarcascade_frontalface_alt_xml_filepath.
/// </summary>
private string haarcascade_frontalface_alt_xml_filepath;
/// <summary>
/// The shape_predictor_68_face_landmarks_dat_filepath.
/// </summary>
private string shape_predictor_68_face_landmarks_dat_filepath;
// Use this for initialization
void Start ()
{
    // WebGL uploads: encode uploads as images, accept only common image
    // extensions, and shrink oversized images to 640x480 before delivery.
    WebGLFileUploadManager.SetImageEncodeSetting (true);
    WebGLFileUploadManager.SetAllowedFileName ("\\.(png|jpe?g|gif)$");
    WebGLFileUploadManager.SetImageShrinkingSize (640, 480);
    WebGLFileUploadManager.FileUploadEventHandler += fileUploadHandler;
    webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper> ();
    #if UNITY_WEBGL && !UNITY_EDITOR
    // On WebGL the data files must be fetched asynchronously before Run().
    StartCoroutine(getFilePathCoroutine());
    #else
    haarcascade_frontalface_alt_xml_filepath = OpenCVForUnity.Utils.getFilePath ("haarcascade_frontalface_alt.xml");
    shape_predictor_68_face_landmarks_dat_filepath = DlibFaceLandmarkDetector.Utils.getFilePath ("shape_predictor_68_face_landmarks.dat");
    Run ();
    #endif
}
#if UNITY_WEBGL && !UNITY_EDITOR
/// <summary>
/// Fetches the cascade and landmark data files asynchronously (WebGL build),
/// then starts the example and enables the upload button.
/// </summary>
private IEnumerator getFilePathCoroutine()
{
    var getFilePathAsync_0_Coroutine = StartCoroutine (OpenCVForUnity.Utils.getFilePathAsync ("haarcascade_frontalface_alt.xml", (result) => {
        haarcascade_frontalface_alt_xml_filepath = result;
    }));
    var getFilePathAsync_1_Coroutine = StartCoroutine (DlibFaceLandmarkDetector.Utils.getFilePathAsync ("shape_predictor_68_face_landmarks.dat", (result) => {
        shape_predictor_68_face_landmarks_dat_filepath = result;
    }));
    // Wait for both downloads before touching the files.
    yield return getFilePathAsync_0_Coroutine;
    yield return getFilePathAsync_1_Coroutine;
    Run ();
    uploadFaceMaskButton.interactable = true;
}
#endif
/// <summary>
/// Creates the detection/tracking objects, syncs the UI toggles with the
/// serialized flags, and kicks off webcam initialization.
/// </summary>
private void Run ()
{
    meshOverlay = this.GetComponent<TrackedMeshOverlay> ();

    // Cache the shader property id used to fade masks in and out.
    shader_FadeID = Shader.PropertyToID ("_Fade");

    rectangleTracker = new RectangleTracker ();
    faceLandmarkDetector = new FaceLandmarkDetector (shape_predictor_68_face_landmarks_dat_filepath);
    frontalFaceParam = new FrontalFaceParam ();

    // Reflect the inspector-set flags in the UI.
    isShowingFaceRectsToggle.isOn = isShowingFaceRects;
    useDlibFaceDetecterToggle.isOn = useDlibFaceDetecter;
    isFilteringNonFrontalFacesToggle.isOn = isFilteringNonFrontalFaces;
    isShowingDebugFacePointsToggle.isOn = isShowingDebugFacePoints;

    webCamTextureToMatHelper.Init ();
}
/// <summary>
/// Raises the web cam texture to mat helper inited event.
/// Allocates the display texture, fits the quad and orthographic camera to the
/// frame, loads the cascade, and applies the initial face mask.
/// </summary>
public void OnWebCamTextureToMatHelperInited ()
{
    Debug.Log ("OnWebCamTextureToMatHelperInited");
    Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();
    colors = new Color32[webCamTextureMat.cols () * webCamTextureMat.rows ()];
    texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);
    // The quad is scaled to the frame size in pixels; the camera is sized below
    // so the whole frame stays visible.
    gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);
    Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
    float width = gameObject.transform.localScale.x;
    float height = gameObject.transform.localScale.y;
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale) {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
    } else {
        Camera.main.orthographicSize = height / 2;
    }
    gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
    grayMat = new Mat (webCamTextureMat.rows (), webCamTextureMat.cols (), CvType.CV_8UC1);
    cascade = new CascadeClassifier (haarcascade_frontalface_alt_xml_filepath);
    if (cascade.empty ()) {
        Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
    }
    meshOverlay.UpdateOverlayTransform (gameObject.transform);
    // Load the first example mask.
    OnChangeFaceMaskButton ();
}
/// <summary>
/// Raises the web cam texture to mat helper disposed event.
/// Releases the gray working Mat and resets tracking/overlay state.
/// </summary>
public void OnWebCamTextureToMatHelperDisposed ()
{
    Debug.Log ("OnWebCamTextureToMatHelperDisposed");
    // Fix: the helper can be disposed before initialization completed (e.g. the
    // camera failed to open), in which case grayMat was never allocated and the
    // unconditional Dispose() threw a NullReferenceException.
    if (grayMat != null) {
        grayMat.Dispose ();
        grayMat = null;
    }
    if (rectangleTracker != null)
        rectangleTracker.Reset ();
    if (meshOverlay != null)
        meshOverlay.Reset ();
}
/// <summary>
/// Raises the web cam texture to mat helper error occurred event.
/// Currently only logs the error code.
/// </summary>
/// <param name="errorCode">Error code.</param>
public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode){
    Debug.Log ("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
}
// Update is called once per frame
// Per frame: detect faces, track them, fit landmarks, warp the mask mesh over
// each tracked face, and draw the debug overlays.
void Update ()
{
    if (webCamTextureToMatHelper.IsPlaying () && webCamTextureToMatHelper.DidUpdateThisFrame ()) {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat ();

        // detect faces.
        List<OpenCVForUnity.Rect> detectResult = new List<OpenCVForUnity.Rect> ();
        if (useDlibFaceDetecter) {
            OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);
            List<UnityEngine.Rect> result = faceLandmarkDetector.Detect ();
            foreach (var unityRect in result) {
                detectResult.Add (new OpenCVForUnity.Rect ((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
            }
        } else {
            // convert image to greyscale.
            Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
            using (Mat equalizeHistMat = new Mat ())
            using (MatOfRect faces = new MatOfRect ()) {
                Imgproc.equalizeHist (grayMat, equalizeHistMat);
                // Minimum detectable face is 15% of the frame width.
                cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());
                detectResult = faces.toList ();
            }
            // Adjust to Dilb's result: shift the haar rect down by 10% of its height.
            foreach (OpenCVForUnity.Rect r in detectResult) {
                r.y += (int)(r.height * 0.1f);
            }
        }

        // face traking.
        rectangleTracker.UpdateTrackedObjects (detectResult);
        List<TrackedRect> trackedRects = new List<TrackedRect> ();
        rectangleTracker.GetObjects (trackedRects, true);

        // detect face landmark points for every tracked rect (same order as trackedRects).
        OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);
        List<List<Vector2>> landmarkPoints = new List<List<Vector2>> ();
        for (int i = 0; i < trackedRects.Count; i++) {
            TrackedRect tr = trackedRects [i];
            UnityEngine.Rect rect = new UnityEngine.Rect (tr.x, tr.y, tr.width, tr.height);
            List<Vector2> points = faceLandmarkDetector.DetectLandmark (rect);
            landmarkPoints.Add (points);
        }

        // face masking.
        // Branch 1: a mask image is loaded — its landmarks provide the UVs.
        if (faceMaskTexture != null && landmarkPoints.Count >= 1) {
            OpenCVForUnity.Utils.texture2DToMat (faceMaskTexture, faceMaskMat);
            float imageWidth = meshOverlay.Width;
            float imageHeight = meshOverlay.Height;
            float maskImageWidth = faceMaskTexture.width;
            float maskImageHeight = faceMaskTexture.height;
            TrackedRect tr;
            TrackedMesh tm;
            for (int i = 0; i < trackedRects.Count; i++) {
                tr = trackedRects [i];
                if (tr.state == TrackedState.NEW) {
                    meshOverlay.CreateObject (tr.id, faceMaskTexture);
                }
                if (tr.state < TrackedState.DELETED) {
                    tm = meshOverlay.GetObjectById (tr.id);
                    // Vertices follow the live face; quad space is [-0.5, 0.5] with y up.
                    Vector3[] vertices = tm.MeshFilter.mesh.vertices;
                    if (vertices.Length == landmarkPoints [i].Count) {
                        for (int j = 0; j < vertices.Length; j++) {
                            vertices [j].x = landmarkPoints [i] [j].x / imageWidth - 0.5f;
                            vertices [j].y = 0.5f - landmarkPoints [i] [j].y / imageHeight;
                        }
                    }
                    // UVs come from the mask image's own landmarks (v flipped).
                    Vector2[] uv = tm.MeshFilter.mesh.uv;
                    if (uv.Length == faceLandmarkPointsInMask.Count) {
                        for (int jj = 0; jj < uv.Length; jj++) {
                            uv [jj].x = faceLandmarkPointsInMask [jj].x / maskImageWidth;
                            uv [jj].y = (maskImageHeight - faceLandmarkPointsInMask [jj].y) / maskImageHeight;
                        }
                    }
                    meshOverlay.UpdateObject (tr.id, vertices, null, uv);
                    // Fade the mask out the longer the face has gone undetected.
                    if (tr.numFramesNotDetected > 3) {
                        tm.Material.SetFloat (shader_FadeID, 1f);
                    } else if (tr.numFramesNotDetected > 0 && tr.numFramesNotDetected <= 3) {
                        tm.Material.SetFloat (shader_FadeID, 0.3f + (0.7f/4f) * tr.numFramesNotDetected);
                    } else {
                        tm.Material.SetFloat (shader_FadeID, 0.3f);
                    }
                    // filter nonfrontalface.
                    if (isFilteringNonFrontalFaces && frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]) < frontalFaceRateLowerLimit) {
                        tm.Material.SetFloat (shader_FadeID, 1f);
                    }
                } else if (tr.state == TrackedState.DELETED) {
                    meshOverlay.DeleteObject (tr.id);
                }
            }
        // Branch 2: no mask image — the camera frame itself is the mask, with the
        // first detected face's landmarks as UVs (face swap onto every face).
        } else if (landmarkPoints.Count >= 1) {
            float imageWidth = meshOverlay.Width;
            float imageHeight = meshOverlay.Height;
            float maskImageWidth = texture.width;
            float maskImageHeight = texture.height;
            TrackedRect tr;
            TrackedMesh tm;
            for (int i = 0; i < trackedRects.Count; i++) {
                tr = trackedRects [i];
                if (tr.state == TrackedState.NEW) {
                    meshOverlay.CreateObject (tr.id, texture);
                }
                if (tr.state < TrackedState.DELETED) {
                    tm = meshOverlay.GetObjectById (tr.id);
                    Vector3[] vertices = tm.MeshFilter.mesh.vertices;
                    if (vertices.Length == landmarkPoints [i].Count) {
                        for (int j = 0; j < vertices.Length; j++) {
                            vertices [j].x = landmarkPoints[i][j].x / imageWidth - 0.5f;
                            vertices [j].y = 0.5f - landmarkPoints[i][j].y / imageHeight;
                        }
                    }
                    Vector2[] uv = tm.MeshFilter.mesh.uv;
                    if (uv.Length == landmarkPoints [0].Count) {
                        for (int jj = 0; jj < uv.Length; jj++) {
                            uv [jj].x = landmarkPoints[0][jj].x / maskImageWidth;
                            uv [jj].y = (maskImageHeight - landmarkPoints[0][jj].y) / maskImageHeight;
                        }
                    }
                    meshOverlay.UpdateObject (tr.id, vertices, null, uv);
                    if (tr.numFramesNotDetected > 3) {
                        tm.Material.SetFloat (shader_FadeID, 1f);
                    } else if (tr.numFramesNotDetected > 0 && tr.numFramesNotDetected <= 3) {
                        tm.Material.SetFloat (shader_FadeID, 0.3f + (0.7f/4f) * tr.numFramesNotDetected);
                    } else {
                        tm.Material.SetFloat (shader_FadeID, 0.3f);
                    }
                    // filter nonfrontalface.
                    if (isFilteringNonFrontalFaces && frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]) < frontalFaceRateLowerLimit) {
                        tm.Material.SetFloat (shader_FadeID, 1f);
                    }
                } else if (tr.state == TrackedState.DELETED) {
                    meshOverlay.DeleteObject (tr.id);
                }
            }
        }

        // draw face rects. Raw detections in red, tracked rects in yellow.
        if (isShowingFaceRects) {
            for (int i = 0; i < detectResult.Count; i++) {
                UnityEngine.Rect rect = new UnityEngine.Rect (detectResult [i].x, detectResult [i].y, detectResult [i].width, detectResult [i].height);
                OpenCVForUnityUtils.DrawFaceRect (rgbaMat, rect, new Scalar (255, 0, 0, 255), 2);
            }
            for (int i = 0; i < trackedRects.Count; i++) {
                UnityEngine.Rect rect = new UnityEngine.Rect (trackedRects [i].x, trackedRects [i].y, trackedRects [i].width, trackedRects [i].height);
                OpenCVForUnityUtils.DrawFaceRect (rgbaMat, rect, new Scalar (255, 255, 0, 255), 2);
                //Imgproc.putText (rgbaMat, " " + frontalFaceParam.getAngleOfFrontalFace (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                //Imgproc.putText (rgbaMat, " " + frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
            }
        }

        // draw face points.
        if (isShowingDebugFacePoints) {
            for (int i = 0; i < landmarkPoints.Count; i++) {
                OpenCVForUnityUtils.DrawFaceLandmark (rgbaMat, landmarkPoints [i], new Scalar (0, 255, 0, 255), 2);
            }
        }

        // display face mask image, scaled to a quarter of the frame width,
        // in the top-right corner.
        if (faceMaskTexture != null && faceMaskMat != null) {
            if (isShowingFaceRects) {
                OpenCVForUnityUtils.DrawFaceRect (faceMaskMat, faceRectInMask, new Scalar (255, 0, 0, 255), 2);
            }
            if (isShowingDebugFacePoints) {
                OpenCVForUnityUtils.DrawFaceLandmark (faceMaskMat, faceLandmarkPointsInMask, new Scalar (0, 255, 0, 255), 2);
            }
            float scale = (rgbaMat.width () / 4f) / faceMaskMat.width ();
            float tx = rgbaMat.width () - faceMaskMat.width () * scale;
            float ty = 0.0f;
            // 2x3 affine: uniform scale plus translation to the corner.
            Mat trans = new Mat (2, 3, CvType.CV_32F);//1.0, 0.0, tx, 0.0, 1.0, ty);
            trans.put (0, 0, scale);
            trans.put (0, 1, 0.0f);
            trans.put (0, 2, tx);
            trans.put (1, 0, 0.0f);
            trans.put (1, 1, scale);
            trans.put (1, 2, ty);
            Imgproc.warpAffine (faceMaskMat, rgbaMat, trans, rgbaMat.size (), Imgproc.INTER_LINEAR, Core.BORDER_TRANSPARENT, new Scalar (0));
        }

        Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
        OpenCVForUnity.Utils.matToTexture2D (rgbaMat, texture, colors);
    }
}
/// <summary>
/// Raises the disable event: unsubscribes from upload events and releases
/// all native resources.
/// </summary>
void OnDisable ()
{
    WebGLFileUploadManager.FileUploadEventHandler -= fileUploadHandler;
    WebGLFileUploadManager.Dispose ();
    // Fix: every other member is null-guarded here, but the helper was not.
    // OnDisable can run before Start () assigned it (component disabled
    // immediately after enabling), which threw a NullReferenceException.
    if (webCamTextureToMatHelper != null)
        webCamTextureToMatHelper.Dispose ();
    if (cascade != null)
        cascade.Dispose ();
    if (rectangleTracker != null)
        rectangleTracker.Dispose ();
    if (faceLandmarkDetector != null)
        faceLandmarkDetector.Dispose ();
    if (frontalFaceParam != null)
        frontalFaceParam.Dispose ();
}
/// <summary>
/// Raises the back button event. Returns to the "FaceMask" menu scene.
/// </summary>
public void OnBackButton ()
{
    #if UNITY_5_3 || UNITY_5_3_OR_NEWER
    SceneManager.LoadScene ("FaceMask");
    #else
    Application.LoadLevel ("FaceMask");
    #endif
}
/// <summary>
/// Raises the play button event: resumes webcam capture.
/// </summary>
public void OnPlayButton ()
{
    webCamTextureToMatHelper.Play ();
}
/// <summary>
/// Raises the pause button event: pauses webcam capture.
/// </summary>
public void OnPauseButton ()
{
    webCamTextureToMatHelper.Pause ();
}
/// <summary>
/// Raises the change camera button event: re-initializes the helper with the
/// opposite camera facing, keeping the requested resolution.
/// </summary>
public void OnChangeCameraButton ()
{
    webCamTextureToMatHelper.Init (null, webCamTextureToMatHelper.requestWidth, webCamTextureToMatHelper.requestHeight, !webCamTextureToMatHelper.requestIsFrontFacing);
}
/// <summary>
/// Raises the is showing face rects toggle event.
/// </summary>
public void OnIsShowingFaceRectsToggle ()
{
    // Mirror the UI toggle state into the flag.
    isShowingFaceRects = isShowingFaceRectsToggle.isOn;
}
/// <summary>
/// Raises the use Dlib face detector toggle event.
/// </summary>
public void OnUseDlibFaceDetecterToggle ()
{
    // Mirror the UI toggle state into the flag.
    useDlibFaceDetecter = useDlibFaceDetecterToggle.isOn;
}
/// <summary>
/// Raises the is filtering non frontal faces toggle event.
/// </summary>
public void OnIsFilteringNonFrontalFacesToggle ()
{
    // Mirror the UI toggle state into the flag.
    isFilteringNonFrontalFaces = isFilteringNonFrontalFacesToggle.isOn;
}
/// <summary>
/// Raises the is showing debug face points toggle event.
/// </summary>
public void OnIsShowingDebugFacePointsToggle ()
{
    // Mirror the UI toggle state into the flag.
    isShowingDebugFacePoints = isShowingDebugFacePointsToggle.isOn;
}
/// <summary>
/// Raises the change face mask button event: loads the next example mask
/// texture from Resources and determines its face rect and landmark points
/// (reusing precomputed data when available).
/// </summary>
public void OnChangeFaceMaskButton ()
{
    removeFaceMask ();

    ExampleMaskData maskData = ExampleDataSet.GetData();
    faceMaskTexture = Resources.Load (maskData.FileName) as Texture2D;
    if (faceMaskTexture == null) {
        // Fix: a missing/mistyped resource previously caused a
        // NullReferenceException on faceMaskTexture.height below.
        ExampleDataSet.Next(); // still advance so the button keeps cycling
        Debug.LogError ("Face mask texture could not be loaded: " + maskData.FileName);
        return;
    }
    faceMaskMat = new Mat (faceMaskTexture.height, faceMaskTexture.width, CvType.CV_8UC4);
    OpenCVForUnity.Utils.texture2DToMat (faceMaskTexture, faceMaskMat);
    Debug.Log ("faceMaskMat ToString " + faceMaskMat.ToString ());

    // Prefer the precomputed landmark data; fall back to detection.
    if(maskData.LandmarkPoints != null){
        faceRectInMask = maskData.FaceRect;
        faceLandmarkPointsInMask = maskData.LandmarkPoints;
    }else{
        faceRectInMask = detectFace (faceMaskMat);
        faceLandmarkPointsInMask = detectFaceLandmarkPoints (faceMaskMat, faceRectInMask);
    }
    ExampleDataSet.Next();

    // An all-zero rect means detection failed; drop the mask again.
    if (faceRectInMask.width == 0 && faceRectInMask.height == 0){
        removeFaceMask ();
        Debug.Log ("A face could not be detected from the input image.");
    }
}
/// <summary>
/// Raises the scan face mask button event: captures the current webcam frame,
/// crops the detected face region (with margin) and uses it as the mask image.
/// </summary>
public void OnScanFaceMaskButton ()
{
    removeFaceMask ();
    // Capture webcam frame.
    if (webCamTextureToMatHelper.IsPlaying ()) {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat ();
        faceRectInMask = detectFace (rgbaMat);
        if (faceRectInMask.width == 0 && faceRectInMask.height == 0){
            Debug.Log ("A face could not be detected from the input image.");
            return;
        }
        OpenCVForUnity.Rect rect = new OpenCVForUnity.Rect((int)faceRectInMask.x, (int)faceRectInMask.y, (int)faceRectInMask.width, (int)faceRectInMask.height);
        // NOTE(review): this inflates by a fifth of the rect's *position*
        // (x/5, y/5), not its size — width/5 and height/5 look intended.
        // Left unchanged; confirm against the original asset.
        rect.inflate(rect.x/5, rect.y/5);
        // Clamp the inflated rect to the frame bounds.
        rect = rect.intersect(new OpenCVForUnity.Rect(0,0,rgbaMat.width(),rgbaMat.height()));
        faceMaskTexture = new Texture2D (rect.width, rect.height, TextureFormat.RGBA32, false);
        faceMaskMat = new Mat(rgbaMat, rect).clone ();
        OpenCVForUnity.Utils.matToTexture2D(faceMaskMat, faceMaskTexture);
        Debug.Log ("faceMaskMat ToString " + faceMaskMat.ToString ());
        // Re-detect on the cropped image so rect/landmarks are in mask coordinates.
        faceRectInMask = detectFace (faceMaskMat);
        faceLandmarkPointsInMask = detectFaceLandmarkPoints (faceMaskMat, faceRectInMask);
        if (faceRectInMask.width == 0 && faceRectInMask.height == 0){
            removeFaceMask ();
            Debug.Log ("A face could not be detected from the input image.");
        }
    }
}
/// <summary>
/// Raises the upload face mask button event: opens the WebGL file-upload
/// dialog; the result arrives via fileUploadHandler.
/// </summary>
public void OnUploadFaceMaskButton ()
{
    WebGLFileUploadManager.PopupDialog (null, "Select frontal face image file (.png|.jpg|.gif)");
}
/// <summary>
/// Raises the remove face mask button event: clears the current mask.
/// </summary>
public void OnRemoveFaceMaskButton ()
{
    removeFaceMask ();
}
/// <summary>
/// Drops the current mask texture/Mat and clears all tracking and overlay state.
/// </summary>
private void removeFaceMask ()
{
    faceMaskTexture = null;
    if (faceMaskMat != null) {
        faceMaskMat.Dispose ();
        faceMaskMat = null;
    }
    rectangleTracker.Reset ();
    meshOverlay.Reset ();
}
/// <summary>
/// Files the upload handler: loads the first successfully uploaded image as
/// the new face mask and detects its face rect and landmark points.
/// </summary>
/// <param name="result">Result.</param>
private void fileUploadHandler (UploadedFileInfo[] result)
{
    if (result.Length == 0) {
        Debug.Log ("File upload Error!");
        return;
    }
    removeFaceMask ();
    // Use the first successful upload only.
    foreach (UploadedFileInfo file in result) {
        if (file.isSuccess) {
            Debug.Log ("file.filePath: " + file.filePath + " exists:" + File.Exists (file.filePath));
            faceMaskTexture = new Texture2D (2, 2); // LoadImage resizes the texture to the image.
            byte[] byteArray = File.ReadAllBytes (file.filePath);
            faceMaskTexture.LoadImage (byteArray);
            break;
        }
    }
    if (faceMaskTexture != null) {
        faceMaskMat = new Mat (faceMaskTexture.height, faceMaskTexture.width, CvType.CV_8UC4);
        OpenCVForUnity.Utils.texture2DToMat (faceMaskTexture, faceMaskMat);
        Debug.Log ("faceMaskMat ToString " + faceMaskMat.ToString ());
        faceRectInMask = detectFace (faceMaskMat);
        faceLandmarkPointsInMask = detectFaceLandmarkPoints (faceMaskMat, faceRectInMask);
        // An all-zero rect means detection failed; drop the mask again.
        if (faceRectInMask.width == 0 && faceRectInMask.height == 0){
            removeFaceMask ();
            Debug.Log ("A face could not be detected from the input image.");
        }
    }
}
/// <summary>
/// Detects the first face in the given image, using either the Dlib detector
/// or the haar cascade (mirroring the pipeline in Update).
/// Returns a default (all-zero) rect when no face is found.
/// </summary>
private UnityEngine.Rect detectFace (Mat mat)
{
    if (useDlibFaceDetecter) {
        OpenCVForUnityUtils.SetImage (faceLandmarkDetector, mat);
        List<UnityEngine.Rect> result = faceLandmarkDetector.Detect ();
        if (result.Count >= 1)
            return result [0];
    } else {
        using (Mat grayMat = new Mat ())
        using (Mat equalizeHistMat = new Mat ())
        using (MatOfRect faces = new MatOfRect ()) {
            // convert image to greyscale.
            Imgproc.cvtColor (mat, grayMat, Imgproc.COLOR_RGBA2GRAY);
            Imgproc.equalizeHist (grayMat, equalizeHistMat);
            // Minimum face size is 15% of the image width (same as Update).
            cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());
            List<OpenCVForUnity.Rect> faceList = faces.toList ();
            if (faceList.Count >= 1) {
                UnityEngine.Rect r = new UnityEngine.Rect (faceList [0].x, faceList [0].y, faceList [0].width, faceList [0].height);
                // Adjust to Dilb's result: shift the haar rect down by 10% of its height.
                r.y += (int)(r.height * 0.1f);
                return r;
            }
        }
    }
    return new UnityEngine.Rect ();
}
/// <summary>
/// Detects the facial landmark points for the given face rectangle in the image.
/// </summary>
/// <param name="mat">RGBA input image.</param>
/// <param name="rect">Face rectangle to search for landmarks within.</param>
/// <returns>The detected landmark points.</returns>
private List<Vector2> detectFaceLandmarkPoints(Mat mat, UnityEngine.Rect rect)
{
    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, mat);
    return faceLandmarkDetector.DetectLandmark(rect);
}
/*
private void dumpRect(UnityEngine.Rect rect){
string r = "new Rect(" + rect.x + ", " + rect.y + ", " + rect.width + ", " + rect.height + ")";
Debug.Log ("dumpRect:" + "\n" + r);
}
private void dumpVector2(List<Vector2> points){
string p = "";
int i = 0;
foreach (var item in points) {
p += "new Vector2(" + "" + item.x + ", " + item.y + "),\n";
i++;
}
Debug.Log ("dumpMeshVector2:" + "\n" + p);
}
private void dumpVector3(List<Vector2> points){
string p = "";
int i = 0;
foreach (var item in points) {
//p += ", " + i + ":" + item;
p += "new Vector3(" + "" + item.x + ", " + item.y + "),\n";
i++;
}
Debug.Log ("dumpMeshVector3:" + "\n" + p);
}
private MeshFilter createFaceMesh (float textureWidth, float textureHeight)
{
GameObject newObj = new GameObject("FaceMesh");
MeshFilter meshFilter = newObj.AddComponent<MeshFilter>();
newObj.AddComponent<MeshCollider>();
MeshRenderer meshRenderer = newObj.AddComponent<MeshRenderer>();
meshRenderer.material = new Material(Shader.Find("Hide/FadeShader"));
//vertices
Vector3[] vertices = new Vector3[68]{
new Vector3(63, 170),
new Vector3(65, 190),
new Vector3(69, 211),
new Vector3(74, 231),
new Vector3(83, 250),
new Vector3(95, 267),
new Vector3(110, 279),
new Vector3(126, 288),
new Vector3(145, 289),
new Vector3(164, 285),
new Vector3(180, 273),
new Vector3(193, 256),
new Vector3(202, 236),
new Vector3(207, 214),
new Vector3(210, 193),
new Vector3(210, 171),
new Vector3(207, 149),
new Vector3(70, 159),
new Vector3(76, 147),
new Vector3(90, 145),
new Vector3(103, 147),
new Vector3(118, 152),
new Vector3(138, 149),
new Vector3(151, 140),
new Vector3(167, 133),
new Vector3(183, 132),
new Vector3(194, 142),
new Vector3(129, 163),
new Vector3(130, 178),
new Vector3(132, 192),
new Vector3(133, 207),
new Vector3(121, 217),
new Vector3(128, 220),
new Vector3(137, 222),
new Vector3(145, 218),
new Vector3(152, 213),
new Vector3(86, 167),
new Vector3(93, 161),
new Vector3(104, 160),
new Vector3(112, 167),
new Vector3(104, 171),
new Vector3(93, 171),
new Vector3(151, 162),
new Vector3(159, 153),
new Vector3(170, 150),
new Vector3(179, 155),
new Vector3(172, 161),
new Vector3(161, 163),
new Vector3(114, 248),
new Vector3(123, 243),
new Vector3(131, 240),
new Vector3(139, 240),
new Vector3(145, 237),
new Vector3(156, 237),
new Vector3(166, 240),
new Vector3(159, 248),
new Vector3(149, 252),
new Vector3(142, 254),
new Vector3(134, 254),
new Vector3(124, 253),
new Vector3(119, 248),
new Vector3(132, 245),
new Vector3(139, 245),
new Vector3(146, 243),
new Vector3(162, 241),
new Vector3(147, 244),
new Vector3(140, 246),
new Vector3(133, 247)
};
Vector3[] vertices2 = (Vector3[])vertices.Clone();
for (int j = 0; j < vertices2.Length; j++) {
vertices2 [j].x = vertices2 [j].x - textureWidth/2;
vertices2 [j].y = textureHeight/2 - vertices2 [j].y;
}
//Flip X axis
for (int j = 0; j < vertices2.Length; j++) {
vertices2 [j].x = -vertices2 [j].x;
}
meshFilter.mesh.vertices = vertices2;
//triangles
//int[] triangles = new int[327]{
int[] triangles = new int[309]{
//Around the right eye 21
0,36,1,
1,36,41,
1,41,31,
41,40,31,
40,29,31,
40,39,29,
39,28,29,
39,27,28,
39,21,27,
38,21,39,
20,21,38,
37,20,38,
37,19,20,
18,19,37,
18,37,36,
17,18,36,
0,17,36,
36,37,41,
37,40,41,
37,38,40,
38,39,40,
//Around the left eye 21
45,16,15,
46,45,15,
46,15,35,
47,46,35,
29,47,35,
42,47,29,
28,42,29,
27,42,28,
27,22,42,
22,43,42,
22,23,43,
23,44,43,
23,24,44,
24,25,44,
44,25,45,
25,26,45,
45,26,16,
44,45,46,
47,44,46,
43,44,47,
42,43,47,
//Eyebrows, nose and cheeks 13
20,23,21,
21,23,22,
21,22,27,
29,30,31,
29,35,30,
30,32,31,
30,33,32,
30,34,33,
30,35,34,
1,31,2,
2,31,3,
35,15,14,
35,14,13,
//mouth 48
33,51,50,
32,33,50,
31,32,50,
31,50,49,
31,49,48,
3,31,48,
3,48,4,
4,48,5,
48,59,5,
5,59,6,
59,58,6,
58,7,6,
58,57,7,
57,8,7,
57,9,8,
57,56,9,
56,10,9,
56,55,10,
55,11,10,
55,54,11,
54,12,11,
54,13,12,
35,13,54,
35,54,53,
35,53,52,
34,35,52,
33,34,52,
33,52,51,
48,49,60,
48,60,59,
49,50,61,
49,61,60,
60,67,59,
59,67,58,
50,51,61,
51,62,61,
67,66,58,
66,57,58,
51,52,63,
51,63,62,
66,65,56,
66,56,57,
52,53,63,
53,64,63,
65,64,55,
65,55,56,
53,54,64,
64,54,55
//inner mouth 6
//60,61,67,
//61,62,67,
//62,66,67,
//62,63,65,
//62,65,66,
//63,64,65,
};
//Flip X axis
for (int j = 0; j < triangles.Length; j=j+3) {
int a = triangles [j+1];
int b = triangles [j+2];
triangles [j+1] = b;
triangles [j+2] = a;
}
meshFilter.mesh.triangles = triangles;
//uv
Vector2[] uv = new Vector2[68];
for (int j = 0; j < uv.Length; j++) {
uv [j].x = vertices[j].x / textureWidth;
uv [j].y = (textureHeight - vertices[j].y) / textureHeight;
}
meshFilter.mesh.uv = uv;
meshFilter.mesh.RecalculateBounds ();
meshFilter.mesh.RecalculateNormals ();
// string v = "";
// foreach (var item in vertices) {
// v += "," + item;
// }
// Debug.Log ("vertices: " + v);
//
// string t = "";
// foreach (var item in triangles) {
// t += "," + item;
// }
// Debug.Log ("triangles: " + t);
//
// string u = "";
// foreach (var item in uv) {
// u += "," + item;
// }
// Debug.Log ("uv: " + u);
return meshFilter;
}
*/
}
}
网友评论
525517400@qq.com