Guest User

FaceTracker

a guest
Nov 20th, 2012
311
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
C# 29.11 KB | None | 0 0
  1. // --------------------------------------------------------------------------------------------------------------------
  2. // <copyright file="FaceTracker.cs" company="Microsoft">
  3. //     Copyright (c) Microsoft Corporation.  All rights reserved.
  4. // </copyright>
  5. // --------------------------------------------------------------------------------------------------------------------
  6.  
  7. namespace Microsoft.Kinect.Toolkit.FaceTracking
  8. {
  9.     using System;
  10.     using System.Collections.Specialized;
  11.     using System.Configuration;
  12.     using System.Diagnostics;
  13.     using System.Globalization;
  14.     using System.Runtime.InteropServices;
  15.  
  16.     /// <summary>
  17.     /// Main class that instantiates the face tracking engine and tracks the faces of a single person
  18.     /// retrieving various metrics like animation units, 3D points and triangles on the face.
  19.     /// </summary>
  20.     public class FaceTracker : IDisposable
  21.     {
        /// <summary>
        /// A constant zoom factor is used for now, since Windows Kinect does not support
        /// different zoom levels.
        /// </summary>
        internal const float DefaultZoomFactor = 1.0f;

        /// <summary>Name of the TraceSwitch used to configure face tracking trace verbosity.</summary>
        private const string FaceTrackTraceSwitchName = "KinectForWindowsFaceTracking";

        /// <summary>Category tag attached to trace output emitted by this class.</summary>
        private const string TraceCategory = "FTR";

        /// <summary>AppSettings key naming the file that trace output is redirected to.</summary>
        private const string TraceLogFileName = "TraceLogFile";

        /// <summary>Accumulates time spent copying color/depth frame data into the face tracking images.</summary>
        private readonly Stopwatch copyStopwatch = new Stopwatch();

        /// <summary>Color image format the sensor was using when this tracker was constructed.</summary>
        private readonly ColorImageFormat initializationColorImageFormat;

        /// <summary>Depth image format the sensor was using when this tracker was constructed.</summary>
        private readonly DepthImageFormat initializationDepthImageFormat;

        /// <summary>Input mode of the tracker; this class only ever sets Kinect mode.</summary>
        private readonly OperationMode operationMode = OperationMode.Kinect;

        /// <summary>Kinect sensor supplying color and depth frames and the depth-to-color mapping.</summary>
        private readonly KinectSensor sensor;

        /// <summary>Accumulates time spent inside the native Start/ContinueTracking calls.</summary>
        private readonly Stopwatch startOrContinueTrackingStopwatch = new Stopwatch();

        /// <summary>Accumulates total time spent in Track() across all calls.</summary>
        private readonly Stopwatch trackStopwatch = new Stopwatch();

        /// <summary>Native image wrapper that color frame data is copied into (or attached to).</summary>
        private Image colorFaceTrackingImage;

        /// <summary>Camera configuration describing the depth stream.</summary>
        private CameraConfig depthCameraConfig;

        /// <summary>Native image wrapper that depth frame data is copied into (or attached to).</summary>
        private Image depthFaceTrackingImage;

        /// <summary>True once Dispose has run; guards against double disposal.</summary>
        private bool disposed;

        /// <summary>Lazily created face model wrapper; see the FaceModel property.</summary>
        private FaceModel faceModel;

        /// <summary>COM interface pointer to the native face tracking engine.</summary>
        private IFTFaceTracker faceTrackerInteropPtr;

        /// <summary>Reusable frame object that receives results from the native tracker.</summary>
        private FaceTrackFrame frame;

        /// <summary>trackStopwatch elapsed milliseconds recorded at the last successful track.</summary>
        private long lastSuccessTrackElapsedMs;

        // Stored in a field (not just passed to GetFunctionPointerForDelegate) — presumably to keep
        // the delegate alive while native code holds the function pointer. TODO confirm.
        private FaceTrackingRegisterDepthToColor registerDepthToColorDelegate;

        /// <summary>Total milliseconds attributed to tracks that succeeded.</summary>
        private long totalSuccessTrackMs;

        /// <summary>Number of Track() calls that produced a successful result.</summary>
        private int totalSuccessTracks;

        /// <summary>Total number of Track() calls made on this instance.</summary>
        private int totalTracks;

        // trace settings
        private TraceLevel traceLevel = TraceLevel.Off;

        /// <summary>Whether the most recent Track() succeeded; selects ContinueTracking vs. StartTracking.</summary>
        private bool trackSucceeded;

        /// <summary>Camera configuration describing the color (video) stream.</summary>
        private CameraConfig videoCameraConfig;
  78.  
  79.         static FaceTracker()
  80.         {
  81.             try
  82.             {
  83.                 NameValueCollection appSettings = ConfigurationManager.AppSettings;
  84.                 string logFileName = appSettings[TraceLogFileName];
  85.                 if (!string.IsNullOrEmpty(logFileName))
  86.                 {
  87.                     foreach (TraceListener tl in Trace.Listeners)
  88.                     {
  89.                         var defaultListener = tl as DefaultTraceListener;
  90.                         if (defaultListener != null)
  91.                         {
  92.                             defaultListener.LogFileName = logFileName;
  93.                             break;
  94.                         }
  95.                     }
  96.  
  97.                     DateTime cur = DateTime.Now;
  98.                     Trace.WriteLine(
  99.                         string.Format(
  100.                             CultureInfo.InvariantCulture, "---------------------------------------------------------------------------"));
  101.                     Trace.WriteLine(
  102.                         string.Format(
  103.                             CultureInfo.InvariantCulture,
  104.                             "Starting Trace. Time={0} {1}, Machine={2}, Processor={3}, OS={4}",
  105.                             cur.ToShortDateString(),
  106.                             cur.ToLongTimeString(),
  107.                             Environment.MachineName,
  108.                             Environment.Is64BitProcess ? "64bit" : "32bit",
  109.                             Environment.OSVersion));
  110.                     Trace.WriteLine(
  111.                         string.Format(
  112.                             CultureInfo.InvariantCulture, "---------------------------------------------------------------------------"));
  113.                 }
  114.             }
  115.             catch (Exception ex)
  116.             {
  117.                 Trace.WriteLine(
  118.                     string.Format(CultureInfo.InvariantCulture, "Failed to set logfile for logging trace output. Exception={0}", ex));
  119.  
  120.                 throw;
  121.             }
  122.         }
  123.  
        /// <summary>
        /// Initializes a new instance of the FaceTracker class from a reference of the Kinect device.
        /// The sensor's color and depth streams must already be enabled before construction.
        /// </summary>
        /// <param name="sensor">Reference to kinect sensor instance</param>
        /// <exception cref="ArgumentNullException">Thrown when sensor is null</exception>
        /// <exception cref="InvalidOperationException">Thrown when the sensor's color or depth stream is not enabled</exception>
        public FaceTracker(KinectSensor sensor)
        {
            if (sensor == null)
            {
                throw new ArgumentNullException("sensor");
            }

            if (!sensor.ColorStream.IsEnabled)
            {
                throw new InvalidOperationException("Color stream is not enabled yet.");
            }

            if (!sensor.DepthStream.IsEnabled)
            {
                throw new InvalidOperationException("Depth stream is not enabled yet.");
            }

            this.operationMode = OperationMode.Kinect;
            this.sensor = sensor;

            // Remember the stream formats so Track() can reject frames in a different format.
            this.initializationColorImageFormat = sensor.ColorStream.Format;
            this.initializationDepthImageFormat = sensor.DepthStream.Format;

            // Build camera configurations from the current stream dimensions and focal lengths.
            var newColorCameraConfig = new CameraConfig(
                (uint)sensor.ColorStream.FrameWidth,
                (uint)sensor.ColorStream.FrameHeight,
                sensor.ColorStream.NominalFocalLengthInPixels,
                FaceTrackingImageFormat.FTIMAGEFORMAT_UINT8_B8G8R8X8);
            var newDepthCameraConfig = new CameraConfig(
                (uint)sensor.DepthStream.FrameWidth,
                (uint)sensor.DepthStream.FrameHeight,
                sensor.DepthStream.NominalFocalLengthInPixels,
                FaceTrackingImageFormat.FTIMAGEFORMAT_UINT16_D13P3);

            // IntPtr.Zero for both image pointers: image buffers are allocated internally and
            // filled per-frame by Track() rather than attached to caller-owned memory.
            this.Initialize(newColorCameraConfig, newDepthCameraConfig, IntPtr.Zero, IntPtr.Zero, this.DepthToColorCallback);
        }
  162.  
        /// <summary>
        /// Finalizes an instance of the FaceTracker class
        /// </summary>
        ~FaceTracker()
        {
            // false: invoked from the finalizer, so Dispose(bool) knows it was not called explicitly.
            this.Dispose(false);
        }
  170.  
        /// <summary>
        /// Camera configuration describing the color (video) stream this tracker was initialized with.
        /// </summary>
        internal CameraConfig ColorCameraConfig
        {
            get
            {
                return this.videoCameraConfig;
            }
        }
  178.  
        /// <summary>
        /// Returns reference to FaceModel class for the loaded face model.
        /// The wrapper is created lazily on first access and cached for later accesses.
        /// </summary>
        /// <exception cref="InvalidOperationException">Thrown when the native tracker is not initialized or has been disposed</exception>
        internal FaceModel FaceModel
        {
            get
            {
                this.CheckPtrAndThrow();
                if (this.faceModel == null)
                {
                    // Fetch the native model once and wrap it; disposed in Dispose(bool).
                    IFTModel faceTrackModelPtr;
                    this.faceTrackerInteropPtr.GetFaceModel(out faceTrackModelPtr);
                    this.faceModel = new FaceModel(this, faceTrackModelPtr);
                }

                return this.faceModel;
            }
        }
  197.  
        /// <summary>
        /// COM interface pointer to the native face tracking engine; null after disposal.
        /// </summary>
        internal IFTFaceTracker FaceTrackerPtr
        {
            get
            {
                return this.faceTrackerInteropPtr;
            }
        }
  205.  
        /// <summary>
        /// Stopwatch associated with the tracker. Accumulates the total time spent
        /// across all Track() calls on this instance.
        /// </summary>
        internal Stopwatch Stopwatch
        {
            get
            {
                return this.trackStopwatch;
            }
        }
  216.  
        /// <summary>
        /// Total number of tracking operations handled by the tracker,
        /// including calls that failed or threw.
        /// </summary>
        internal int TotalTracks
        {
            get
            {
                return this.totalTracks;
            }
        }
  227.  
        /// <summary>
        /// Disposes of the face tracking engine
        /// </summary>
        public void Dispose()
        {
            this.Dispose(true);
            // Explicit cleanup done; the finalizer no longer needs to run.
            GC.SuppressFinalize(this);
        }
  236.  
        /// <summary>
        /// Resets IFTFaceTracker instance to the clean state (like it is in right after the call
        /// to Initialize() method)
        /// </summary>
        /// <exception cref="InvalidOperationException">Thrown when the native tracker is not initialized or has been disposed</exception>
        public void ResetTracking()
        {
            this.CheckPtrAndThrow();
            // Clearing this flag forces the next Track() call onto the full-detection
            // StartTracking path instead of ContinueTracking.
            this.trackSucceeded = false;
            this.faceTrackerInteropPtr.Reset();
        }
  247.  
        /// <summary>
        /// Starts face tracking from Kinect input data. Track() detects a face
        /// based on the passed parameters, then identifies characteristic
        /// points and begins tracking. The first call to this API is more
        /// expensive, but if the tracking succeeds then subsequent calls use
        /// the tracking information generated from first call and is faster,
        /// until a tracking failure happens.
        /// </summary>
        /// <param name="colorImageFormat">format of the colorImage array</param>
        /// <param name="colorImage">Input color image frame retrieved from Kinect sensor</param>
        /// <param name="depthImageFormat">format of the depthImage array</param>
        /// <param name="depthImage">Input depth image frame retrieved from Kinect sensor</param>
        /// <param name="skeletonOfInterest">Input skeleton to track. Head &amp; shoulder joints in the skeleton are used to calculate the head vector</param>
        /// <returns>Returns computed face tracking results for this image frame</returns>
        public FaceTrackFrame Track(
            ColorImageFormat colorImageFormat,
            byte[] colorImage,
            DepthImageFormat depthImageFormat,
            short[] depthImage,
            Skeleton skeletonOfInterest)
        {
            // Search the entire frame (Rect.Empty), seeded with the skeleton's head points.
            return this.Track(colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest, Rect.Empty);
        }
  271.  
        /// <summary>
        /// Starts face tracking from Kinect input data. Track() detects a face
        /// based on the passed parameters, then identifies characteristic
        /// points and begins tracking. The first call to this API is more
        /// expensive, but if the tracking succeeds then subsequent calls use
        /// the tracking information generated from first call and is faster,
        /// until a tracking failure happens.
        /// </summary>
        /// <param name="colorImageFormat">
        /// format of the colorImage array
        /// </param>
        /// <param name="colorImage">
        /// Input color image frame retrieved from Kinect sensor
        /// </param>
        /// <param name="depthImageFormat">
        /// format of the depthImage array
        /// </param>
        /// <param name="depthImage">
        /// Input depth image frame retrieved from Kinect sensor
        /// </param>
        /// <param name="regionOfInterest">
        /// Region of interest in the passed video frame where the face tracker should search for a face to initiate tracking.
        /// Passing Rectangle.Empty (default) causes the entire frame to be searched.
        /// </param>
        /// <returns>
        /// Returns computed face tracking results for this image frame
        /// </returns>
        public FaceTrackFrame Track(
            ColorImageFormat colorImageFormat,
            byte[] colorImage,
            DepthImageFormat depthImageFormat,
            short[] depthImage,
            Rect regionOfInterest)
        {
            // No skeleton hint; restrict the face search to the supplied region of interest.
            return this.Track(colorImageFormat, colorImage, depthImageFormat, depthImage, null, regionOfInterest);
        }
  308.  
        /// <summary>
        /// Starts face tracking from Kinect input data. Track() detects a face
        /// based on the passed parameters, then identifies characteristic
        /// points and begins tracking. The first call to this API is more
        /// expensive, but if the tracking succeeds then subsequent calls use
        /// the tracking information generated from first call and is faster,
        /// until a tracking failure happens.
        /// </summary>
        /// <param name="colorImageFormat">
        /// format of the colorImage array
        /// </param>
        /// <param name="colorImage">
        /// Input color image frame retrieved from Kinect sensor
        /// </param>
        /// <param name="depthImageFormat">
        /// format of the depthImage array
        /// </param>
        /// <param name="depthImage">
        /// Input depth image frame retrieved from Kinect sensor
        /// </param>
        /// <returns>
        /// Returns computed face tracking results for this image frame
        /// </returns>
        public FaceTrackFrame Track(
            ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage)
        {
            // No skeleton hint and no region restriction: search the entire frame.
            return this.Track(colorImageFormat, colorImage, depthImageFormat, depthImage, null, Rect.Empty);
        }
  337.  
  338.         /// <summary>
  339.         /// Creates a frame object instance. Can be used for caching of the face tracking
  340.         /// frame. FaceTrackFrame should be disposed after use.
  341.         /// </summary>
  342.         /// <returns>
  343.         /// newly created frame object
  344.         /// </returns>
  345.         internal FaceTrackFrame CreateResult(out int hr)
  346.         {
  347.             IFTResult faceTrackResultPtr;
  348.             FaceTrackFrame faceTrackFrame = null;
  349.  
  350.             this.CheckPtrAndThrow();
  351.             hr = this.faceTrackerInteropPtr.CreateFTResult(out faceTrackResultPtr);
  352.             if (faceTrackResultPtr != null)
  353.             {
  354.                 faceTrackFrame = new FaceTrackFrame(faceTrackResultPtr, this);
  355.             }
  356.  
  357.             return faceTrackFrame;
  358.         }
  359.  
  360.         /// <summary>
  361.         /// Allows calling dispose explicitly or from the finalizer
  362.         /// </summary>
  363.         /// <param name="disposing">true to release both managed and unmanaged resources; false to release only unmanaged resources</param>
  364.         protected virtual void Dispose(bool disposing)
  365.         {
  366.             if (!this.disposed)
  367.             {
  368.                 string traceStr = string.Format(
  369.                     CultureInfo.InvariantCulture,
  370.                     "FaceTracker::Dispose() - TotalTracks={0}, TotalSuccessTracks={1}, TimePerTrack={2:F3}ms, TimePerSuccessTrack={3:F3}ms, TimePerDataCopy={4:F3}ms, TimePerStartOrContinueTracking={5:F3}ms",
  371.                     this.totalTracks,
  372.                     this.totalSuccessTracks,
  373.                     this.totalTracks > 0 ? (double)this.trackStopwatch.ElapsedMilliseconds / this.totalTracks : 0,
  374.                     this.totalSuccessTracks > 0 ? (double)this.totalSuccessTrackMs / this.totalSuccessTracks : 0,
  375.                     this.totalTracks > 0 ? (double)this.copyStopwatch.ElapsedMilliseconds / this.totalTracks : 0,
  376.                     this.totalTracks > 0 ? (double)this.startOrContinueTrackingStopwatch.ElapsedMilliseconds / this.totalTracks : 0);
  377. #if DEBUG
  378.                 Debug.WriteLine(traceStr);
  379. #else
  380.                 Trace.WriteLineIf(traceLevel >= TraceLevel.Info, traceStr);
  381. #endif
  382.                 if (this.faceModel != null)
  383.                 {
  384.                     this.faceModel.Dispose();
  385.                     this.faceModel = null;
  386.                 }
  387.  
  388.                 if (this.frame != null)
  389.                 {
  390.                     this.frame.Dispose();
  391.                     this.frame = null;
  392.                 }
  393.  
  394.                 if (this.colorFaceTrackingImage != null)
  395.                 {
  396.                     this.colorFaceTrackingImage.Dispose();
  397.                     this.colorFaceTrackingImage = null;
  398.                 }
  399.  
  400.                 if (this.depthFaceTrackingImage != null)
  401.                 {
  402.                     this.depthFaceTrackingImage.Dispose();
  403.                     this.depthFaceTrackingImage = null;
  404.                 }
  405.  
  406.                 if (this.faceTrackerInteropPtr != null)
  407.                 {
  408.                     Marshal.FinalReleaseComObject(this.faceTrackerInteropPtr);
  409.                     this.faceTrackerInteropPtr = null;
  410.                 }
  411.  
  412.                 this.disposed = true;
  413.             }
  414.         }
  415.  
  416.         /// <summary>
  417.         /// Helper API to retrieve head points structure from a given skeleton instance
  418.         /// </summary>
  419.         /// <param name="skeletonOfInterest">
  420.         /// skeleton from which head points are to be extracted
  421.         /// </param>
  422.         /// <returns>
  423.         /// HeadPoints that can be passed to Start/Continue tracking APIs
  424.         /// </returns>
  425.         private static Vector3DF[] GetHeadPointsFromSkeleton(Skeleton skeletonOfInterest)
  426.         {
  427.             Vector3DF[] headPoints = null;
  428.  
  429.             if (skeletonOfInterest != null && skeletonOfInterest.TrackingState == SkeletonTrackingState.Tracked)
  430.             {
  431.                 headPoints = new Vector3DF[2];
  432.  
  433.                 SkeletonPoint sp0 = skeletonOfInterest.Joints[JointType.ShoulderCenter].Position;
  434.                 headPoints[0] = new Vector3DF(sp0.X, sp0.Y, sp0.Z);
  435.  
  436.                 SkeletonPoint sp1 = skeletonOfInterest.Joints[JointType.Head].Position;
  437.                 headPoints[1] = new Vector3DF(sp1.X, sp1.Y, sp1.Z);
  438.             }
  439.  
  440.             return headPoints;
  441.         }
  442.  
        /// <summary>
        /// Guard used by members that require the native engine: throws when the
        /// tracker has not been initialized or has already been disposed.
        /// </summary>
        /// <exception cref="InvalidOperationException">Thrown when the native face tracker pointer is null</exception>
        private void CheckPtrAndThrow()
        {
            if (this.faceTrackerInteropPtr == null)
            {
                throw new InvalidOperationException("Native face tracker pointer in invalid state.");
            }
        }
  450.  
        /// <summary>
        /// Callback to help with mapping depth pixel to color pixel data. Uses Kinect sensor's MapDepthToColorImagePoint to
        /// do the conversion
        /// </summary>
        /// <param name="depthFrameWidth">width of the depth frame (unused here; the sensor's own configuration is used)</param>
        /// <param name="depthFrameHeight">height of the depth frame (unused here)</param>
        /// <param name="colorFrameWidth">width of the color frame (unused here)</param>
        /// <param name="colorFrameHeight">height of the color frame (unused here)</param>
        /// <param name="zoomFactor">zoom factor supplied by the native engine (unused here)</param>
        /// <param name="viewOffset">view offset supplied by the native engine (unused here)</param>
        /// <param name="depthX">x coordinate of the depth pixel to map</param>
        /// <param name="depthY">y coordinate of the depth pixel to map</param>
        /// <param name="depthZ">raw depth value at (depthX, depthY)</param>
        /// <param name="colorX">receives the mapped color pixel x coordinate (0 on failure)</param>
        /// <param name="colorY">receives the mapped color pixel y coordinate (0 on failure)</param>
        /// <returns>
        /// 0 on success; -1 when no sensor is available or the mapping call failed
        /// </returns>
        private int DepthToColorCallback(
            uint depthFrameWidth,
            uint depthFrameHeight,
            uint colorFrameWidth,
            uint colorFrameHeight,
            float zoomFactor,
            Point viewOffset,
            int depthX,
            int depthY,
            ushort depthZ,
            out int colorX,
            out int colorY)
        {
            int retCode = 0;
            colorX = 0;
            colorY = 0;

            if (this.sensor != null)
            {
                var colorPoint = new ColorImagePoint();
                try
                {
                    // Shift the depth value into the bits above the player-index bitmask,
                    // since MapDepthToColorImagePoint takes the packed depth/player-index format.
                    colorPoint = this.sensor.MapDepthToColorImagePoint(
                        this.sensor.DepthStream.Format,
                        depthX,
                        depthY,
                        (short)(depthZ << DepthImageFrame.PlayerIndexBitmaskWidth),
                        this.sensor.ColorStream.Format);
                }
                catch (InvalidOperationException e)
                {
                    string traceStr = string.Format(
                        CultureInfo.CurrentCulture,
                        "Exception on MapDepthToColorImagePoint while translating depth point({0},{1},{2}). Exception={3}",
                        depthX,
                        depthY,
                        depthZ,
                        e.Message);
                    Trace.WriteLineIf(this.traceLevel >= TraceLevel.Error, traceStr, TraceCategory);

                    retCode = -1;
                }

                // On failure colorPoint is still default-initialized, so (0,0) is reported
                // along with the -1 return code.
                colorX = colorPoint.X;
                colorY = colorPoint.Y;
            }
            else
            {
                retCode = -1;
            }

            return retCode;
        }
  511.  
        /// <summary>
        /// Helper method that does the core instantiation &amp; initialization of face tracking engine
        /// </summary>
        /// <param name="newColorCameraConfig">Color camera configuration</param>
        /// <param name="newDepthCameraConfig">Depth camera configuration</param>
        /// <param name="colorImagePtr">Allows face tracking engine to read color image from native memory pointer.
        /// If set to IntPtr.Zero, image data needs to be provided for tracking to this instance. </param>
        /// <param name="depthImagePtr">Allows face tracking engine to read depth image from native memory pointer.
        /// If set to IntPtr.Zero, image data needs to be provided for tracking to this instance.</param>
        /// <param name="newRegisterDepthToColorDelegate">Callback which maps of depth to color pixels</param>
        /// <exception cref="ArgumentNullException">Thrown when any of the required arguments is null</exception>
        /// <exception cref="InsufficientMemoryException">Thrown when the native tracker or the callback function pointer cannot be created</exception>
        /// <exception cref="InvalidOperationException">Thrown when native initialization or result-frame creation fails</exception>
        private void Initialize(
            CameraConfig newColorCameraConfig,
            CameraConfig newDepthCameraConfig,
            IntPtr colorImagePtr,
            IntPtr depthImagePtr,
            FaceTrackingRegisterDepthToColor newRegisterDepthToColorDelegate)
        {
            if (newColorCameraConfig == null)
            {
                throw new ArgumentNullException("newColorCameraConfig");
            }

            if (newDepthCameraConfig == null)
            {
                throw new ArgumentNullException("newDepthCameraConfig");
            }

            if (newRegisterDepthToColorDelegate == null)
            {
                throw new ArgumentNullException("newRegisterDepthToColorDelegate");
            }

            // initialize perf counters
            this.totalTracks = 0;
            this.trackStopwatch.Reset();

            // get configuration & trace settings
            this.traceLevel = new TraceSwitch(FaceTrackTraceSwitchName, FaceTrackTraceSwitchName).Level;

            this.videoCameraConfig = newColorCameraConfig;
            this.depthCameraConfig = newDepthCameraConfig;
            // Held in a field so the delegate stays referenced while native code uses its function pointer.
            this.registerDepthToColorDelegate = newRegisterDepthToColorDelegate;

            // Create the native face tracking engine (COM object).
            this.faceTrackerInteropPtr = NativeMethods.FTCreateFaceTracker(IntPtr.Zero);
            if (this.faceTrackerInteropPtr == null)
            {
                throw new InsufficientMemoryException("Cannot create face tracker.");
            }

            // Marshal the managed depth-to-color callback into a native function pointer.
            IntPtr funcPtr = Marshal.GetFunctionPointerForDelegate(this.registerDepthToColorDelegate);
            if (funcPtr == IntPtr.Zero)
            {
                throw new InsufficientMemoryException("Cannot setup callback for retrieving color to depth pixel mapping");
            }

            int hr = this.faceTrackerInteropPtr.Initialize(this.videoCameraConfig, this.depthCameraConfig, funcPtr, null);
            if (hr != 0)
            {
                throw new InvalidOperationException(
                    string.Format(CultureInfo.CurrentCulture, "Failed to initialize face tracker - Error code from native=0x{0:X}", hr));
            }

            // Create the single result frame that Track() reuses for every call.
            this.frame = this.CreateResult(out hr);
            if (this.frame == null || hr != 0)
            {
                throw new InvalidOperationException(
                    string.Format(CultureInfo.CurrentCulture, "Failed to create face tracking result. Error code from native=0x{0:X}", hr));
            }

            // Either allocate an internal color buffer (data copied in per frame by Track())
            // or attach directly to the caller-provided native memory.
            this.colorFaceTrackingImage = new Image();
            if (colorImagePtr == IntPtr.Zero)
            {
                this.colorFaceTrackingImage.Allocate(
                    this.videoCameraConfig.Width, this.videoCameraConfig.Height, this.videoCameraConfig.ImageFormat);
            }
            else
            {
                this.colorFaceTrackingImage.Attach(
                    this.videoCameraConfig.Width,
                    this.videoCameraConfig.Height,
                    colorImagePtr,
                    this.videoCameraConfig.ImageFormat,
                    this.videoCameraConfig.Stride);
            }

            // Same allocate-or-attach choice for the depth image.
            this.depthFaceTrackingImage = new Image();
            if (depthImagePtr == IntPtr.Zero)
            {
                this.depthFaceTrackingImage.Allocate(
                    this.depthCameraConfig.Width, this.depthCameraConfig.Height, this.depthCameraConfig.ImageFormat);
            }
            else
            {
                this.depthFaceTrackingImage.Attach(
                    this.depthCameraConfig.Width,
                    this.depthCameraConfig.Height,
                    depthImagePtr,
                    this.depthCameraConfig.ImageFormat,
                    this.depthCameraConfig.Stride);
            }
        }
  613.  
  614.         /// <summary>
  615.         /// Starts face tracking from Kinect input data. Track() detects a face
  616.         /// based on the passed parameters, then identifies characteristic
  617.         /// points and begins tracking. The first call to this API is more
  618.         /// expensive, but if the tracking succeeds then subsequent calls use
  619.         /// the tracking information generated from first call and is faster,
  620.         /// until a tracking failure happens.
  621.         /// </summary>
  622.         /// <param name="colorImageFormat">format of the colorImage array</param>
  623.         /// <param name="colorImage">Input color image frame retrieved from Kinect sensor</param>
  624.         /// <param name="depthImageFormat">format of the depthImage array</param>
  625.         /// <param name="depthImage">Input depth image frame retrieved from Kinect sensor</param>
  626.         /// <param name="skeletonOfInterest">Input skeleton to track. Head & shoulder joints in the skeleton are used to calculate the head vector</param>
  627.         /// <param name="regionOfInterest">Region of interest in the passed video frame where the face tracker should search for a face to initiate tracking.
  628.         /// Passing Rectangle.Empty (default) causes the entire frame to be searched.</param>
  629.         /// <returns>Returns computed face tracking results for this image frame</returns>
  630.         private FaceTrackFrame Track(
  631.             ColorImageFormat colorImageFormat,
  632.             byte[] colorImage,
  633.             DepthImageFormat depthImageFormat,
  634.             short[] depthImage,
  635.             Skeleton skeletonOfInterest,
  636.             Rect regionOfInterest)
  637.         {
  638.             this.totalTracks++;
  639.             this.trackStopwatch.Start();
  640.  
  641.             if (this.operationMode != OperationMode.Kinect)
  642.             {
  643.                 throw new InvalidOperationException(
  644.                     "Cannot use Track with Kinect input types when face tracker is initialized for tracking videos/images");
  645.             }
  646.  
  647.             if (colorImage == null)
  648.             {
  649.                 throw new ArgumentNullException("colorImage");
  650.             }
  651.  
  652.             if (depthImage == null)
  653.             {
  654.                 throw new ArgumentNullException("depthImage");
  655.             }
  656.  
  657.             if (colorImageFormat != this.initializationColorImageFormat)
  658.             {
  659.                 throw new InvalidOperationException("Color image frame format different from initialization");
  660.             }
  661.  
  662.             if (depthImageFormat != this.initializationDepthImageFormat)
  663.             {
  664.                 throw new InvalidOperationException("Depth image frame format different from initialization");
  665.             }
  666.  
  667.             if (colorImage.Length != this.videoCameraConfig.FrameBufferLength)
  668.             {
  669.                 throw new ArgumentOutOfRangeException("colorImage", "Color image data size is needs to match initialization configuration.");
  670.             }
  671.  
  672.             if (depthImage.Length != this.depthCameraConfig.FrameBufferLength)
  673.             {
  674.                 throw new ArgumentOutOfRangeException("depthImage", "Depth image data size is needs to match initialization configuration.");
  675.             }
  676.  
  677.             int hr;
  678.             HeadPoints headPointsObj = null;
  679.             Vector3DF[] headPoints = GetHeadPointsFromSkeleton(skeletonOfInterest);
  680.  
  681.             if (headPoints != null && headPoints.Length == 2)
  682.             {
  683.                 headPointsObj = new HeadPoints { Points = headPoints };
  684.             }
  685.  
  686.             this.copyStopwatch.Start();
  687.             this.colorFaceTrackingImage.CopyFrom(colorImage);
  688.             this.depthFaceTrackingImage.CopyFrom(depthImage);
  689.             this.copyStopwatch.Stop();
  690.  
  691.             var sensorData = new SensorData(this.colorFaceTrackingImage, this.depthFaceTrackingImage, DefaultZoomFactor, Point.Empty);
  692.             FaceTrackingSensorData faceTrackSensorData = sensorData.FaceTrackingSensorData;
  693.  
  694.             this.startOrContinueTrackingStopwatch.Start();
  695.             if (this.trackSucceeded)
  696.             {
  697.                 hr = this.faceTrackerInteropPtr.ContinueTracking(ref faceTrackSensorData, headPointsObj, this.frame.ResultPtr);
  698.             }
  699.             else
  700.             {
  701.                 hr = this.faceTrackerInteropPtr.StartTracking(
  702.                     ref faceTrackSensorData, ref regionOfInterest, headPointsObj, this.frame.ResultPtr);
  703.             }
  704.  
  705.             this.startOrContinueTrackingStopwatch.Stop();
  706.  
  707.             this.trackSucceeded = hr == (int)ErrorCode.Success && this.frame.Status == ErrorCode.Success;
  708.             this.trackStopwatch.Stop();
  709.  
  710.             if (this.trackSucceeded)
  711.             {
  712.                 ++this.totalSuccessTracks;
  713.                 this.totalSuccessTrackMs += this.trackStopwatch.ElapsedMilliseconds - this.lastSuccessTrackElapsedMs;
  714.                 this.lastSuccessTrackElapsedMs = this.trackStopwatch.ElapsedMilliseconds;
  715.             }
  716.  
  717.             return this.frame;
  718.         }
  719.     }
  720. }
Add Comment
Please, Sign In to add comment