using System;
using System.Threading;
using System.Threading.Tasks;
using System.Runtime.InteropServices;
using Microsoft.Azure.Kinect.Sensor;
using Microsoft.Azure.Kinect.BodyTracking;

public bool HasData { get; private set; } = false;

private object dataMutex = new object();
private CancellationTokenSource cancellationTokenSource = new CancellationTokenSource();

UnityEditor.EditorApplication.quitting += OnEditorClose;

UnityEditor.EditorApplication.quitting -= OnEditorClose;

cancellationTokenSource?.Cancel();
cancellationTokenSource?.Dispose();
cancellationTokenSource = null;

var temp = frontBuffer;

UnityEngine.Debug.Log("Starting body tracker background thread.");

if (Device.GetInstalledCount() <= deviceId) {
    throw new Exception(
        "SkeletalFrameDataProvider - Cannot open device ID " + deviceId +
        ". Only " + Device.GetInstalledCount() +
        " devices are connected. Terminating thread.");
}

using (Device device = Device.Open(deviceId)) {

device.StartCameras(new DeviceConfiguration() {
    CameraFPS = FPS.FPS30,
    ColorResolution = ColorResolution.Off,
    DepthMode = DepthMode.NFOV_Unbinned,
    WiredSyncMode = WiredSyncMode.Standalone,
    DisableStreamingIndicator = false
});

UnityEngine.Debug.Log(
    "SkeletalFrameDataProvider - Opened K4A device successfully. Device ID: " + deviceId +
    ", Serial Number: " + device.SerialNum);

var trackerCalibration = device.GetCalibration();
TrackerConfiguration trackerConfig = new TrackerConfiguration() {
    ProcessingMode = TrackerProcessingMode.Cpu,
};

bool isFirstFrame = true;
TimeSpan initialTimestamp = new TimeSpan(0);
using (Tracker tracker = Tracker.Create(trackerCalibration, trackerConfig)) {
    while (!token.IsCancellationRequested) {

using (Capture sensorCapture = device.GetCapture()) {
    tracker.EnqueueCapture(sensorCapture);

using (Frame frame = tracker.PopResult(TimeSpan.Zero, throwOnTimeout: false)) {

UnityEngine.Debug.Log(
    $"SkeletalFrameDataProvider - ID: {deviceId}, Pop result from tracker timeout!");

backBuffer.NumDetectedBodies = frame.NumberOfBodies;

Capture bodyFrameCapture = frame.Capture;
Image depthImage = bodyFrameCapture.Depth;

isFirstFrame = false;
initialTimestamp = depthImage.DeviceTimestamp;

backBuffer.TimestampInMs = (float)(depthImage.DeviceTimestamp - initialTimestamp).TotalMilliseconds;
backBuffer.DepthImageWidth = depthImage.WidthPixels;
backBuffer.DepthImageHeight = depthImage.HeightPixels;

var depthFrame = MemoryMarshal.Cast<byte, ushort>(depthImage.Memory.Span);

const float MAX_DISPLAYED_DEPTH_IN_MILLIMETERS = 5000.0f;

backBuffer.DepthImageSize = backBuffer.DepthImageWidth * backBuffer.DepthImageHeight * 3;

byte b = (byte)(depthFrame[it] / MAX_DISPLAYED_DEPTH_IN_MILLIMETERS * 255);
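// Illustrative sketch of the surrounding per-pixel loop (elided in this excerpt):
// each 16-bit depth value is normalized against MAX_DISPLAYED_DEPTH_IN_MILLIMETERS
// and written three times into the 3-bytes-per-pixel grayscale buffer sized above.
// The destination field name backBuffer.DepthImage is an assumption.
//
//   for (int it = 0; it < backBuffer.DepthImageWidth * backBuffer.DepthImageHeight; it++) {
//       byte b = (byte)(depthFrame[it] / MAX_DISPLAYED_DEPTH_IN_MILLIMETERS * 255);
//       backBuffer.DepthImage[it * 3 + 0] = b;
//       backBuffer.DepthImage[it * 3 + 1] = b;
//       backBuffer.DepthImage[it * 3 + 2] = b;
//   }
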
} catch (Exception e) {
    UnityEngine.Debug.Log(
        $"SkeletalFrameDataProvider - ID: {deviceId}, Caught exception in background thread: {e.Message}");
}

UnityEngine.Debug.Log(
    $"SkeletalFrameDataProvider - ID: {deviceId}, Shutting down background thread.");

private void SwapBuffers() {
    var temp = backBuffer;
    backBuffer = frontBuffer;
    frontBuffer = temp;
}

Processes data from the ORBBEC sensor in a background thread to produce FrameData.
bool GetData(ref FrameData output)
SensorOrientation Orientation
delegate void FinishCallback()
bool HasStarted
Flag to determine whether the background thread has started.
void RunBackgroundThreadAsync(int deviceId, CancellationToken token, FinishCallback onFinish)
FrameDataProvider(int deviceId, SensorOrientation orientation, FinishCallback onFinish)
BodyData[] Bodies
Array of bodies. Use NumDetectedBodies to determine how many bodies contain useful data.
void CopyFromBodyTrackingSdk(Microsoft.Azure.Kinect.BodyTracking.Body body, Calibration sensorCalibration)
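A minimal usage sketch of the API summarized above. It assumes GetData copies the most recent frame into its argument and returns true when new data is available, and that FrameData has a parameterless constructor; the MonoBehaviour host and the SkeletonConsumer name are illustrative, not part of the documented surface.

using UnityEngine;
using Microsoft.Azure.Kinect.BodyTracking;

public class SkeletonConsumer : MonoBehaviour {
    private FrameDataProvider provider;
    private FrameData frame = new FrameData();

    void Start() {
        // Device 0, default mounting orientation; the FinishCallback fires when the
        // background thread shuts down.
        provider = new FrameDataProvider(0, SensorOrientation.Default,
            () => Debug.Log("SkeletalFrameDataProvider finished."));
    }

    void Update() {
        // Wait until the background thread has spun up.
        if (!provider.HasStarted) {
            return;
        }

        // Poll for the latest frame; Bodies[0..NumDetectedBodies-1] hold valid data.
        if (provider.GetData(ref frame)) {
            for (var i = 0; i < (int)frame.NumDetectedBodies; i++) {
                BodyData body = frame.Bodies[i];
                // ... consume joint data here ...
            }
        }
    }
}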