MITK-IGT
IGT Extension of MITK
mitkKinectV2Controller.cpp
1/*============================================================================
2
3The Medical Imaging Interaction Toolkit (MITK)
4
5Copyright (c) German Cancer Research Center (DKFZ)
6All rights reserved.
7
8Use of this source code is governed by a 3-clause BSD license that can be
9found in the LICENSE file.
10
11============================================================================*/
13
14//Kinect V2 SDK
15#include <Kinect.h>
16
17//VTK
18#include <vtkPolyData.h>
19#include <vtkCellArray.h>
20#include <vtkPoints.h>
21#include <vtkSmartPointer.h>
22#include <vtkFloatArray.h>
23#include <vtkPointData.h>
24#include <vtkMath.h>
25
26#include <mitkVector.h>
27
28//Taken from the official Microsoft SDK samples. This should never be public or part of the class,
29//because it is only used for cleanup.
30// Safe release for interfaces
31template<class Interface>
32inline void SafeRelease(Interface *& pInterfaceToRelease)
33{
34 if (pInterfaceToRelease != nullptr)
35 {
36 pInterfaceToRelease->Release();
37 pInterfaceToRelease = nullptr;
38 }
39}
40
41namespace mitk
42{
72
74 m_pKinectSensor(nullptr),
75 m_pMultiSourceFrameReader(nullptr),
76 m_pCoordinateMapper(nullptr),
77 m_pColorRGBX(nullptr),
78 m_ConnectionCheck(false),
79 m_DepthCaptureWidth(512),
80 m_DepthCaptureHeight(424),
81 m_RGBCaptureWidth(1920),
82 m_RGBCaptureHeight(1080),
83 m_RGBBufferSize(1920*1080*3),
84 m_DepthBufferSize(sizeof(float)*512*424),
85 m_CameraCoordinates(nullptr),
86 m_ColorPoints(nullptr),
87 m_PolyData(nullptr),
88 m_TriangulationThreshold(0.0),
89 m_GenerateTriangularMesh(false)
90 {
91 // create heap storage for color pixel data in RGBX format
93 //initialize 3D world coordinates and texture coordinates
97 }
98
100 {
101 MITK_INFO << "~KinectV2ControllerPrivate";
102 }
103
107
108 KinectV2Controller::~KinectV2Controller()
109 {
110 MITK_INFO << "~KinectV2Controller";
111 delete d;
112 }
113
114 bool KinectV2Controller::OpenCameraConnection()
115 {
116 if (!d->m_ConnectionCheck)
117 {
118 HRESULT hr;
119 d->m_ConnectionCheck = true;
120
121 hr = GetDefaultKinectSensor(&d->m_pKinectSensor);
122
123 if (FAILED(hr))
124 {
125 d->m_ConnectionCheck = false;
126 }
127 else
128 {
129 hr = d->m_pKinectSensor->get_CoordinateMapper(&d->m_pCoordinateMapper);
130 if (FAILED(hr))
131 {
132 d->m_ConnectionCheck = false;
133 }
134 hr = d->m_pKinectSensor->Open();
135 }
136
137 if (!d->m_pKinectSensor || FAILED(hr))
138 {
139 d->m_ConnectionCheck = false;
140 MITK_WARN << "No Kinect 2 ready!";
141 }
142 else
143 {
144 MITK_INFO << "Kinect 2 successfully connected";
145 }
146 }
147 return d->m_ConnectionCheck;
148 }
149
150 bool KinectV2Controller::InitializeMultiFrameReader()
151 {
152 //check if it is already initialized
153 if(d->m_pMultiSourceFrameReader)
154 {
155 return true;
156 }
157 else //initialize the frame reader
158 {
159 HRESULT hr = d->m_pKinectSensor->OpenMultiSourceFrameReader(
160 FrameSourceTypes::FrameSourceTypes_Depth | FrameSourceTypes::FrameSourceTypes_Color | FrameSourceTypes::FrameSourceTypes_Infrared,
161 &d->m_pMultiSourceFrameReader);
162 if (SUCCEEDED(hr) && (d->m_pMultiSourceFrameReader))
163 {
164 MITK_INFO << "KinectV2 MultiFrameReader initialized";
165 return true;
166 }
167 }
168 return false;
169 }
170
171
172 bool KinectV2Controller::CloseCameraConnection()
173 {
174 // done with depth frame reader
175 MITK_INFO << "CloseConnection";
176 SafeRelease(d->m_pMultiSourceFrameReader);
177
178 // close the Kinect Sensor
179 if(d->m_pKinectSensor)
180 {
181 d->m_pKinectSensor->Close();
182 }
183
184 SafeRelease(d->m_pKinectSensor);
185 d->m_ConnectionCheck = false;
186 return true;
187 }
188
189 bool KinectV2Controller::UpdateCamera()
190 {
191 //AcquireLatestFrame() updates the camera, but for unknown
192 //reasons it cannot be used here in UpdateCamera()
193 //without resulting in random crashes of the app.
194 return true;
195 }
196
197 void KinectV2Controller::GetDistances(float* distances)
198 {
199 if(!InitializeMultiFrameReader())
200 {
201 MITK_ERROR << "Unable to initialize MultiFrameReader";
202 return;
203 }
204 IMultiSourceFrame* pMultiSourceFrame = nullptr;
205 IDepthFrame* pDepthFrame = nullptr;
206
207 HRESULT hr = -1; //SDK error format
208
209 static DWORD lastTime = 0;
210 DWORD currentTime = GetTickCount();
211 //Make sure we do not request data faster than 30 FPS; the Kinect V2 can only deliver 30 FPS.
212 if( unsigned int(currentTime - lastTime) > 33 )
213 {
214 hr = d->m_pMultiSourceFrameReader->AcquireLatestFrame(&pMultiSourceFrame);
215 lastTime = currentTime;
216 }
217
218 if (SUCCEEDED(hr))
219 {
220 IDepthFrameReference* pDepthFrameReference = nullptr;
221
222 hr = pMultiSourceFrame->get_DepthFrameReference(&pDepthFrameReference);
223 if (SUCCEEDED(hr))
224 {
225 hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
226 }
227 SafeRelease(pDepthFrameReference);
228 }
229
230 if (SUCCEEDED(hr))
231 {
232 UINT nDepthBufferSize = 0;
233 UINT16 *pDepthBuffer = nullptr;
234
235 if (SUCCEEDED(hr))
236 {
237 hr = pDepthFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &pDepthBuffer);
238 }
239 if (SUCCEEDED(hr))
240 {
241 UINT pointCount = d->m_DepthCaptureWidth * d->m_DepthCaptureHeight;
242 d->m_pCoordinateMapper->MapDepthFrameToCameraSpace(pointCount, pDepthBuffer, pointCount, d->m_CameraCoordinates);
243 vtkSmartPointer<vtkPoints> points = vtkSmartPointer<vtkPoints>::New();
244 vtkSmartPointer<vtkCellArray> vertices = vtkSmartPointer<vtkCellArray>::New();
245 vtkSmartPointer<vtkFloatArray> textureCoordinates = vtkSmartPointer<vtkFloatArray>::New();
246 textureCoordinates->SetNumberOfComponents(2);
247 textureCoordinates->Allocate(pointCount);
248
249 d->m_pCoordinateMapper->MapDepthFrameToColorSpace(pointCount, pDepthBuffer, pointCount, d->m_ColorPoints);
250
251 for(int i = 0; i < d->m_DepthCaptureHeight*d->m_DepthCaptureWidth; ++i)
252 {
253 vtkIdType id = points->InsertNextPoint(d->m_CameraCoordinates[i].X, d->m_CameraCoordinates[i].Y, d->m_CameraCoordinates[i].Z);
254 vertices->InsertNextCell(1);
255 vertices->InsertCellPoint(id);
256 distances[i] = static_cast<float>(*pDepthBuffer);
257 ++pDepthBuffer;
258
259 ColorSpacePoint colorPoint = d->m_ColorPoints[i];
260 // retrieve the depth to color mapping for the current depth pixel
261 int colorInDepthX = (int)(floor(colorPoint.X + 0.5));
262 int colorInDepthY = (int)(floor(colorPoint.Y + 0.5));
263
264 float xNorm = static_cast<float>(colorInDepthX)/d->m_RGBCaptureWidth;
265 float yNorm = static_cast<float>(colorInDepthY)/d->m_RGBCaptureHeight;
266
267 // make sure the depth pixel maps to a valid point in color space
268 if ( colorInDepthX >= 0 && colorInDepthX < d->m_RGBCaptureWidth && colorInDepthY >= 0 && colorInDepthY < d->m_RGBCaptureHeight )
269 {
270 textureCoordinates->InsertTuple2(id, xNorm, yNorm);
271 }
272 }
273 d->m_PolyData = vtkSmartPointer<vtkPolyData>::New();
274 d->m_PolyData->SetPoints(points);
275 d->m_PolyData->SetVerts(vertices);
276 d->m_PolyData->GetPointData()->SetTCoords(textureCoordinates);
277 }
278 else
279 {
280 MITK_ERROR << "AccessUnderlyingBuffer";
281 }
282 }
283 SafeRelease(pDepthFrame);
284 SafeRelease(pMultiSourceFrame);
285
286 if( hr != -1 && !SUCCEEDED(hr) )
287 {
288 //The thread gets here if data is requested faster than the device can deliver it.
289 //This may happen from time to time.
290 MITK_DEBUG << "HR result false in KinectV2Controller::GetDistances()";
291 return;
292 }
293 }
294
295 void KinectV2Controller::GetRgb(unsigned char* rgb)
296 {
297 if(!InitializeMultiFrameReader())
298 {
299 MITK_ERROR << "Unable to initialize MultiFrameReader";
300 return;
301 }
302 IMultiSourceFrame* pMultiSourceFrame = nullptr;
303 IColorFrame* pColorFrame = nullptr;
304
305 HRESULT hr = -1;
306
307 static DWORD lastTime = 0;
308 DWORD currentTime = GetTickCount();
309 //Make sure we do not request data faster than 30 FPS; the Kinect V2 can only deliver 30 FPS.
310 if( unsigned int(currentTime - lastTime) > 33 )
311 {
312 hr = d->m_pMultiSourceFrameReader->AcquireLatestFrame(&pMultiSourceFrame);
313 lastTime = currentTime;
314 }
315
316 ColorImageFormat imageFormat = ColorImageFormat_None;
317 UINT nColorBufferSize = 0;
318 RGBQUAD *pColorBuffer = nullptr;
319 // get color frame data
320 if (SUCCEEDED(hr))
321 {
322 hr = pColorFrame->get_RawColorImageFormat(&imageFormat);
323 }
324
325 if (SUCCEEDED(hr))
326 {
327 if (imageFormat == ColorImageFormat_Bgra)
328 {
329 hr = pColorFrame->AccessRawUnderlyingBuffer(&nColorBufferSize, reinterpret_cast<BYTE**>(&pColorBuffer));
330 }
331 else if (d->m_pColorRGBX)
332 {
333 pColorBuffer = d->m_pColorRGBX;
334 nColorBufferSize = d->m_RGBCaptureWidth * d->m_RGBCaptureHeight * sizeof(RGBQUAD);
335 hr = pColorFrame->CopyConvertedFrameDataToArray(nColorBufferSize, reinterpret_cast<BYTE*>(pColorBuffer), ColorImageFormat_Bgra);
336 }
337 else
338 {
339 hr = E_FAIL;
340 }
341 if (SUCCEEDED(hr))
342 {
343 for(int i = 0; i < d->m_RGBBufferSize; i+=3)
344 {
345 //convert from BGR to RGB
346 rgb[i+0] = pColorBuffer->rgbRed;
347 rgb[i+1] = pColorBuffer->rgbGreen;
348 rgb[i+2] = pColorBuffer->rgbBlue;
349 ++pColorBuffer;
350 }
351 }
352 }
353 SafeRelease(pColorFrame);
354 SafeRelease(pMultiSourceFrame);
355
356 if( hr != -1 && !SUCCEEDED(hr) )
357 {
358 //The thread gets here if data is requested faster than the device can deliver it.
359 //This may happen from time to time.
360 MITK_DEBUG << "HR result false in KinectV2Controller::GetRgb()";
361 }
362 }
363
364 void KinectV2Controller::GetAllData(float* distances, float* amplitudes, unsigned char* rgb)
365 {
366 if(!InitializeMultiFrameReader())
367 {
368 MITK_ERROR << "Unable to initialize MultiFrameReader";
369 return;
370 }
371
372 IMultiSourceFrame* pMultiSourceFrame = nullptr;
373 IDepthFrame* pDepthFrame = nullptr;
374 IColorFrame* pColorFrame = nullptr;
375 IInfraredFrame* pInfraRedFrame = nullptr;
376
377 HRESULT hr = -1;
378
379 static DWORD lastTime = 0;
380 DWORD currentTime = GetTickCount();
381 //Make sure we do not request data faster than 30 FPS; the Kinect V2 can only deliver 30 FPS.
382 if( unsigned int(currentTime - lastTime) > 33 )
383 {
384 hr = d->m_pMultiSourceFrameReader->AcquireLatestFrame(&pMultiSourceFrame);
385 lastTime = currentTime;
386 }
387
388 if (SUCCEEDED(hr))
389 {
390 IDepthFrameReference* pDepthFrameReference = nullptr;
391
392 hr = pMultiSourceFrame->get_DepthFrameReference(&pDepthFrameReference);
393 if (SUCCEEDED(hr))
394 {
395 hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
396 }
397 SafeRelease(pDepthFrameReference);
398 }
399
400 if (SUCCEEDED(hr))
401 {
402 IColorFrameReference* pColorFrameReference = nullptr;
403
404 hr = pMultiSourceFrame->get_ColorFrameReference(&pColorFrameReference);
405 if (SUCCEEDED(hr))
406 {
407 hr = pColorFrameReference->AcquireFrame(&pColorFrame);
408 }
409 SafeRelease(pColorFrameReference);
410 }
411
412 if (SUCCEEDED(hr))
413 {
414 IInfraredFrameReference* pInfraredFrameReference = nullptr;
415
416 hr = pMultiSourceFrame->get_InfraredFrameReference(&pInfraredFrameReference);
417 if (SUCCEEDED(hr))
418 {
419 hr = pInfraredFrameReference->AcquireFrame(&pInfraRedFrame);
420 }
421 SafeRelease(pInfraredFrameReference);
422 }
423
424 if (SUCCEEDED(hr))
425 {
426 UINT nDepthBufferSize = 0;
427 UINT16 *pDepthBuffer = nullptr;
428 UINT16 *pInfraRedBuffer = nullptr;
429
430 ColorImageFormat imageFormat = ColorImageFormat_None;
431 UINT nColorBufferSize = 0;
432 RGBQUAD *pColorBuffer = nullptr;
433
434 if (SUCCEEDED(hr))
435 {
436 hr = pDepthFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &pDepthBuffer);
437 }
438 if (SUCCEEDED(hr))
439 {
440 hr = pInfraRedFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &pInfraRedBuffer);
441 }
442 if (SUCCEEDED(hr))
443 {
444 UINT pointCount = d->m_DepthCaptureWidth * d->m_DepthCaptureHeight;
445 d->m_pCoordinateMapper->MapDepthFrameToCameraSpace(pointCount, pDepthBuffer, pointCount, d->m_CameraCoordinates);
449
450 const double meterfactor = 1000.0;
451
454 vertexIdList->Allocate(pointCount);
455 vertexIdList->SetNumberOfIds(pointCount);
456 for(unsigned int i = 0; i < pointCount; ++i)
457 {
458 vertexIdList->SetId(i, 0);
459 }
460
461 std::vector<bool> isPointValid;
462 isPointValid.resize(pointCount);
463 //Allocate the object once, otherwise it would automatically allocate new memory
464 //for every vertex and perform an expensive copy.
465 vertexIdList->Allocate(pointCount);
466 vertexIdList->SetNumberOfIds(pointCount);
467 textureCoordinates->SetNumberOfComponents(2);
468 textureCoordinates->Allocate(pointCount);
469
470 d->m_pCoordinateMapper->MapDepthFrameToColorSpace(pointCount, pDepthBuffer, pointCount, d->m_ColorPoints);
471
472 for(int j = 0; j < d->m_DepthCaptureHeight; ++j)
473 {
474 for(int i = 0; i < d->m_DepthCaptureWidth; ++i)
475 {
476 unsigned int pixelID = i+j*d->m_DepthCaptureWidth;
477 unsigned int inverseid = (d->m_DepthCaptureWidth - i - 1) + j*d->m_DepthCaptureWidth;
478
479 distances[inverseid] = static_cast<float>(*pDepthBuffer);
480 amplitudes[inverseid] = static_cast<float>(*pInfraRedBuffer);
481 ++pDepthBuffer;
482 ++pInfraRedBuffer;
483
484 if (d->m_CameraCoordinates[pixelID].Z<=mitk::eps)
485 {
486 isPointValid[pixelID] = false;
487 }
488 else
489 {
490 isPointValid[pixelID] = true;
491
492 //VTK would insert empty points into the polydata if we use
493 //points->InsertPoint(pixelID, cartesianCoordinates.GetDataPointer()).
494 //If we use points->InsertNextPoint(...) instead, the ID's do not
495 //correspond to the image pixel ID's. Thus, we have to save them
496 //in the vertexIdList.
497 //Kinect SDK delivers world coordinates in meters, so we have to
498 //convert to mm for MITK.
499 vertexIdList->SetId(pixelID, points->InsertNextPoint(-d->m_CameraCoordinates[pixelID].X*meterfactor, -d->m_CameraCoordinates[pixelID].Y*meterfactor, d->m_CameraCoordinates[pixelID].Z*meterfactor));
500
500
501 if(d->m_GenerateTriangularMesh)
502 {
503 if((i >= 1) && (j >= 1))
504 {
505 //This little piece of art explains the ID's:
506 //
507 // P(x_1y_1)---P(xy_1)
508 // | |
509 // | |
510 // | |
511 // P(x_1y)-----P(xy)
512 //
513 //We can only start triangulation if we are at vertex (1,1),
514 //because we need the other 3 vertices near this one.
515 //To go one pixel line back in the image array, we have to
516 //subtract 1x xDimension.
517 vtkIdType xy = pixelID;
518 vtkIdType x_1y = pixelID-1;
519 vtkIdType xy_1 = pixelID-d->m_DepthCaptureWidth;
520 vtkIdType x_1y_1 = xy_1-1;
521
522 //Find the corresponding vertex ID's in the saved vertexIdList:
523 vtkIdType xyV = vertexIdList->GetId(xy);
524 vtkIdType x_1yV = vertexIdList->GetId(x_1y);
525 vtkIdType xy_1V = vertexIdList->GetId(xy_1);
526 vtkIdType x_1y_1V = vertexIdList->GetId(x_1y_1);
527
528 if (isPointValid[xy]&&isPointValid[x_1y]&&isPointValid[x_1y_1]&&isPointValid[xy_1]) // check if points of cell are valid
529 {
530 double pointXY[3], pointX_1Y[3], pointXY_1[3], pointX_1Y_1[3];
531
532 points->GetPoint(xyV, pointXY);
533 points->GetPoint(x_1yV, pointX_1Y);
534 points->GetPoint(xy_1V, pointXY_1);
535 points->GetPoint(x_1y_1V, pointX_1Y_1);
536
537
538 if( (mitk::Equal(d->m_TriangulationThreshold, 0.0)) || ((vtkMath::Distance2BetweenPoints(pointXY, pointX_1Y) <= d->m_TriangulationThreshold)
539 && (vtkMath::Distance2BetweenPoints(pointXY, pointXY_1) <= d->m_TriangulationThreshold)
540 && (vtkMath::Distance2BetweenPoints(pointX_1Y, pointX_1Y_1) <= d->m_TriangulationThreshold)
541 && (vtkMath::Distance2BetweenPoints(pointXY_1, pointX_1Y_1) <= d->m_TriangulationThreshold)))
542 {
543 polys->InsertNextCell(3);
544 polys->InsertCellPoint(x_1yV);
545 polys->InsertCellPoint(xyV);
546 polys->InsertCellPoint(x_1y_1V);
547
548 polys->InsertNextCell(3);
549 polys->InsertCellPoint(x_1y_1V);
550 polys->InsertCellPoint(xyV);
551 polys->InsertCellPoint(xy_1V);
552 }
553 else
554 {
555 //We don't want triangulation, but we want to keep the vertex
556 vertices->InsertNextCell(1);
557 vertices->InsertCellPoint(xyV);
558 }
559 }
560 }
561 }
562 else
563 {
564 //We don't want triangulation, we only want vertices
565 vertices->InsertNextCell(1);
566 vertices->InsertCellPoint(vertexIdList->GetId(pixelID));
567 }
568
569 ColorSpacePoint colorPoint = d->m_ColorPoints[pixelID];
570 // retrieve the depth to color mapping for the current depth pixel
571 int colorInDepthX = (int)(floor(colorPoint.X + 0.5));
572 int colorInDepthY = (int)(floor(colorPoint.Y + 0.5));
573
574 float xNorm = -static_cast<float>(colorInDepthX)/d->m_RGBCaptureWidth;
575 float yNorm = static_cast<float>(colorInDepthY)/d->m_RGBCaptureHeight;
576
577 // make sure the depth pixel maps to a valid point in color space
578 if ( colorInDepthX >= 0 && colorInDepthX < d->m_RGBCaptureWidth && colorInDepthY >= 0 && colorInDepthY < d->m_RGBCaptureHeight )
579 {
580 textureCoordinates->InsertTuple2(vertexIdList->GetId(pixelID), xNorm, yNorm);
581 }
582 else
583 {
584 textureCoordinates->InsertTuple2(vertexIdList->GetId(pixelID), 0, 0);
585 }
586 }
587 }
588 }
589
590 d->m_PolyData = vtkSmartPointer<vtkPolyData>::New();
591 d->m_PolyData->SetPoints(points);
592 d->m_PolyData->SetVerts(vertices);
593 d->m_PolyData->SetPolys(polys);
594 d->m_PolyData->GetPointData()->SetTCoords(textureCoordinates);
595 }
596 else
597 {
598 MITK_ERROR << "AccessUnderlyingBuffer";
599 }
600
601 // get color frame data
602 if (SUCCEEDED(hr))
603 {
604 hr = pColorFrame->get_RawColorImageFormat(&imageFormat);
605 }
606
607 if (SUCCEEDED(hr))
608 {
609 if (imageFormat == ColorImageFormat_Bgra)
610 {
611 hr = pColorFrame->AccessRawUnderlyingBuffer(&nColorBufferSize, reinterpret_cast<BYTE**>(&pColorBuffer));
612 }
613 else if (d->m_pColorRGBX)
614 {
615 pColorBuffer = d->m_pColorRGBX;
616 nColorBufferSize = d->m_RGBCaptureWidth * d->m_RGBCaptureHeight * sizeof(RGBQUAD);
617 hr = pColorFrame->CopyConvertedFrameDataToArray(nColorBufferSize, reinterpret_cast<BYTE*>(pColorBuffer), ColorImageFormat_Bgra);
618 }
619 else
620 {
621 hr = E_FAIL;
622 }
623 if (SUCCEEDED(hr))
624 {
625 for(int j = 0; j < d->m_RGBCaptureHeight; ++j)
626 {
627 for(int i = 0; i < d->m_RGBCaptureWidth; ++i)
628 {
629 //the buffer has a size of 3*ResolutionX*ResolutionY (one entry per color channel),
630 //that's why the id is multiplied by 3.
631 unsigned int id = ((d->m_RGBCaptureWidth - i - 1) + j*d->m_RGBCaptureWidth)*3;
632 //convert from BGR to RGB
633 rgb[id+0] = pColorBuffer->rgbRed;
634 rgb[id+1] = pColorBuffer->rgbGreen;
635 rgb[id+2] = pColorBuffer->rgbBlue;
636 ++pColorBuffer;
637 }
638 }
639 }
640 }
641 }
642
643 SafeRelease(pDepthFrame);
644 SafeRelease(pColorFrame);
645 SafeRelease(pInfraRedFrame);
646 SafeRelease(pMultiSourceFrame);
647
648 if( hr != -1 && !SUCCEEDED(hr) )
649 {
650 //The thread gets here if data is requested faster than the device can deliver it.
651 //This may happen from time to time.
652 MITK_DEBUG << "HR result false in KinectV2Controller::GetAllData()";
653 }
654 }
655
656 void KinectV2Controller::GetAmplitudes( float* amplitudes )
657 {
658 if(!InitializeMultiFrameReader())
659 {
660 MITK_ERROR << "Unable to initialize MultiFrameReader";
661 return;
662 }
663
664 IMultiSourceFrame* pMultiSourceFrame = nullptr;
665 IInfraredFrame* pInfraRedFrame = nullptr;
666
667 HRESULT hr = -1;
668
669 static DWORD lastTime = 0;
670 DWORD currentTime = GetTickCount();
671 //Make sure we do not request data faster than 30 FPS; the Kinect V2 can only deliver 30 FPS.
672 if( unsigned int(currentTime - lastTime) > 33 )
673 {
674 hr = d->m_pMultiSourceFrameReader->AcquireLatestFrame(&pMultiSourceFrame);
675 lastTime = currentTime;
676 }
677
678 if (SUCCEEDED(hr))
679 {
680 IInfraredFrameReference* pInfraredFrameReference = nullptr;
681
682 hr = pMultiSourceFrame->get_InfraredFrameReference(&pInfraredFrameReference);
683 if (SUCCEEDED(hr))
684 {
685 hr = pInfraredFrameReference->AcquireFrame(&pInfraRedFrame);
686 }
687 SafeRelease(pInfraredFrameReference);
688 }
689
690 if (SUCCEEDED(hr))
691 {
692 UINT nDepthBufferSize = 0;
693 UINT16 *pInfraRedBuffer = nullptr;
694
695 if (SUCCEEDED(hr))
696 {
697 hr = pInfraRedFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &pInfraRedBuffer);
698 }
699 if (SUCCEEDED(hr))
700 {
701 for(int i = 0; i < d->m_DepthCaptureHeight*d->m_DepthCaptureWidth; ++i)
702 {
703 amplitudes[i] = static_cast<float>(*pInfraRedBuffer);
704 ++pInfraRedBuffer;
705 }
706 }
707 else
708 {
709 MITK_ERROR << "AccessUnderlyingBuffer";
710 }
711 }
712 SafeRelease(pInfraRedFrame);
713 SafeRelease(pMultiSourceFrame);
714
715 if( hr != -1 && !SUCCEEDED(hr) )
716 {
717 //The thread gets here if data is requested faster than the device can deliver it.
718 //This may happen from time to time.
719 MITK_DEBUG << "HR result false in KinectV2Controller::GetAmplitudes()";
720 }
721 }
722
723 int KinectV2Controller::GetRGBCaptureWidth() const
724 {
725 return d->m_RGBCaptureWidth;
726 }
727
728 int KinectV2Controller::GetRGBCaptureHeight() const
729 {
730 return d->m_RGBCaptureHeight;
731 }
732
733 int KinectV2Controller::GetDepthCaptureWidth() const
734 {
735 return d->m_DepthCaptureWidth;
736 }
737
738 int KinectV2Controller::GetDepthCaptureHeight() const
739 {
740 return d->m_DepthCaptureHeight;
741 }
742
747
752
753 void KinectV2Controller::SetTriangulationThreshold(double triangulationThreshold)
754 {
755 this->d->m_TriangulationThreshold = triangulationThreshold * triangulationThreshold;
756 }
757}
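Each of the Get* methods above gates AcquireLatestFrame() with the same GetTickCount()-based check, so frames are never requested faster than the sensor's 30 FPS. A minimal, standalone sketch of that throttle pattern is shown below; the helper name is hypothetical and not part of the class, and <windows.h> is assumed for DWORD and GetTickCount().

// Hypothetical helper illustrating the ~30 FPS gate used in
// GetDistances(), GetRgb(), GetAllData() and GetAmplitudes().
// Returns true if at least 33 ms have passed since it last returned true.
inline bool ReadyForNextFrame()
{
  static DWORD lastTime = 0;
  DWORD currentTime = GetTickCount();
  if (static_cast<unsigned int>(currentTime - lastTime) > 33) // Kinect V2 delivers at most 30 FPS
  {
    lastTime = currentTime;
    return true;
  }
  return false;
}

When the gate is closed, hr keeps its initial error value of -1, which is why the methods above only emit a debug message when hr is not -1 and the acquisition itself failed.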
size_t m_DepthBufferSize
Size of the depth buffer in bytes (one float per pixel)
double m_TriangulationThreshold
Threshold to cut off vertices from triangulation.
vtkSmartPointer< vtkPolyData > m_PolyData
Conversion of m_CameraCoordinates to vtkPolyData.
CameraSpacePoint * m_CameraCoordinates
3D world coordinate points of the Kinect V2 SDK
size_t m_RGBBufferSize
Size of the RGB buffer in bytes (one unsigned char per color per pixel)
IKinectSensor * m_pKinectSensor
Kinect V2 sensor object.
ColorSpacePoint * m_ColorPoints
Texture coordinates of the Kinect V2 SDK.
IMultiSourceFrameReader * m_pMultiSourceFrameReader
Multiframe reader to read all frames at once.
ICoordinateMapper * m_pCoordinateMapper
Coordinate mapper allows for computation of world coordinates and texture mapping.
RGBQUAD * m_pColorRGBX
RGBX color format, to copy the color image.
bool m_ConnectionCheck
check if camera is connected or not
void GetAllData(float *distances, float *amplitudes, unsigned char *rgb)
convenience method for faster access to distance and rgb data
virtual bool UpdateCamera()
updates the camera. The update function of the hardware interface is called only when new data is ava...
virtual bool OpenCameraConnection()
opens a connection to the Kinect V2 camera.
virtual bool CloseCameraConnection()
closes the connection to the camera
void SetTriangulationThreshold(double triangulationThreshold)
vtkSmartPointer< vtkPolyData > GetVtkPolyData()
void GetAmplitudes(float *amplitudes)
void GetDistances(float *distances)
acquire new distance data from the Kinect camera
bool InitializeMultiFrameReader()
Set up the MultiFrameReader of the Kinect V2. This reader can acquire different types of data....
void GetRgb(unsigned char *rgb)
acquire new rgb data from the Kinect camera
void SafeRelease(Interface *&pInterfaceToRelease)
IGT Exceptions.
MITKIGTBASE_EXPORT bool Equal(const mitk::NavigationData &leftHandSide, const mitk::NavigationData &rightHandSide, ScalarType eps=mitk::eps, bool verbose=false)
Equal A function comparing two navigation data objects for being equal in meta- and image data.
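The member descriptions above summarize the public interface of the controller. A minimal usage sketch is given below; it assumes that KinectV2Controller follows the usual MITK itk::Object smart-pointer pattern (New()/Pointer), that the header is named mitkKinectV2Controller.h, and that a Kinect V2 sensor and its SDK are available. The buffer sizes follow the fixed resolutions used above (512x424 depth/infrared, 1920x1080 color).

#include <mitkKinectV2Controller.h>
#include <vtkPolyData.h>
#include <vtkSmartPointer.h>
#include <vector>

int main()
{
  // Hypothetical usage sketch; error handling is reduced to early returns.
  mitk::KinectV2Controller::Pointer controller = mitk::KinectV2Controller::New();

  if (!controller->OpenCameraConnection())
    return 1; // no Kinect 2 ready

  // The threshold is given in mm and squared internally, because the
  // triangulation in GetAllData() compares squared point distances.
  controller->SetTriangulationThreshold(2.0);

  std::vector<float> distances(512 * 424);
  std::vector<float> amplitudes(512 * 424);
  std::vector<unsigned char> rgb(1920 * 1080 * 3);

  for (int frame = 0; frame < 10; ++frame)
  {
    controller->UpdateCamera();
    controller->GetAllData(distances.data(), amplitudes.data(), rgb.data());
  }

  // Textured point cloud (or mesh) assembled in GetAllData().
  vtkSmartPointer<vtkPolyData> surface = controller->GetVtkPolyData();
  (void)surface;

  controller->CloseCameraConnection();
  return 0;
}

Note that GetAllData() writes the distance and amplitude buffers mirrored along x (see the inverseid computation above), matching the x-mirrored indexing used for the RGB buffer in the same call.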