Visual Servoing Platform version 3.6.0
vpMbtFaceDepthDense.cpp
1/****************************************************************************
2 *
3 * ViSP, open source Visual Servoing Platform software.
4 * Copyright (C) 2005 - 2023 by Inria. All rights reserved.
5 *
6 * This software is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 * See the file LICENSE.txt at the root directory of this source
11 * distribution for additional information about the GNU GPL.
12 *
13 * For using ViSP with software that can not be combined with the GNU
14 * GPL, please contact Inria about acquiring a ViSP Professional
15 * Edition License.
16 *
17 * See https://visp.inria.fr for more information.
18 *
19 * This software was developed at:
20 * Inria Rennes - Bretagne Atlantique
21 * Campus Universitaire de Beaulieu
22 * 35042 Rennes Cedex
23 * France
24 *
25 * If you have questions regarding the use of this file, please contact
26 * Inria at visp@inria.fr
27 *
28 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30 *
31 * Description:
32 * Manage depth dense features for a particular face.
33 *
34*****************************************************************************/
35
36#include <visp3/core/vpCPUFeatures.h>
37#include <visp3/mbt/vpMbtFaceDepthDense.h>
38
39#ifdef VISP_HAVE_PCL
40#include <pcl/common/point_tests.h>
41#endif
42
43#if defined __SSE2__ || defined _M_X64 || (defined _M_IX86_FP && _M_IX86_FP >= 2)
44#include <emmintrin.h>
45#define VISP_HAVE_SSE2 1
46#endif
47
48// https://stackoverflow.com/a/40765925
49#if !defined(__FMA__) && defined(__AVX2__)
50 #define __FMA__ 1
51#endif
52
53#if defined _WIN32 && defined(_M_ARM64)
54#define _ARM64_DISTINCT_NEON_TYPES
55#include <Intrin.h>
56#include <arm_neon.h>
57#define VISP_HAVE_NEON 1
58#elif (defined(__ARM_NEON__) || defined (__ARM_NEON)) && defined(__aarch64__)
59#include <arm_neon.h>
60#define VISP_HAVE_NEON 1
61#endif
62
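// USE_SIMD_CODE is a master switch for the hand-optimised paths below:
// setting it to 0 forces the generic scalar implementation whatever the target CPU supports.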
63#define USE_SIMD_CODE 1
64
65#if VISP_HAVE_SSE2 && USE_SIMD_CODE
66#define USE_SSE 1
67#else
68#define USE_SSE 0
69#endif
70
71#if VISP_HAVE_NEON && USE_SIMD_CODE
72#define USE_NEON 1
73#else
74#define USE_NEON 0
75#endif
76
77#if (VISP_HAVE_OPENCV_VERSION >= 0x040101 || (VISP_HAVE_OPENCV_VERSION < 0x040000 && VISP_HAVE_OPENCV_VERSION >= 0x030407)) && USE_SIMD_CODE
78#define USE_OPENCV_HAL 1
79#include <opencv2/core/simd_intrinsics.hpp>
80#include <opencv2/core/hal/intrin.hpp>
81#endif
82
83#if !USE_OPENCV_HAL && (USE_SSE || USE_NEON)
84#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
85#include <cstdint>
86#endif
87
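// When the OpenCV universal intrinsics are not available, the anonymous namespace below provides the
// small subset of helpers used in this file (v_load_deinterleave, v_combine_low/high, v_fma) directly
// on top of raw SSE2 or NEON intrinsics, so that the three SIMD code paths keep the same structure.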
88namespace
89{
90#if USE_SSE
91inline void v_load_deinterleave(const uint64_t *ptr, __m128i& a, __m128i& b, __m128i& c)
92{
93 __m128i t0 = _mm_loadu_si128((const __m128i*)ptr); // a0, b0
94 __m128i t1 = _mm_loadu_si128((const __m128i*)(ptr + 2)); // c0, a1
95 __m128i t2 = _mm_loadu_si128((const __m128i*)(ptr + 4)); // b1, c1
96
97 t1 = _mm_shuffle_epi32(t1, 0x4e); // a1, c0
98
99 a = _mm_unpacklo_epi64(t0, t1);
100 b = _mm_unpacklo_epi64(_mm_unpackhi_epi64(t0, t0), t2);
101 c = _mm_unpackhi_epi64(t1, t2);
102}
103
104inline void v_load_deinterleave(const double* ptr, __m128d& a0, __m128d& b0, __m128d& c0)
105{
106 __m128i a1, b1, c1;
107 v_load_deinterleave((const uint64_t*)ptr, a1, b1, c1);
108 a0 = _mm_castsi128_pd(a1);
109 b0 = _mm_castsi128_pd(b1);
110 c0 = _mm_castsi128_pd(c1);
111}
112
113inline __m128d v_combine_low(const __m128d& a, const __m128d& b)
114{
115 __m128i a1 = _mm_castpd_si128(a), b1 = _mm_castpd_si128(b);
116 return _mm_castsi128_pd(_mm_unpacklo_epi64(a1, b1));
117}
118
119inline __m128d v_combine_high(const __m128d& a, const __m128d& b)
120{
121 __m128i a1 = _mm_castpd_si128(a), b1 = _mm_castpd_si128(b);
122 return _mm_castsi128_pd(_mm_unpackhi_epi64(a1, b1));
123}
124
125inline __m128d v_fma(const __m128d& a, const __m128d& b, const __m128d& c)
126{
127#if __FMA__
128 return _mm_fmadd_pd(a, b, c);
129#else
130 return _mm_add_pd(_mm_mul_pd(a, b), c);
131#endif
132}
133#else
134inline void v_load_deinterleave(const double* ptr, float64x2_t& a0, float64x2_t& b0, float64x2_t& c0)
135{
136 float64x2x3_t v = vld3q_f64(ptr);
137 a0 = v.val[0];
138 b0 = v.val[1];
139 c0 = v.val[2];
140}
141
142inline float64x2_t v_combine_low(const float64x2_t& a, const float64x2_t& b)
143{
144 return vcombine_f64(vget_low_f64(a), vget_low_f64(b));
145}
146
147inline float64x2_t v_combine_high(const float64x2_t& a, const float64x2_t& b)
148{
149 return vcombine_f64(vget_high_f64(a), vget_high_f64(b));
150}
151
152inline float64x2_t v_fma(const float64x2_t& a, const float64x2_t& b, const float64x2_t& c)
153{
154 return vfmaq_f64(c, a, b);
155}
156#endif
157}
158#endif // !USE_OPENCV_HAL && (USE_SSE || USE_NEON)
159
160vpMbtFaceDepthDense::vpMbtFaceDepthDense()
161 : m_cam(), m_clippingFlag(vpPolygon3D::NO_CLIPPING), m_distFarClip(100), m_distNearClip(0.001), m_hiddenFace(NULL),
162 m_planeObject(), m_polygon(NULL), m_useScanLine(false),
163 m_depthDenseFilteringMethod(DEPTH_OCCUPANCY_RATIO_FILTERING), m_depthDenseFilteringMaxDist(3.0),
164 m_depthDenseFilteringMinDist(0.8), m_depthDenseFilteringOccupancyRatio(0.3), m_isTrackedDepthDenseFace(true),
165 m_isVisible(false), m_listOfFaceLines(), m_planeCamera(), m_pointCloudFace(), m_polygonLines()
166{
167}
168
169vpMbtFaceDepthDense::~vpMbtFaceDepthDense()
170{
171 for (size_t i = 0; i < m_listOfFaceLines.size(); i++) {
172 delete m_listOfFaceLines[i];
173 }
174}
175
190void vpMbtFaceDepthDense::addLine(vpPoint &P1, vpPoint &P2, vpMbHiddenFaces<vpMbtPolygon> *const faces,
191 vpUniRand &rand_gen, int polygon, std::string name)
192{
193 // Build a PolygonLine to be able to easily display the lines model
194 PolygonLine polygon_line;
195
196 // Add polygon
197 polygon_line.m_poly.setNbPoint(2);
198 polygon_line.m_poly.addPoint(0, P1);
199 polygon_line.m_poly.addPoint(1, P2);
200
201 polygon_line.m_poly.setClipping(m_clippingFlag);
202 polygon_line.m_poly.setNearClippingDistance(m_distNearClip);
203 polygon_line.m_poly.setFarClippingDistance(m_distFarClip);
204
205 polygon_line.m_p1 = &polygon_line.m_poly.p[0];
206 polygon_line.m_p2 = &polygon_line.m_poly.p[1];
207
208 m_polygonLines.push_back(polygon_line);
209
210 // suppress line already in the model
211 bool already_here = false;
212 vpMbtDistanceLine *l;
213
214 for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
215 ++it) {
216 l = *it;
217 if ((samePoint(*(l->p1), P1) && samePoint(*(l->p2), P2)) || (samePoint(*(l->p1), P2) && samePoint(*(l->p2), P1))) {
218 already_here = true;
219 l->addPolygon(polygon);
220 l->hiddenface = faces;
221 l->useScanLine = m_useScanLine;
222 }
223 }
224
225 if (!already_here) {
226 l = new vpMbtDistanceLine;
227
228 l->setCameraParameters(m_cam);
229 l->buildFrom(P1, P2, rand_gen);
230 l->addPolygon(polygon);
231 l->hiddenface = faces;
232 l->useScanLine = m_useScanLine;
233
234 l->setIndex((unsigned int)m_listOfFaceLines.size());
235 l->setName(name);
236
237 if (m_clippingFlag != vpPolygon3D::NO_CLIPPING)
238 l->getPolygon().setClipping(m_clippingFlag);
239
240 if ((m_clippingFlag & vpPolygon3D::NEAR_CLIPPING) == vpPolygon3D::NEAR_CLIPPING)
241 l->getPolygon().setNearClippingDistance(m_distNearClip);
242
243 if ((m_clippingFlag & vpPolygon3D::FAR_CLIPPING) == vpPolygon3D::FAR_CLIPPING)
244 l->getPolygon().setFarClippingDistance(m_distFarClip);
245
246 m_listOfFaceLines.push_back(l);
247 }
248}
249
250#ifdef VISP_HAVE_PCL
251bool vpMbtFaceDepthDense::computeDesiredFeatures(const vpHomogeneousMatrix &cMo,
252 const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud,
253 unsigned int stepX, unsigned int stepY
254#if DEBUG_DISPLAY_DEPTH_DENSE
255 ,
256 vpImage<unsigned char> &debugImage,
257 std::vector<std::vector<vpImagePoint> > &roiPts_vec
258#endif
259 ,
260 const vpImage<bool> *mask)
261{
262 unsigned int width = point_cloud->width, height = point_cloud->height;
263 m_pointCloudFace.clear();
264
265 if (point_cloud->width == 0 || point_cloud->height == 0)
266 return false;
267
268 std::vector<vpImagePoint> roiPts;
269 double distanceToFace;
270 computeROI(cMo, width, height, roiPts
271#if DEBUG_DISPLAY_DEPTH_DENSE
272 ,
273 roiPts_vec
274#endif
275 ,
276 distanceToFace);
277
278 if (roiPts.size() <= 2) {
279#ifndef NDEBUG
280 std::cerr << "Error: roiPts.size() <= 2 in computeDesiredFeatures" << std::endl;
281#endif
282 return false;
283 }
284
285 if (((m_depthDenseFilteringMethod & MAX_DISTANCE_FILTERING) && distanceToFace > m_depthDenseFilteringMaxDist) ||
286 ((m_depthDenseFilteringMethod & MIN_DISTANCE_FILTERING) && distanceToFace < m_depthDenseFilteringMinDist)) {
287 return false;
288 }
289
290 vpPolygon polygon_2d(roiPts);
291 vpRect bb = polygon_2d.getBoundingBox();
292
293 unsigned int top = (unsigned int)std::max(0.0, bb.getTop());
294 unsigned int bottom = (unsigned int)std::min((double)height, std::max(0.0, bb.getBottom()));
295 unsigned int left = (unsigned int)std::max(0.0, bb.getLeft());
296 unsigned int right = (unsigned int)std::min((double)width, std::max(0.0, bb.getRight()));
297
298 bb.setTop(top);
299 bb.setBottom(bottom);
300 bb.setLeft(left);
301 bb.setRight(right);
302
303 if (bb.getHeight() < 0 || bb.getWidth() < 0) {
304 return false;
305 }
306
307 m_pointCloudFace.reserve((size_t)(bb.getWidth() * bb.getHeight()));
308
309 int totalTheoreticalPoints = 0, totalPoints = 0;
310 for (unsigned int i = top; i < bottom; i += stepY) {
311 for (unsigned int j = left; j < right; j += stepX) {
312 if ((m_useScanLine ? (i < m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs().getHeight() &&
313 j < m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs().getWidth() &&
314 m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs()[i][j] == m_polygon->getIndex())
315 : polygon_2d.isInside(vpImagePoint(i, j)))) {
316 totalTheoreticalPoints++;
317
318 if (vpMeTracker::inMask(mask, i, j) && pcl::isFinite((*point_cloud)(j, i)) && (*point_cloud)(j, i).z > 0) {
319 totalPoints++;
320
321 m_pointCloudFace.push_back((*point_cloud)(j, i).x);
322 m_pointCloudFace.push_back((*point_cloud)(j, i).y);
323 m_pointCloudFace.push_back((*point_cloud)(j, i).z);
324
325#if DEBUG_DISPLAY_DEPTH_DENSE
326 debugImage[i][j] = 255;
327#endif
328 }
329 }
330 }
331 }
332
333 if (totalTheoreticalPoints == 0 || ((m_depthDenseFilteringMethod & DEPTH_OCCUPANCY_RATIO_FILTERING) &&
334 totalPoints / (double)totalTheoreticalPoints < m_depthDenseFilteringOccupancyRatio)) {
335 return false;
336 }
337
338 return true;
339}
340#endif
341
342bool vpMbtFaceDepthDense::computeDesiredFeatures(const vpHomogeneousMatrix &cMo, unsigned int width,
343 unsigned int height, const std::vector<vpColVector> &point_cloud,
344 unsigned int stepX, unsigned int stepY
345#if DEBUG_DISPLAY_DEPTH_DENSE
346 ,
347 vpImage<unsigned char> &debugImage,
348 std::vector<std::vector<vpImagePoint> > &roiPts_vec
349#endif
350 ,
351 const vpImage<bool> *mask)
352{
353 m_pointCloudFace.clear();
354
355 if (width == 0 || height == 0)
356 return false;
357
358 std::vector<vpImagePoint> roiPts;
359 double distanceToFace;
360 computeROI(cMo, width, height, roiPts
361#if DEBUG_DISPLAY_DEPTH_DENSE
362 ,
363 roiPts_vec
364#endif
365 ,
366 distanceToFace);
367
368 if (roiPts.size() <= 2) {
369#ifndef NDEBUG
370 std::cerr << "Error: roiPts.size() <= 2 in computeDesiredFeatures" << std::endl;
371#endif
372 return false;
373 }
374
375 if (((m_depthDenseFilteringMethod & MAX_DISTANCE_FILTERING) && distanceToFace > m_depthDenseFilteringMaxDist) ||
376 ((m_depthDenseFilteringMethod & MIN_DISTANCE_FILTERING) && distanceToFace < m_depthDenseFilteringMinDist)) {
377 return false;
378 }
379
380 vpPolygon polygon_2d(roiPts);
381 vpRect bb = polygon_2d.getBoundingBox();
382
383 unsigned int top = (unsigned int)std::max(0.0, bb.getTop());
384 unsigned int bottom = (unsigned int)std::min((double)height, std::max(0.0, bb.getBottom()));
385 unsigned int left = (unsigned int)std::max(0.0, bb.getLeft());
386 unsigned int right = (unsigned int)std::min((double)width, std::max(0.0, bb.getRight()));
387
388 bb.setTop(top);
389 bb.setBottom(bottom);
390 bb.setLeft(left);
391 bb.setRight(right);
392
393 m_pointCloudFace.reserve((size_t)(bb.getWidth() * bb.getHeight()));
394
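// Same sampling strategy as the PCL overload, except that the point cloud is a flat vector of
// vpColVector indexed by i * width + j and only the z > 0 validity test is applied.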
395 int totalTheoreticalPoints = 0, totalPoints = 0;
396 for (unsigned int i = top; i < bottom; i += stepY) {
397 for (unsigned int j = left; j < right; j += stepX) {
398 if ((m_useScanLine ? (i < m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs().getHeight() &&
399 j < m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs().getWidth() &&
400 m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs()[i][j] == m_polygon->getIndex())
401 : polygon_2d.isInside(vpImagePoint(i, j)))) {
402 totalTheoreticalPoints++;
403
404 if (vpMeTracker::inMask(mask, i, j) && point_cloud[i * width + j][2] > 0) {
405 totalPoints++;
406
407 m_pointCloudFace.push_back(point_cloud[i * width + j][0]);
408 m_pointCloudFace.push_back(point_cloud[i * width + j][1]);
409 m_pointCloudFace.push_back(point_cloud[i * width + j][2]);
410
411#if DEBUG_DISPLAY_DEPTH_DENSE
412 debugImage[i][j] = 255;
413#endif
414 }
415 }
416 }
417 }
418
419 if (totalTheoreticalPoints == 0 || ((m_depthDenseFilteringMethod & DEPTH_OCCUPANCY_RATIO_FILTERING) &&
420 totalPoints / (double)totalTheoreticalPoints < m_depthDenseFilteringOccupancyRatio)) {
421 return false;
422 }
423
424 return true;
425}
426
428
430{
431 // Compute lines visibility, only for display
432 vpMbtDistanceLine *line;
433 for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
434 ++it) {
435 line = *it;
436 bool isvisible = false;
437
438 for (std::list<int>::const_iterator itindex = line->Lindex_polygon.begin(); itindex != line->Lindex_polygon.end();
439 ++itindex) {
440 int index = *itindex;
441 if (index == -1) {
442 isvisible = true;
443 } else {
444 if (line->hiddenface->isVisible((unsigned int)index)) {
445 isvisible = true;
446 }
447 }
448 }
449
450 // If the line does not belong to any face, it is always visible
451 if (line->Lindex_polygon.empty())
452 isvisible = true; // Not sure that this can occur
453
454 if (isvisible) {
455 line->setVisible(true);
456 } else {
457 line->setVisible(false);
458 }
459 }
460}
461
462void vpMbtFaceDepthDense::computeInteractionMatrixAndResidu(const vpHomogeneousMatrix &cMo, vpMatrix &L,
463 vpColVector &error)
464{
465 if (m_pointCloudFace.empty()) {
466 L.resize(0, 0);
467 error.resize(0);
468 return;
469 }
470
471 L.resize(getNbFeatures(), 6, false, false);
472 error.resize(getNbFeatures(), false);
473
474 // Transform the plane equation for the current pose
475 m_planeCamera = m_planeObject;
476 m_planeCamera.changeFrame(cMo);
477
478 double nx = m_planeCamera.getA();
479 double ny = m_planeCamera.getB();
480 double nz = m_planeCamera.getC();
481 double D = m_planeCamera.getD();
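// For a 3D point X = (x, y, z) of the face, the residual is the signed point-to-plane distance
// e = nx*x + ny*y + nz*z + D, and the corresponding row of the interaction matrix is
// [nx ny nz (nz*y - ny*z) (nx*z - nz*x) (ny*x - nx*y)], i.e. [n^T (X x n)^T].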
482
483 bool useSIMD = vpCPUFeatures::checkSSE2() || vpCPUFeatures::checkNeon();
484#if USE_OPENCV_HAL
485 useSIMD = true;
486#endif
487#if !USE_SSE && !USE_NEON && !USE_OPENCV_HAL
488 useSIMD = false;
489#endif
490
491 if (useSIMD) {
492#if USE_SSE || USE_NEON || USE_OPENCV_HAL
493 size_t cpt = 0;
494 if (getNbFeatures() >= 2) {
495 double *ptr_point_cloud = &m_pointCloudFace[0];
496 double *ptr_L = L.data;
497 double *ptr_error = error.data;
498
499#if USE_OPENCV_HAL
500 const cv::v_float64x2 vnx = cv::v_setall_f64(nx);
501 const cv::v_float64x2 vny = cv::v_setall_f64(ny);
502 const cv::v_float64x2 vnz = cv::v_setall_f64(nz);
503 const cv::v_float64x2 vd = cv::v_setall_f64(D);
504#elif USE_SSE
505 const __m128d vnx = _mm_set1_pd(nx);
506 const __m128d vny = _mm_set1_pd(ny);
507 const __m128d vnz = _mm_set1_pd(nz);
508 const __m128d vd = _mm_set1_pd(D);
509#else
510 const float64x2_t vnx = vdupq_n_f64(nx);
511 const float64x2_t vny = vdupq_n_f64(ny);
512 const float64x2_t vnz = vdupq_n_f64(nz);
513 const float64x2_t vd = vdupq_n_f64(D);
514#endif
515
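// Vectorised main loop: each iteration consumes two 3D points (6 doubles) and writes two 6-entry rows
// of L and two entries of the error vector.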
516 for (; cpt <= m_pointCloudFace.size() - 6; cpt += 6, ptr_point_cloud += 6) {
517#if USE_OPENCV_HAL
518 cv::v_float64x2 vx, vy, vz;
519 cv::v_load_deinterleave(ptr_point_cloud, vx, vy, vz);
520
521#if (VISP_HAVE_OPENCV_VERSION >= 0x040900)
522 cv::v_float64x2 va1 = cv::v_sub(cv::v_mul(vnz, vy), cv::v_mul(vny, vz)); // vnz*vy - vny*vz
523 cv::v_float64x2 va2 = cv::v_sub(cv::v_mul(vnx, vz), cv::v_mul(vnz, vx)); // vnx*vz - vnz*vx
524 cv::v_float64x2 va3 = cv::v_sub(cv::v_mul(vny, vx), cv::v_mul(vnx, vy)); // vny*vx - vnx*vy
525#else
526 cv::v_float64x2 va1 = vnz*vy - vny*vz;
527 cv::v_float64x2 va2 = vnx*vz - vnz*vx;
528 cv::v_float64x2 va3 = vny*vx - vnx*vy;
529#endif
530
531 cv::v_float64x2 vnxy = cv::v_combine_low(vnx, vny);
532 cv::v_store(ptr_L, vnxy);
533 ptr_L += 2;
534 vnxy = cv::v_combine_low(vnz, va1);
535 cv::v_store(ptr_L, vnxy);
536 ptr_L += 2;
537 vnxy = cv::v_combine_low(va2, va3);
538 cv::v_store(ptr_L, vnxy);
539 ptr_L += 2;
540
541 vnxy = cv::v_combine_high(vnx, vny);
542 cv::v_store(ptr_L, vnxy);
543 ptr_L += 2;
544 vnxy = cv::v_combine_high(vnz, va1);
545 cv::v_store(ptr_L, vnxy);
546 ptr_L += 2;
547 vnxy = cv::v_combine_high(va2, va3);
548 cv::v_store(ptr_L, vnxy);
549 ptr_L += 2;
550
551#if (VISP_HAVE_OPENCV_VERSION >= 0x040900)
552 cv::v_float64x2 verr = cv::v_add(vd, cv::v_muladd(vnx, vx, cv::v_muladd(vny, vy, cv::v_mul(vnz, vz))));
553#else
554 cv::v_float64x2 verr = vd + cv::v_muladd(vnx, vx, cv::v_muladd(vny, vy, vnz*vz));
555#endif
556
557 cv::v_store(ptr_error, verr);
558 ptr_error += 2;
559#elif USE_SSE
560 __m128d vx, vy, vz;
561 v_load_deinterleave(ptr_point_cloud, vx, vy, vz);
562
563 __m128d va1 = _mm_sub_pd(_mm_mul_pd(vnz, vy), _mm_mul_pd(vny, vz));
564 __m128d va2 = _mm_sub_pd(_mm_mul_pd(vnx, vz), _mm_mul_pd(vnz, vx));
565 __m128d va3 = _mm_sub_pd(_mm_mul_pd(vny, vx), _mm_mul_pd(vnx, vy));
566
567 __m128d vnxy = v_combine_low(vnx, vny);
568 _mm_storeu_pd(ptr_L, vnxy);
569 ptr_L += 2;
570 vnxy = v_combine_low(vnz, va1);
571 _mm_storeu_pd(ptr_L, vnxy);
572 ptr_L += 2;
573 vnxy = v_combine_low(va2, va3);
574 _mm_storeu_pd(ptr_L, vnxy);
575 ptr_L += 2;
576
577 vnxy = v_combine_high(vnx, vny);
578 _mm_storeu_pd(ptr_L, vnxy);
579 ptr_L += 2;
580 vnxy = v_combine_high(vnz, va1);
581 _mm_storeu_pd(ptr_L, vnxy);
582 ptr_L += 2;
583 vnxy = v_combine_high(va2, va3);
584 _mm_storeu_pd(ptr_L, vnxy);
585 ptr_L += 2;
586
587 const __m128d verror = _mm_add_pd(vd, v_fma(vnx, vx, v_fma(vny, vy, _mm_mul_pd(vnz, vz))));
588 _mm_storeu_pd(ptr_error, verror);
589 ptr_error += 2;
590#else
591 float64x2_t vx, vy, vz;
592 v_load_deinterleave(ptr_point_cloud, vx, vy, vz);
593
594 float64x2_t va1 = vsubq_f64(vmulq_f64(vnz, vy), vmulq_f64(vny, vz));
595 float64x2_t va2 = vsubq_f64(vmulq_f64(vnx, vz), vmulq_f64(vnz, vx));
596 float64x2_t va3 = vsubq_f64(vmulq_f64(vny, vx), vmulq_f64(vnx, vy));
597
598 float64x2_t vnxy = v_combine_low(vnx, vny);
599 vst1q_f64(ptr_L, vnxy);
600 ptr_L += 2;
601 vnxy = v_combine_low(vnz, va1);
602 vst1q_f64(ptr_L, vnxy);
603 ptr_L += 2;
604 vnxy = v_combine_low(va2, va3);
605 vst1q_f64(ptr_L, vnxy);
606 ptr_L += 2;
607
608 vnxy = v_combine_high(vnx, vny);
609 vst1q_f64(ptr_L, vnxy);
610 ptr_L += 2;
611 vnxy = v_combine_high(vnz, va1);
612 vst1q_f64(ptr_L, vnxy);
613 ptr_L += 2;
614 vnxy = v_combine_high(va2, va3);
615 vst1q_f64(ptr_L, vnxy);
616 ptr_L += 2;
617
618 const float64x2_t verror = vaddq_f64(vd, v_fma(vnx, vx, v_fma(vny, vy, vmulq_f64(vnz, vz))));
619 vst1q_f64(ptr_error, verror);
620 ptr_error += 2;
621#endif
622 }
623 }
624
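// Scalar remainder: processes the point left over by the vectorised loop, or all points when fewer
// than two points are available.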
625 for (; cpt < m_pointCloudFace.size(); cpt += 3) {
626 double x = m_pointCloudFace[cpt];
627 double y = m_pointCloudFace[cpt + 1];
628 double z = m_pointCloudFace[cpt + 2];
629
630 double _a1 = (nz * y) - (ny * z);
631 double _a2 = (nx * z) - (nz * x);
632 double _a3 = (ny * x) - (nx * y);
633
634 // L
635 L[(unsigned int)(cpt / 3)][0] = nx;
636 L[(unsigned int)(cpt / 3)][1] = ny;
637 L[(unsigned int)(cpt / 3)][2] = nz;
638 L[(unsigned int)(cpt / 3)][3] = _a1;
639 L[(unsigned int)(cpt / 3)][4] = _a2;
640 L[(unsigned int)(cpt / 3)][5] = _a3;
641
642 vpColVector normal(3);
643 normal[0] = nx;
644 normal[1] = ny;
645 normal[2] = nz;
646
647 vpColVector pt(3);
648 pt[0] = x;
649 pt[1] = y;
650 pt[2] = z;
651
652 // Error
653 error[(unsigned int)(cpt / 3)] = D + (normal.t() * pt);
654 }
655#endif
656 } else {
657 vpColVector normal(3);
658 normal[0] = nx;
659 normal[1] = ny;
660 normal[2] = nz;
661 vpColVector pt(3);
662
663 unsigned int idx = 0;
664 for (size_t i = 0; i < m_pointCloudFace.size(); i += 3, idx++) {
665 double x = m_pointCloudFace[i];
666 double y = m_pointCloudFace[i + 1];
667 double z = m_pointCloudFace[i + 2];
668
669 double _a1 = (nz * y) - (ny * z);
670 double _a2 = (nx * z) - (nz * x);
671 double _a3 = (ny * x) - (nx * y);
672
673 // L
674 L[idx][0] = nx;
675 L[idx][1] = ny;
676 L[idx][2] = nz;
677 L[idx][3] = _a1;
678 L[idx][4] = _a2;
679 L[idx][5] = _a3;
680
681 pt[0] = x;
682 pt[1] = y;
683 pt[2] = z;
684 // Error
685 error[idx] = D + (normal.t() * pt);
686 }
687 }
688}
689
690void vpMbtFaceDepthDense::computeROI(const vpHomogeneousMatrix &cMo, unsigned int width, unsigned int height,
691 std::vector<vpImagePoint> &roiPts
692#if DEBUG_DISPLAY_DEPTH_DENSE
693 ,
694 std::vector<std::vector<vpImagePoint> > &roiPts_vec
695#endif
696 ,
697 double &distanceToFace)
698{
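// With scan-line visibility the ROI is built from the visible segments returned by the scan-line
// renderer, otherwise from the clipped polygon. In both cases distanceToFace is set to the distance
// between the camera and the face centroid, used afterwards for the min/max distance filtering.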
699 if (m_useScanLine || m_clippingFlag > 2)
700 m_cam.computeFov(width, height);
701
702 if (m_useScanLine) {
703 for (std::vector<PolygonLine>::iterator it = m_polygonLines.begin(); it != m_polygonLines.end(); ++it) {
704 it->m_p1->changeFrame(cMo);
705 it->m_p2->changeFrame(cMo);
706
707 vpImagePoint ip1, ip2;
708
709 it->m_poly.changeFrame(cMo);
710 it->m_poly.computePolygonClipped(m_cam);
711
712 if (it->m_poly.polyClipped.size() == 2 &&
713 ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::NEAR_CLIPPING) == 0) &&
714 ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::FAR_CLIPPING) == 0) &&
715 ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::DOWN_CLIPPING) == 0) &&
716 ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::UP_CLIPPING) == 0) &&
717 ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::LEFT_CLIPPING) == 0) &&
718 ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::RIGHT_CLIPPING) == 0)) {
719
720 std::vector<std::pair<vpPoint, vpPoint> > linesLst;
721 m_hiddenFace->computeScanLineQuery(it->m_poly.polyClipped[0].first, it->m_poly.polyClipped[1].first, linesLst,
722 true);
723
724 vpPoint faceCentroid;
725
726 for (unsigned int i = 0; i < linesLst.size(); i++) {
727 linesLst[i].first.project();
728 linesLst[i].second.project();
729
730 vpMeterPixelConversion::convertPoint(m_cam, linesLst[i].first.get_x(), linesLst[i].first.get_y(), ip1);
731 vpMeterPixelConversion::convertPoint(m_cam, linesLst[i].second.get_x(), linesLst[i].second.get_y(), ip2);
732
733 it->m_imPt1 = ip1;
734 it->m_imPt2 = ip2;
735
736 roiPts.push_back(ip1);
737 roiPts.push_back(ip2);
738
739 faceCentroid.set_X(faceCentroid.get_X() + linesLst[i].first.get_X() + linesLst[i].second.get_X());
740 faceCentroid.set_Y(faceCentroid.get_Y() + linesLst[i].first.get_Y() + linesLst[i].second.get_Y());
741 faceCentroid.set_Z(faceCentroid.get_Z() + linesLst[i].first.get_Z() + linesLst[i].second.get_Z());
742
743#if DEBUG_DISPLAY_DEPTH_DENSE
744 std::vector<vpImagePoint> roiPts_;
745 roiPts_.push_back(ip1);
746 roiPts_.push_back(ip2);
747 roiPts_vec.push_back(roiPts_);
748#endif
749 }
750
751 if (linesLst.empty()) {
752 distanceToFace = std::numeric_limits<double>::max();
753 } else {
754 faceCentroid.set_X(faceCentroid.get_X() / (2 * linesLst.size()));
755 faceCentroid.set_Y(faceCentroid.get_Y() / (2 * linesLst.size()));
756 faceCentroid.set_Z(faceCentroid.get_Z() / (2 * linesLst.size()));
757
758 distanceToFace =
759 sqrt(faceCentroid.get_X() * faceCentroid.get_X() + faceCentroid.get_Y() * faceCentroid.get_Y() +
760 faceCentroid.get_Z() * faceCentroid.get_Z());
761 }
762 }
763 }
764 } else {
765 // Get polygon clipped
766 m_polygon->getRoiClipped(m_cam, roiPts, cMo);
767
768 // Get 3D polygon clipped
769 std::vector<vpPoint> polygonsClipped;
770 m_polygon->getPolygonClipped(polygonsClipped);
771
772 if (polygonsClipped.empty()) {
773 distanceToFace = std::numeric_limits<double>::max();
774 } else {
775 vpPoint faceCentroid;
776
777 for (size_t i = 0; i < polygonsClipped.size(); i++) {
778 faceCentroid.set_X(faceCentroid.get_X() + polygonsClipped[i].get_X());
779 faceCentroid.set_Y(faceCentroid.get_Y() + polygonsClipped[i].get_Y());
780 faceCentroid.set_Z(faceCentroid.get_Z() + polygonsClipped[i].get_Z());
781 }
782
783 faceCentroid.set_X(faceCentroid.get_X() / polygonsClipped.size());
784 faceCentroid.set_Y(faceCentroid.get_Y() / polygonsClipped.size());
785 faceCentroid.set_Z(faceCentroid.get_Z() / polygonsClipped.size());
786
787 distanceToFace = sqrt(faceCentroid.get_X() * faceCentroid.get_X() + faceCentroid.get_Y() * faceCentroid.get_Y() +
788 faceCentroid.get_Z() * faceCentroid.get_Z());
789 }
790
791#if DEBUG_DISPLAY_DEPTH_DENSE
792 roiPts_vec.push_back(roiPts);
793#endif
794 }
795}
796
797void vpMbtFaceDepthDense::display(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo,
798 const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
799 bool displayFullModel)
800{
801 std::vector<std::vector<double> > models =
802 getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
803
804 for (size_t i = 0; i < models.size(); i++) {
805 vpImagePoint ip1(models[i][1], models[i][2]);
806 vpImagePoint ip2(models[i][3], models[i][4]);
807 vpDisplay::displayLine(I, ip1, ip2, col, thickness);
808 }
809}
810
811void vpMbtFaceDepthDense::display(const vpImage<vpRGBa> &I, const vpHomogeneousMatrix &cMo,
812 const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
813 bool displayFullModel)
814{
815 std::vector<std::vector<double> > models =
816 getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
817
818 for (size_t i = 0; i < models.size(); i++) {
819 vpImagePoint ip1(models[i][1], models[i][2]);
820 vpImagePoint ip2(models[i][3], models[i][4]);
821 vpDisplay::displayLine(I, ip1, ip2, col, thickness);
822 }
823}
824
825void vpMbtFaceDepthDense::displayFeature(const vpImage<unsigned char> & /*I*/, const vpHomogeneousMatrix & /*cMo*/,
826 const vpCameraParameters & /*cam*/, const double /*scale*/,
827 const unsigned int /*thickness*/)
828{
829}
830
831void vpMbtFaceDepthDense::displayFeature(const vpImage<vpRGBa> & /*I*/, const vpHomogeneousMatrix & /*cMo*/,
832 const vpCameraParameters & /*cam*/, const double /*scale*/,
833 const unsigned int /*thickness*/)
834{
835}
836
848std::vector<std::vector<double> > vpMbtFaceDepthDense::getModelForDisplay(unsigned int width, unsigned int height,
849 const vpHomogeneousMatrix &cMo,
850 const vpCameraParameters &cam,
851 bool displayFullModel)
852{
853 std::vector<std::vector<double> > models;
854
855 if ((m_polygon->isVisible() && m_isTrackedDepthDenseFace) || displayFullModel) {
857
858 for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
859 ++it) {
860 vpMbtDistanceLine *line = *it;
861 std::vector<std::vector<double> > lineModels =
862 line->getModelForDisplay(width, height, cMo, cam, displayFullModel);
863 models.insert(models.end(), lineModels.begin(), lineModels.end());
864 }
865 }
866
867 return models;
868}
869
879bool vpMbtFaceDepthDense::samePoint(const vpPoint &P1, const vpPoint &P2) const
880{
881 double dx = fabs(P1.get_oX() - P2.get_oX());
882 double dy = fabs(P1.get_oY() - P2.get_oY());
883 double dz = fabs(P1.get_oZ() - P2.get_oZ());
884
885 if (dx <= std::numeric_limits<double>::epsilon() && dy <= std::numeric_limits<double>::epsilon() &&
886 dz <= std::numeric_limits<double>::epsilon())
887 return true;
888 else
889 return false;
890}
891
892void vpMbtFaceDepthDense::setCameraParameters(const vpCameraParameters &camera)
893{
894 m_cam = camera;
895
896 for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
897 ++it) {
898 (*it)->setCameraParameters(camera);
899 }
900}
901
902void vpMbtFaceDepthDense::setScanLineVisibilityTest(bool v)
903{
904 m_useScanLine = v;
905
906 for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
907 ++it) {
908 (*it)->useScanLine = v;
909 }
910}