Visual Servoing Platform version 3.5.0
vpMbDepthNormalTracker.cpp
/****************************************************************************
 *
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See http://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * Model-based tracker using depth normal features.
 *
 *****************************************************************************/

#include <iostream>

#include <visp3/core/vpConfig.h>

#ifdef VISP_HAVE_PCL
#include <pcl/point_cloud.h>
#endif

#include <visp3/core/vpDisplay.h>
#include <visp3/core/vpExponentialMap.h>
#include <visp3/core/vpTrackingException.h>
#include <visp3/mbt/vpMbDepthNormalTracker.h>
#include <visp3/mbt/vpMbtXmlGenericParser.h>

#if DEBUG_DISPLAY_DEPTH_NORMAL
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayX.h>
#endif

vpMbDepthNormalTracker::vpMbDepthNormalTracker()
  : m_depthNormalFeatureEstimationMethod(vpMbtFaceDepthNormal::ROBUST_FEATURE_ESTIMATION),
    m_depthNormalHiddenFacesDisplay(), m_depthNormalListOfActiveFaces(),
    m_depthNormalListOfDesiredFeatures(), m_depthNormalFaces(), m_depthNormalPclPlaneEstimationMethod(2),
    m_depthNormalPclPlaneEstimationRansacMaxIter(200), m_depthNormalPclPlaneEstimationRansacThreshold(0.001),
    m_depthNormalSamplingStepX(2), m_depthNormalSamplingStepY(2), m_depthNormalUseRobust(false), m_error_depthNormal(),
    m_featuresToBeDisplayedDepthNormal(), m_L_depthNormal(), m_robust_depthNormal(), m_w_depthNormal(),
    m_weightedError_depthNormal()
#if DEBUG_DISPLAY_DEPTH_NORMAL
    ,
    m_debugDisp_depthNormal(NULL), m_debugImage_depthNormal()
#endif
{
#ifdef VISP_HAVE_OGRE
  faces.getOgreContext()->setWindowName("MBT Depth");
#endif

#if defined(VISP_HAVE_X11) && DEBUG_DISPLAY_DEPTH_NORMAL
  m_debugDisp_depthNormal = new vpDisplayX;
#elif defined(VISP_HAVE_GDI) && DEBUG_DISPLAY_DEPTH_NORMAL
  m_debugDisp_depthNormal = new vpDisplayGDI;
#endif
}

vpMbDepthNormalTracker::~vpMbDepthNormalTracker()
{
  for (size_t i = 0; i < m_depthNormalFaces.size(); i++) {
    delete m_depthNormalFaces[i];
  }
}

void vpMbDepthNormalTracker::addFace(vpMbtPolygon &polygon, bool alreadyClose)
{
  if (polygon.nbpt < 3) {
    return;
  }

  // Copy hidden faces
  m_depthNormalHiddenFacesDisplay = faces;

  vpMbtFaceDepthNormal *normal_face = new vpMbtFaceDepthNormal;
  normal_face->m_hiddenFace = &faces;
  normal_face->m_polygon = &polygon;
  normal_face->m_cam = m_cam;
  normal_face->m_useScanLine = useScanLine;
  normal_face->m_clippingFlag = clippingFlag;
  normal_face->m_distNearClip = distNearClip;
  normal_face->m_distFarClip = distFarClip;
  normal_face->setFeatureEstimationMethod(m_depthNormalFeatureEstimationMethod);
  normal_face->setPclPlaneEstimationMethod(m_depthNormalPclPlaneEstimationMethod);
  normal_face->setPclPlaneEstimationRansacMaxIter(m_depthNormalPclPlaneEstimationRansacMaxIter);
  normal_face->setPclPlaneEstimationRansacThreshold(m_depthNormalPclPlaneEstimationRansacThreshold);

  // Add lines that compose the face
  unsigned int nbpt = polygon.getNbPoint();
  if (nbpt > 0) {
    for (unsigned int i = 0; i < nbpt - 1; i++) {
      normal_face->addLine(polygon.p[i], polygon.p[i + 1], &m_depthNormalHiddenFacesDisplay, m_rand, polygon.getIndex(),
                           polygon.getName());
    }

    if (!alreadyClose) {
      // Add last line that closes the face
      normal_face->addLine(polygon.p[nbpt - 1], polygon.p[0], &m_depthNormalHiddenFacesDisplay, m_rand, polygon.getIndex(),
                           polygon.getName());
    }
  }

  // Construct a vpPlane in object frame
  vpPoint pts[3];
  pts[0] = polygon.p[0];
  pts[1] = polygon.p[1];
  pts[2] = polygon.p[2];
  normal_face->m_planeObject = vpPlane(pts[0], pts[1], pts[2], vpPlane::object_frame);

  m_depthNormalFaces.push_back(normal_face);
}

void vpMbDepthNormalTracker::computeVisibility(unsigned int width, unsigned int height)
{
  bool changed = false;
  faces.setVisible(width, height, m_cam, m_cMo, angleAppears, angleDisappears, changed);

  if (useScanLine) {
    //    if (clippingFlag <= 2) {
    //      cam.computeFov(width, height);
    //    }

    faces.computeClippedPolygons(m_cMo, m_cam);
    faces.computeScanLineRender(m_cam, width, height);
  }

  for (std::vector<vpMbtFaceDepthNormal *>::const_iterator it = m_depthNormalFaces.begin();
       it != m_depthNormalFaces.end(); ++it) {
    vpMbtFaceDepthNormal *face_normal = *it;
    face_normal->computeVisibility();
  }
}

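// Pose refinement by robust virtual visual servoing (VVS): at each iteration the
// interaction matrix m_L_depthNormal and the residual m_error_depthNormal = s - s*
// are rebuilt, M-estimator weights m_w_depthNormal are recomputed, and a
// Levenberg-Marquardt damped Gauss-Newton velocity v updates the pose m_cMo, until
// the change of the weighted residual norm falls below m_stopCriteriaEpsilon or
// m_maxIter iterations are reached.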
void vpMbDepthNormalTracker::computeVVS()
{
  double normRes = 0;
  double normRes_1 = -1;
  unsigned int iter = 0;

  computeVVSInit();
  unsigned int nb_features = (unsigned int)(3 * m_depthNormalListOfDesiredFeatures.size());

  vpColVector error_prev(nb_features);
  vpMatrix LTL;
  vpColVector LTR, v;

  double mu = m_initialMu;
  vpHomogeneousMatrix cMo_prev;

  bool isoJoIdentity_ = true;
  vpVelocityTwistMatrix cVo;
  vpMatrix L_true, LVJ_true;

  while (std::fabs(normRes_1 - normRes) > m_stopCriteriaEpsilon && (iter < m_maxIter)) {
    computeVVSInteractionMatrixAndResidu();

    bool reStartFromLastIncrement = false;
    computeVVSCheckLevenbergMarquardt(iter, m_error_depthNormal, error_prev, cMo_prev, mu, reStartFromLastIncrement);

    if (!reStartFromLastIncrement) {
      computeVVSWeights(m_robust_depthNormal, m_error_depthNormal, m_w_depthNormal);

      if (computeCovariance) {
        L_true = m_L_depthNormal;
        if (!isoJoIdentity_) {
          cVo.buildFrom(m_cMo);
          LVJ_true = (m_L_depthNormal * (cVo * oJo));
        }
      }

      // Compute DoF only once
      if (iter == 0) {
        isoJoIdentity_ = true;
        oJo.eye();

        // If all the 6 dof should be estimated, we check if the interaction
        // matrix is full rank. If not, we remove automatically the dof that
        // cannot be estimated. This is particularly useful when considering
        // circles (rank 5) and cylinders (rank 4)
        if (isoJoIdentity_) {
          cVo.buildFrom(m_cMo);

          vpMatrix K; // kernel
          unsigned int rank = (m_L_depthNormal * cVo).kernel(K);
          if (rank == 0) {
            throw vpException(vpException::fatalError, "Rank=0, cannot estimate the pose !");
          }

          if (rank != 6) {
            vpMatrix I; // Identity
            I.eye(6);
            oJo = I - K.AtA();

            isoJoIdentity_ = false;
          }
        }
      }

      double num = 0.0, den = 0.0;
      for (unsigned int i = 0; i < m_L_depthNormal.getRows(); i++) {
        // Compute weighted errors and stop criteria
        m_weightedError_depthNormal[i] = m_w_depthNormal[i] * m_error_depthNormal[i];
        num += m_w_depthNormal[i] * vpMath::sqr(m_error_depthNormal[i]);
        den += m_w_depthNormal[i];

        // weight interaction matrix
        for (unsigned int j = 0; j < 6; j++) {
          m_L_depthNormal[i][j] *= m_w_depthNormal[i];
        }
      }

      computeVVSPoseEstimation(isoJoIdentity_, iter, m_L_depthNormal, LTL, m_weightedError_depthNormal,
                               m_error_depthNormal, error_prev, LTR, mu, v);

      cMo_prev = m_cMo;
      m_cMo = vpExponentialMap::direct(v).inverse() * m_cMo;

      normRes_1 = normRes;
      normRes = sqrt(num / den);
    }

    iter++;
  }

  computeCovarianceMatrixVVS(isoJoIdentity_, m_w_depthNormal, cMo_prev, L_true, LVJ_true, m_error_depthNormal);
}

void vpMbDepthNormalTracker::computeVVSInit()
{
  unsigned int nb_features = (unsigned int)(3 * m_depthNormalListOfDesiredFeatures.size());

  m_L_depthNormal.resize(nb_features, 6, false, false);
  m_error_depthNormal.resize(nb_features, false);
  m_weightedError_depthNormal.resize(nb_features, false);

  m_w_depthNormal.resize(nb_features, false);
  m_w_depthNormal = 1;
}

void vpMbDepthNormalTracker::computeVVSInteractionMatrixAndResidu()
{
  unsigned int cpt = 0;
  for (std::vector<vpMbtFaceDepthNormal *>::const_iterator it = m_depthNormalListOfActiveFaces.begin();
       it != m_depthNormalListOfActiveFaces.end(); ++it) {
    vpMatrix L_face;
    vpColVector features_face;
    (*it)->computeInteractionMatrix(m_cMo, L_face, features_face);

    vpColVector face_error = features_face - m_depthNormalListOfDesiredFeatures[(size_t)cpt];

    m_error_depthNormal.insert(cpt * 3, face_error);
    m_L_depthNormal.insert(L_face, cpt * 3, 0);

    cpt++;
  }
}

void vpMbDepthNormalTracker::display(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo,
                                     const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
                                     bool displayFullModel)
{
  std::vector<std::vector<double> > models = vpMbDepthNormalTracker::getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);

  for (size_t i = 0; i < models.size(); i++) {
    if (vpMath::equal(models[i][0], 0)) {
      vpImagePoint ip1(models[i][1], models[i][2]);
      vpImagePoint ip2(models[i][3], models[i][4]);
      vpDisplay::displayLine(I, ip1, ip2, col, thickness);
    }
  }

  if (displayFeatures) {
    std::vector<std::vector<double> > features = getFeaturesForDisplayDepthNormal();
    for (size_t i = 0; i < features.size(); i++) {
      vpImagePoint im_centroid(features[i][1], features[i][2]);
      vpImagePoint im_extremity(features[i][3], features[i][4]);
      bool desired = vpMath::equal(features[i][0], 2);
      vpDisplay::displayArrow(I, im_centroid, im_extremity, desired ? vpColor::blue : vpColor::red, 4, 2, thickness);
    }
  }
}

void vpMbDepthNormalTracker::display(const vpImage<vpRGBa> &I, const vpHomogeneousMatrix &cMo,
                                     const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
                                     bool displayFullModel)
{
  std::vector<std::vector<double> > models = vpMbDepthNormalTracker::getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);

  for (size_t i = 0; i < models.size(); i++) {
    if (vpMath::equal(models[i][0], 0)) {
      vpImagePoint ip1(models[i][1], models[i][2]);
      vpImagePoint ip2(models[i][3], models[i][4]);
      vpDisplay::displayLine(I, ip1, ip2, col, thickness);
    }
  }

  if (displayFeatures) {
    std::vector<std::vector<double> > features = getFeaturesForDisplayDepthNormal();
    for (size_t i = 0; i < features.size(); i++) {
      vpImagePoint im_centroid(features[i][1], features[i][2]);
      vpImagePoint im_extremity(features[i][3], features[i][4]);
      bool desired = vpMath::equal(features[i][0], 2);
      vpDisplay::displayArrow(I, im_centroid, im_extremity, desired ? vpColor::blue : vpColor::red, 4, 2, thickness);
    }
  }
}

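// Aggregate the display features of every face. As consumed by the display()
// methods above, each entry is a 5-element vector: element 0 is a feature id
// (a value equal to 2 marks a desired feature, drawn in blue, any other value
// is drawn in red), elements 1-2 are the image coordinates of the arrow origin
// (face centroid) and elements 3-4 those of the arrow extremity.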
std::vector<std::vector<double> > vpMbDepthNormalTracker::getFeaturesForDisplayDepthNormal()
{
  std::vector<std::vector<double> > features;

  for (std::vector<vpMbtFaceDepthNormal *>::const_iterator it = m_depthNormalFaces.begin();
       it != m_depthNormalFaces.end(); ++it) {
    vpMbtFaceDepthNormal *face_normal = *it;
    std::vector<std::vector<double> > currentFeatures = face_normal->getFeaturesForDisplay(m_cMo, m_cam);
    features.insert(features.end(), currentFeatures.begin(), currentFeatures.end());
  }

  return features;
}

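// Aggregate the display primitives of every face for a given pose and camera.
// As consumed by the display() methods above, each entry is a 5-element vector:
// element 0 is the primitive type (0 for a line segment) and elements 1-4 are
// the image coordinates of the two segment end points.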
std::vector<std::vector<double> > vpMbDepthNormalTracker::getModelForDisplay(unsigned int width, unsigned int height,
                                                                             const vpHomogeneousMatrix &cMo,
                                                                             const vpCameraParameters &cam,
                                                                             bool displayFullModel)
{
  std::vector<std::vector<double> > models;

  vpCameraParameters c = cam;

  bool changed = false;
  faces.setVisible(width, height, c, cMo, angleAppears, angleDisappears, changed);

  if (useScanLine) {
    c.computeFov(width, height);

    faces.computeClippedPolygons(cMo, c);
    faces.computeScanLineRender(c, width, height);
  }

  for (std::vector<vpMbtFaceDepthNormal *>::const_iterator it = m_depthNormalFaces.begin();
       it != m_depthNormalFaces.end(); ++it) {
    vpMbtFaceDepthNormal *face_normal = *it;
    std::vector<std::vector<double> > modelLines = face_normal->getModelForDisplay(width, height, cMo, cam, displayFullModel);
    models.insert(models.end(), modelLines.begin(), modelLines.end());
  }

  return models;
}

void vpMbDepthNormalTracker::init(const vpImage<unsigned char> &I)
{
  if (!modelInitialised) {
    throw vpException(vpException::fatalError, "model not initialized");
  }

  bool reInitialisation = false;
  if (!useOgre) {
    faces.setVisible(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
  } else {
#ifdef VISP_HAVE_OGRE
    if (!faces.isOgreInitialised()) {
      faces.setBackgroundSizeOgre(I.getHeight(), I.getWidth());
      faces.setOgreShowConfigDialog(ogreShowConfigDialog);
      faces.initOgre(m_cam);
      // Turn off Ogre config dialog display for the next call to this
      // function since settings are saved in the ogre.cfg file and used
      // during the next call
      ogreShowConfigDialog = false;
    }

    faces.setVisibleOgre(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
#else
    faces.setVisible(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
#endif
  }

  if (useScanLine || clippingFlag > 3)
    m_cam.computeFov(I.getWidth(), I.getHeight());

  computeVisibility(I.getWidth(), I.getHeight());
}

void vpMbDepthNormalTracker::loadConfigFile(const std::string &configFile, bool verbose)
{
  vpMbtXmlGenericParser xmlp(vpMbtXmlGenericParser::DEPTH_NORMAL_PARSER);
  xmlp.setVerbose(verbose);
  xmlp.setCameraParameters(m_cam);
  xmlp.setAngleAppear(vpMath::deg(angleAppears));
  xmlp.setAngleDisappear(vpMath::deg(angleDisappears));

  xmlp.setDepthNormalFeatureEstimationMethod(m_depthNormalFeatureEstimationMethod);
  xmlp.setDepthNormalPclPlaneEstimationMethod(m_depthNormalPclPlaneEstimationMethod);
  xmlp.setDepthNormalPclPlaneEstimationRansacMaxIter(m_depthNormalPclPlaneEstimationRansacMaxIter);
  xmlp.setDepthNormalPclPlaneEstimationRansacThreshold(m_depthNormalPclPlaneEstimationRansacThreshold);
  xmlp.setDepthNormalSamplingStepX(m_depthNormalSamplingStepX);
  xmlp.setDepthNormalSamplingStepY(m_depthNormalSamplingStepY);

  try {
    if (verbose) {
      std::cout << " *********** Parsing XML for Mb Depth Tracker ************ " << std::endl;
    }
    xmlp.parse(configFile);
  } catch (const vpException &e) {
    std::cerr << "Exception: " << e.what() << std::endl;
    throw vpException(vpException::ioError, "Cannot open XML file \"%s\"", configFile.c_str());
  }

  vpCameraParameters camera;
  xmlp.getCameraParameters(camera);
  setCameraParameters(camera);

  setAngleAppear(vpMath::rad(xmlp.getAngleAppear()));
  setAngleDisappear(vpMath::rad(xmlp.getAngleDisappear()));

  if (xmlp.hasNearClippingDistance())
    setNearClippingDistance(xmlp.getNearClippingDistance());

  if (xmlp.hasFarClippingDistance())
    setFarClippingDistance(xmlp.getFarClippingDistance());

  if (xmlp.getFovClipping())
    setClipping(clippingFlag | vpPolygon3D::FOV_CLIPPING);

  setDepthNormalFeatureEstimationMethod(xmlp.getDepthNormalFeatureEstimationMethod());
  setDepthNormalPclPlaneEstimationMethod(xmlp.getDepthNormalPclPlaneEstimationMethod());
  setDepthNormalPclPlaneEstimationRansacMaxIter(xmlp.getDepthNormalPclPlaneEstimationRansacMaxIter());
  setDepthNormalPclPlaneEstimationRansacThreshold(xmlp.getDepthNormalPclPlaneEstimationRansacThreshold());
  setDepthNormalSamplingStep(xmlp.getDepthNormalSamplingStepX(), xmlp.getDepthNormalSamplingStepY());
}

void vpMbDepthNormalTracker::reInitModel(const vpImage<unsigned char> &I, const std::string &cad_name,
                                         const vpHomogeneousMatrix &cMo, bool verbose)
{
  m_cMo.eye();

  for (size_t i = 0; i < m_depthNormalFaces.size(); i++) {
    delete m_depthNormalFaces[i];
    m_depthNormalFaces[i] = NULL;
  }

  m_depthNormalFaces.clear();

  loadModel(cad_name, verbose);
  initFromPose(I, cMo);
}

#if defined(VISP_HAVE_PCL)
void vpMbDepthNormalTracker::reInitModel(const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud,
                                         const std::string &cad_name, const vpHomogeneousMatrix &cMo,
                                         bool verbose)
{
  vpImage<unsigned char> I_dummy(point_cloud->height, point_cloud->width);
  reInitModel(I_dummy, cad_name, cMo, verbose);
}

#endif

void vpMbDepthNormalTracker::resetTracker()
{
  m_cMo.eye();

  for (std::vector<vpMbtFaceDepthNormal *>::iterator it = m_depthNormalFaces.begin(); it != m_depthNormalFaces.end();
       ++it) {
    vpMbtFaceDepthNormal *normal_face = *it;
    delete normal_face;
    normal_face = NULL;
  }

  m_depthNormalFaces.clear();

  m_computeInteraction = true;
  computeCovariance = false;

  angleAppears = vpMath::rad(89);
  angleDisappears = vpMath::rad(89);

  clippingFlag = vpPolygon3D::NO_CLIPPING;

  m_lambda = 1.0;

  faces.reset();

  m_optimizationMethod = vpMbTracker::GAUSS_NEWTON_OPT;

  useScanLine = false;

#ifdef VISP_HAVE_OGRE
  useOgre = false;
#endif

  m_depthNormalListOfActiveFaces.clear();
  m_depthNormalListOfDesiredFeatures.clear();
}

void vpMbDepthNormalTracker::setOgreVisibilityTest(const bool &v)
{
  vpMbTracker::setOgreVisibilityTest(v);
#ifdef VISP_HAVE_OGRE
  faces.getOgreContext()->setWindowName("MBT Depth");
#endif
}

void vpMbDepthNormalTracker::setPose(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cdMo)
{
  m_cMo = cdMo;
  init(I);
}

void vpMbDepthNormalTracker::setPose(const vpImage<vpRGBa> &I_color, const vpHomogeneousMatrix &cdMo)
{
  m_cMo = cdMo;
  vpImageConvert::convert(I_color, m_I);
  init(m_I);
}

#if defined(VISP_HAVE_PCL)
void vpMbDepthNormalTracker::setPose(const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud,
                                     const vpHomogeneousMatrix &cdMo)
{
  vpImage<unsigned char> I_dummy(point_cloud->height, point_cloud->width);
  m_cMo = cdMo;
  init(I_dummy);
}
#endif

void vpMbDepthNormalTracker::setScanLineVisibilityTest(const bool &v)
{
  vpMbTracker::setScanLineVisibilityTest(v);

  for (std::vector<vpMbtFaceDepthNormal *>::const_iterator it = m_depthNormalFaces.begin();
       it != m_depthNormalFaces.end(); ++it) {
    (*it)->setScanLineVisibilityTest(v);
  }
}

void vpMbDepthNormalTracker::setUseDepthNormalTracking(const std::string &name, const bool &useDepthNormalTracking)
{
  for (std::vector<vpMbtFaceDepthNormal *>::const_iterator it = m_depthNormalFaces.begin();
       it != m_depthNormalFaces.end(); ++it) {
    vpMbtFaceDepthNormal *face = *it;
    if (face->m_polygon->getName() == name) {
      face->setTracked(useDepthNormalTracking);
    }
  }
}

void vpMbDepthNormalTracker::testTracking() {}

#ifdef VISP_HAVE_PCL
void vpMbDepthNormalTracker::segmentPointCloud(const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
{
  m_depthNormalListOfActiveFaces.clear();
  m_depthNormalListOfDesiredFeatures.clear();

#if DEBUG_DISPLAY_DEPTH_NORMAL
  if (!m_debugDisp_depthNormal->isInitialised()) {
    m_debugImage_depthNormal.resize(point_cloud->height, point_cloud->width);
    m_debugDisp_depthNormal->init(m_debugImage_depthNormal, 50, 0, "Debug display normal depth tracker");
  }

  m_debugImage_depthNormal = 0;
  std::vector<std::vector<vpImagePoint> > roiPts_vec;
#endif

  for (std::vector<vpMbtFaceDepthNormal *>::iterator it = m_depthNormalFaces.begin(); it != m_depthNormalFaces.end();
       ++it) {
    vpMbtFaceDepthNormal *face = *it;

    if (face->isVisible() && face->isTracked()) {
      vpColVector desired_features;

#if DEBUG_DISPLAY_DEPTH_NORMAL
      std::vector<std::vector<vpImagePoint> > roiPts_vec_;
#endif
      if (face->computeDesiredFeatures(m_cMo, point_cloud->width, point_cloud->height, point_cloud, desired_features,
                                       m_depthNormalSamplingStepX, m_depthNormalSamplingStepY
#if DEBUG_DISPLAY_DEPTH_NORMAL
                                       ,
                                       m_debugImage_depthNormal, roiPts_vec_
#endif
                                       , m_mask
                                       )) {
        m_depthNormalListOfDesiredFeatures.push_back(desired_features);
        m_depthNormalListOfActiveFaces.push_back(face);

#if DEBUG_DISPLAY_DEPTH_NORMAL
        roiPts_vec.insert(roiPts_vec.end(), roiPts_vec_.begin(), roiPts_vec_.end());
#endif
      }
    }
  }

#if DEBUG_DISPLAY_DEPTH_NORMAL
  vpDisplay::display(m_debugImage_depthNormal);

  for (size_t i = 0; i < roiPts_vec.size(); i++) {
    if (roiPts_vec[i].empty())
      continue;

    for (size_t j = 0; j < roiPts_vec[i].size() - 1; j++) {
      vpDisplay::displayLine(m_debugImage_depthNormal, roiPts_vec[i][j], roiPts_vec[i][j + 1], vpColor::red, 2);
    }
    vpDisplay::displayLine(m_debugImage_depthNormal, roiPts_vec[i][0], roiPts_vec[i][roiPts_vec[i].size() - 1],
                           vpColor::red, 2);
  }

  vpDisplay::flush(m_debugImage_depthNormal);
#endif
}
#endif

void vpMbDepthNormalTracker::segmentPointCloud(const std::vector<vpColVector> &point_cloud, unsigned int width,
                                               unsigned int height)
{
  m_depthNormalListOfActiveFaces.clear();
  m_depthNormalListOfDesiredFeatures.clear();

#if DEBUG_DISPLAY_DEPTH_NORMAL
  if (!m_debugDisp_depthNormal->isInitialised()) {
    m_debugImage_depthNormal.resize(height, width);
    m_debugDisp_depthNormal->init(m_debugImage_depthNormal, 50, 0, "Debug display normal depth tracker");
  }

  m_debugImage_depthNormal = 0;
  std::vector<std::vector<vpImagePoint> > roiPts_vec;
#endif

  for (std::vector<vpMbtFaceDepthNormal *>::iterator it = m_depthNormalFaces.begin(); it != m_depthNormalFaces.end();
       ++it) {
    vpMbtFaceDepthNormal *face = *it;

    if (face->isVisible() && face->isTracked()) {
      vpColVector desired_features;

#if DEBUG_DISPLAY_DEPTH_NORMAL
      std::vector<std::vector<vpImagePoint> > roiPts_vec_;
#endif

      if (face->computeDesiredFeatures(m_cMo, width, height, point_cloud, desired_features, m_depthNormalSamplingStepX,
                                       m_depthNormalSamplingStepY
#if DEBUG_DISPLAY_DEPTH_NORMAL
                                       ,
                                       m_debugImage_depthNormal, roiPts_vec_
#endif
                                       , m_mask
                                       )) {
        m_depthNormalListOfDesiredFeatures.push_back(desired_features);
        m_depthNormalListOfActiveFaces.push_back(face);

#if DEBUG_DISPLAY_DEPTH_NORMAL
        roiPts_vec.insert(roiPts_vec.end(), roiPts_vec_.begin(), roiPts_vec_.end());
#endif
      }
    }
  }

#if DEBUG_DISPLAY_DEPTH_NORMAL
  vpDisplay::display(m_debugImage_depthNormal);

  for (size_t i = 0; i < roiPts_vec.size(); i++) {
    if (roiPts_vec[i].empty())
      continue;

    for (size_t j = 0; j < roiPts_vec[i].size() - 1; j++) {
      vpDisplay::displayLine(m_debugImage_depthNormal, roiPts_vec[i][j], roiPts_vec[i][j + 1], vpColor::red, 2);
    }
    vpDisplay::displayLine(m_debugImage_depthNormal, roiPts_vec[i][0], roiPts_vec[i][roiPts_vec[i].size() - 1],
                           vpColor::red, 2);
  }

  vpDisplay::flush(m_debugImage_depthNormal);
#endif
}

void vpMbDepthNormalTracker::setCameraParameters(const vpCameraParameters &cam)
{
  m_cam = cam;

  for (std::vector<vpMbtFaceDepthNormal *>::const_iterator it = m_depthNormalFaces.begin();
       it != m_depthNormalFaces.end(); ++it) {
    (*it)->setCameraParameters(cam);
  }
}

void vpMbDepthNormalTracker::setDepthNormalFaceCentroidMethod(const vpMbtFaceDepthNormal::vpFaceCentroidType &method)
{
  for (std::vector<vpMbtFaceDepthNormal *>::const_iterator it = m_depthNormalFaces.begin();
       it != m_depthNormalFaces.end(); ++it) {
    (*it)->setFaceCentroidMethod(method);
  }
}

void vpMbDepthNormalTracker::setDepthNormalFeatureEstimationMethod(
    const vpMbtFaceDepthNormal::vpFeatureEstimationType &method)
{
  m_depthNormalFeatureEstimationMethod = method;

  for (std::vector<vpMbtFaceDepthNormal *>::const_iterator it = m_depthNormalFaces.begin();
       it != m_depthNormalFaces.end(); ++it) {
    (*it)->setFeatureEstimationMethod(method);
  }
}

void vpMbDepthNormalTracker::setDepthNormalPclPlaneEstimationMethod(int method)
{
  m_depthNormalPclPlaneEstimationMethod = method;

  for (std::vector<vpMbtFaceDepthNormal *>::const_iterator it = m_depthNormalFaces.begin();
       it != m_depthNormalFaces.end(); ++it) {
    (*it)->setPclPlaneEstimationMethod(method);
  }
}

void vpMbDepthNormalTracker::setDepthNormalPclPlaneEstimationRansacMaxIter(int maxIter)
{
  m_depthNormalPclPlaneEstimationRansacMaxIter = maxIter;

  for (std::vector<vpMbtFaceDepthNormal *>::const_iterator it = m_depthNormalFaces.begin();
       it != m_depthNormalFaces.end(); ++it) {
    (*it)->setPclPlaneEstimationRansacMaxIter(maxIter);
  }
}

void vpMbDepthNormalTracker::setDepthNormalPclPlaneEstimationRansacThreshold(double threshold)
{
  m_depthNormalPclPlaneEstimationRansacThreshold = threshold;

  for (std::vector<vpMbtFaceDepthNormal *>::const_iterator it = m_depthNormalFaces.begin();
       it != m_depthNormalFaces.end(); ++it) {
    (*it)->setPclPlaneEstimationRansacThreshold(threshold);
  }
}

void vpMbDepthNormalTracker::setDepthNormalSamplingStep(unsigned int stepX, unsigned int stepY)
{
  if (stepX == 0 || stepY == 0) {
    std::cerr << "stepX and stepY must be greater than zero!" << std::endl;
    return;
  }

  m_depthNormalSamplingStepX = stepX;
  m_depthNormalSamplingStepY = stepY;
}

// void vpMbDepthNormalTracker::setDepthNormalUseRobust(bool use) {
//  m_depthNormalUseRobust = use;
//}

void vpMbDepthNormalTracker::track(const vpImage<unsigned char> &)
{
  throw vpException(vpException::fatalError, "Cannot track with a grayscale image!");
}

void vpMbDepthNormalTracker::track(const vpImage<vpRGBa> &)
{
  throw vpException(vpException::fatalError, "Cannot track with a color image!");
}

#ifdef VISP_HAVE_PCL
void vpMbDepthNormalTracker::track(const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
{
  segmentPointCloud(point_cloud);

  computeVVS();

  computeVisibility(point_cloud->width, point_cloud->height);
}
#endif

void vpMbDepthNormalTracker::track(const std::vector<vpColVector> &point_cloud, unsigned int width,
                                   unsigned int height)
{
  segmentPointCloud(point_cloud, width, height);

  computeVVS();

  computeVisibility(width, height);
}

void vpMbDepthNormalTracker::initCircle(const vpPoint & /*p1*/, const vpPoint & /*p2*/, const vpPoint & /*p3*/,
                                        double /*radius*/, int /*idFace*/, const std::string & /*name*/)
{
  throw vpException(vpException::fatalError, "vpMbDepthNormalTracker::initCircle() should not be called!");
}

void vpMbDepthNormalTracker::initCylinder(const vpPoint & /*p1*/, const vpPoint & /*p2*/, double /*radius*/,
                                          int /*idFace*/, const std::string & /*name*/)
{
  throw vpException(vpException::fatalError, "vpMbDepthNormalTracker::initCylinder() should not be called!");
}

void vpMbDepthNormalTracker::initFaceFromCorners(vpMbtPolygon &polygon) { addFace(polygon, false); }

void vpMbDepthNormalTracker::initFaceFromLines(vpMbtPolygon &polygon) { addFace(polygon, true); }
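
Minimal usage sketch (an illustrative example, not taken from the file above): how this tracker can be driven per frame with an organized point cloud stored as a std::vector<vpColVector>, using only the public calls defined or referenced in this source. The file names "config.xml" and "model.cao", the 640x480 image size and the grabPointCloud() helper are placeholders for application-specific code.

#include <vector>

#include <visp3/mbt/vpMbDepthNormalTracker.h>

// Placeholder: returns width*height 3-vectors (X, Y, Z), one per pixel, row-major.
std::vector<vpColVector> grabPointCloud(unsigned int width, unsigned int height);

void howToUseDepthNormalTracker()
{
  const unsigned int width = 640, height = 480;

  vpMbDepthNormalTracker tracker;
  tracker.loadConfigFile("config.xml");     // camera parameters + depth-normal settings
  tracker.loadModel("model.cao");           // CAD model of the tracked object
  tracker.setDepthNormalSamplingStep(2, 2); // sampling step in x and y

  vpImage<unsigned char> I(height, width);  // dummy image, only its size is used here
  vpHomogeneousMatrix cMo_init;             // initial pose, obtained elsewhere
  tracker.initFromPose(I, cMo_init);

  for (unsigned int frame = 0; frame < 100; frame++) {
    std::vector<vpColVector> point_cloud = grabPointCloud(width, height);
    tracker.track(point_cloud, width, height); // segment faces, run VVS, update visibility

    vpHomogeneousMatrix cMo;
    tracker.getPose(cMo); // refined pose of the object in the camera frame
  }
}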