Visual Servoing Platform version 3.5.0
testKeyPoint-2.cpp
/****************************************************************************
 *
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See http://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * Test keypoint matching and pose estimation.
 *
 * Authors:
 * Souriya Trinh
 *
 *****************************************************************************/

#include <iostream>

#include <visp3/core/vpConfig.h>

#if defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020301)

#include <visp3/core/vpImage.h>
#include <visp3/core/vpIoTools.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayGTK.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/io/vpParseArgv.h>
#include <visp3/io/vpVideoReader.h>
#include <visp3/mbt/vpMbEdgeTracker.h>
#include <visp3/vision/vpKeyPoint.h>

// List of allowed command line options
#define GETOPTARGS "cdph"

void usage(const char *name, const char *badparam);
bool getOptions(int argc, const char **argv, bool &click_allowed, bool &display, bool &use_parallel_ransac);

/*!
  Print the program options.

  \param name : Program name.
  \param badparam : Bad parameter.
*/
void usage(const char *name, const char *badparam)
{
  fprintf(stdout, "\n\
Test keypoints matching.\n\
\n\
SYNOPSIS\n\
  %s [-c] [-d] [-p] [-h]\n", name);

  fprintf(stdout, "\n\
OPTIONS: \n\
\n\
  -c\n\
     Disable the mouse click. Useful to automate the \n\
     execution of this program without human intervention.\n\
\n\
  -d \n\
     Turn off the display.\n\
\n\
  -p \n\
     Use parallel RANSAC.\n\
\n\
  -h\n\
     Print the help.\n");

  if (badparam)
    fprintf(stdout, "\nERROR: Bad parameter [%s]\n", badparam);
}

/*!
  Set the program options.

  \param argc : Command line number of parameters.
  \param argv : Array of command line parameters.
  \param click_allowed : Enable/disable the mouse click.
  \param display : Enable/disable the display.
  \param use_parallel_ransac : Enable/disable parallel RANSAC.

  \return false if the program has to be stopped, true otherwise.
*/
bool getOptions(int argc, const char **argv, bool &click_allowed, bool &display, bool &use_parallel_ransac)
{
  const char *optarg_;
  int c;
  while ((c = vpParseArgv::parse(argc, argv, GETOPTARGS, &optarg_)) > 1) {

    switch (c) {
    case 'c':
      click_allowed = false;
      break;
    case 'd':
      display = false;
      break;
    case 'p':
      use_parallel_ransac = true;
      break;
    case 'h':
      usage(argv[0], NULL);
      return false;
      break;

    default:
      usage(argv[0], optarg_);
      return false;
      break;
    }
  }

  if ((c == 1) || (c == -1)) {
    // standalone param or error
    usage(argv[0], NULL);
    std::cerr << "ERROR: " << std::endl;
    std::cerr << "  Bad argument " << optarg_ << std::endl << std::endl;
    return false;
  }

  return true;
}

template <typename Type>
void run_test(const std::string &env_ipath, bool opt_click_allowed, bool opt_display, bool use_parallel_ransac,
              vpImage<Type> &I, vpImage<Type> &IMatching)
{
  // Set the path location of the image sequence
  std::string dirname = vpIoTools::createFilePath(env_ipath, "mbt/cube");

  // Build the name of the image files
  std::string filenameRef = vpIoTools::createFilePath(dirname, "image0000.pgm");
  vpImageIo::read(I, filenameRef);
  std::string filenameCur = vpIoTools::createFilePath(dirname, "image%04d.pgm");

#if defined VISP_HAVE_X11
  vpDisplayX display;
#elif defined VISP_HAVE_GTK
  vpDisplayGTK display;
#elif defined VISP_HAVE_GDI
  vpDisplayGDI display;
#else
  vpDisplayOpenCV display;
#endif

  if (opt_display) {
    display.setDownScalingFactor(vpDisplay::SCALE_AUTO);
    display.init(I, 0, 0, "ORB keypoints matching and pose estimation");
  }

  vpCameraParameters cam;
  vpMbEdgeTracker tracker;
  // Load config for tracker
  std::string tracker_config_file = vpIoTools::createFilePath(env_ipath, "mbt/cube.xml");

  tracker.loadConfigFile(tracker_config_file);
  tracker.getCameraParameters(cam);
#if 0
  // Corresponding parameters manually set to have an example code
  vpMe me;
  me.setMaskSize(5);
  me.setMaskNumber(180);
  me.setRange(8);
  me.setThreshold(10000);
  me.setMu1(0.5);
  me.setMu2(0.5);
  me.setSampleStep(4);
  me.setNbTotalSample(250);
  tracker.setMovingEdge(me);
  cam.initPersProjWithoutDistortion(547.7367575, 542.0744058, 338.7036994, 234.5083345);
  tracker.setCameraParameters(cam);
  tracker.setNearClippingDistance(0.01);
  tracker.setFarClippingDistance(100.0);
#endif

  tracker.setAngleAppear(vpMath::rad(89));
  tracker.setAngleDisappear(vpMath::rad(89));
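  // Note: with appearance / disappearance angles set close to 90 degrees,
  // almost every CAD face oriented towards the camera is considered visible,
  // which maximizes the faces available for keypoint selection below.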

  // Load CAO model
  std::string cao_model_file = vpIoTools::createFilePath(env_ipath, "mbt/cube.cao");
  tracker.loadModel(cao_model_file);

  // Initialize the pose
  std::string init_file = vpIoTools::createFilePath(env_ipath, "mbt/cube.init");
  if (opt_display && opt_click_allowed) {
    tracker.initClick(I, init_file);
  } else {
    vpHomogeneousMatrix cMoi(0.02044769891, 0.1101505452, 0.5078963719, 2.063603907, 1.110231561, -0.4392789872);
    tracker.initFromPose(I, cMoi);
  }

  // Get the init pose
  vpHomogeneousMatrix cMo;
  tracker.getPose(cMo);

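  // vpKeyPoint wraps OpenCV keypoint detection, description and matching.
  // Here ORB is used both as detector and as descriptor extractor, with a
  // brute-force Hamming matcher. When OpenCV >= 2.4 is available, the matcher
  // is switched to FLANN below and the ORB pyramid is restricted to a single
  // level, so training keypoints are only extracted at the original resolution.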
  // Init keypoints
  vpKeyPoint keypoints("ORB", "ORB", "BruteForce-Hamming");
  keypoints.setRansacParallel(use_parallel_ransac);
#if (VISP_HAVE_OPENCV_VERSION >= 0x020400)
  // Bug when using LSH index with FLANN and OpenCV 2.3.1.
  // see http://code.opencv.org/issues/1741 (Bug #1741)
  keypoints.setMatcher("FlannBased");
#if (VISP_HAVE_OPENCV_VERSION < 0x030000)
  keypoints.setDetectorParameter("ORB", "nLevels", 1);
#else
  cv::Ptr<cv::ORB> orb_detector = keypoints.getDetector("ORB").dynamicCast<cv::ORB>();
  if (orb_detector) {
    orb_detector->setNLevels(1);
  }
#endif
#endif

  // Detect keypoints on the current image
  std::vector<cv::KeyPoint> trainKeyPoints;
  double elapsedTime;
  keypoints.detect(I, trainKeyPoints, elapsedTime);

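  // getPolygonFaces() returns the faces of the CAD model as image polygons
  // together with their 3D contour points. compute3DForPointsInPolygons()
  // then keeps only the detected keypoints that lie inside those faces and
  // computes their 3D coordinates from the current pose cMo and the camera
  // parameters cam.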
  // Keep only keypoints on the cube
  std::vector<vpPolygon> polygons;
  std::vector<std::vector<vpPoint> > roisPt;
  std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > pair =
      tracker.getPolygonFaces(true); // To detect an issue with CI
  polygons = pair.first;
  roisPt = pair.second;

  // Compute the 3D coordinates
  std::vector<cv::Point3f> points3f;
  vpKeyPoint::compute3DForPointsInPolygons(cMo, cam, trainKeyPoints, polygons, roisPt, points3f);

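  // buildReference() stores the keypoints, their descriptors and their 3D
  // coordinates in the reference (training) database. The boolean argument
  // resets (false) or appends to (true) the database, and the last integer is
  // used as a class identifier: images 0, 150 and 200 are added with ids 1, 2 and 3.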
  // Build the reference keypoints
  keypoints.buildReference(I, trainKeyPoints, points3f, false, 1);

  // Read image 150
  filenameRef = vpIoTools::createFilePath(dirname, "image0150.pgm");
  vpImageIo::read(I, filenameRef);

  // Init pose at image 150
  cMo.buildFrom(0.02651282185, -0.03713587374, 0.6873765919, 2.314744454, 0.3492296488, -0.1226054828);
  tracker.initFromPose(I, cMo);

  // Detect keypoints on the image 150
  keypoints.detect(I, trainKeyPoints, elapsedTime);

  // Keep only keypoints on the cube
  pair = tracker.getPolygonFaces(true, true, true); // To detect an issue with CI
  polygons = pair.first;
  roisPt = pair.second;

  // Compute the 3D coordinates
  vpKeyPoint::compute3DForPointsInPolygons(cMo, cam, trainKeyPoints, polygons, roisPt, points3f);

  // Build the reference keypoints
  keypoints.buildReference(I, trainKeyPoints, points3f, true, 2);

  // Read image 200
  filenameRef = vpIoTools::createFilePath(dirname, "image0200.pgm");
  vpImageIo::read(I, filenameRef);

  // Init pose at image 200
  cMo.buildFrom(0.02965448956, -0.07283091786, 0.7253526051, 2.300529617, -0.4286674806, 0.1788761025);
  tracker.initFromPose(I, cMo);

  // Detect keypoints on the image 200
  keypoints.detect(I, trainKeyPoints, elapsedTime);

  // Keep only keypoints on the cube
  pair = tracker.getPolygonFaces(false); // To detect an issue with CI
  polygons = pair.first;
  roisPt = pair.second;

  // Compute the 3D coordinates
  vpKeyPoint::compute3DForPointsInPolygons(cMo, cam, trainKeyPoints, polygons, roisPt, points3f);

  // Build the reference keypoints
  keypoints.buildReference(I, trainKeyPoints, points3f, true, 3);

  // Init reader for getting the input image sequence
  vpVideoReader g;
  g.setFileName(filenameCur);
  g.open(I);
  g.acquire(I);

#if defined VISP_HAVE_X11
  vpDisplayX display2;
#elif defined VISP_HAVE_GTK
  vpDisplayGTK display2;
#elif defined VISP_HAVE_GDI
  vpDisplayGDI display2;
#else
  vpDisplayOpenCV display2;
#endif

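  // createImageMatching() allocates IMatching, a mosaic image large enough to
  // show the reference images and the current frame side by side. In the loop
  // below, insertImageMatching() copies the current frame into this mosaic
  // before the matches are drawn across it.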
  keypoints.createImageMatching(I, IMatching);

  if (opt_display) {
    display2.init(IMatching, 0, (int)I.getHeight() / vpDisplay::getDownScalingFactor(I) + 80, "IMatching");
  }

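  // Main loop: the pose is re-estimated on every frame from keypoint matching
  // alone; the edge tracker is never asked to track and is only used to project
  // and display the CAD model at the estimated pose.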
  bool opt_click = false;
  double error;
  vpMouseButton::vpMouseButtonType button;
  std::vector<double> times_vec;
  while ((opt_display && !g.end()) || (!opt_display && g.getFrameIndex() < 30)) {
    g.acquire(I);

    if (opt_display) {
      vpDisplay::display(I);

      // Display image matching
      keypoints.insertImageMatching(I, IMatching);

      vpDisplay::display(IMatching);
    }

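    // matchPoint() detects keypoints in the current frame, matches them against
    // the reference database and estimates the pose cMo from the 2D-3D
    // correspondences with RANSAC. It returns false when no reliable pose can be
    // computed; error receives the estimation residual and elapsedTime the
    // processing time in milliseconds.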
    // Match keypoints and estimate the pose
    if (keypoints.matchPoint(I, cam, cMo, error, elapsedTime)) {
      times_vec.push_back(elapsedTime);

      tracker.setCameraParameters(cam);
      tracker.setPose(I, cMo);

      if (opt_display) {
        tracker.display(I, cMo, cam, vpColor::red, 2);
        vpDisplay::displayFrame(I, cMo, cam, 0.025, vpColor::none, 3);

        std::vector<vpImagePoint> ransacInliers = keypoints.getRansacInliers();
        std::vector<vpImagePoint> ransacOutliers = keypoints.getRansacOutliers();

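        // RANSAC inliers are drawn in green and outliers in red in the matching
        // mosaic. Each point is shifted by the current image width and height so
        // that it lands on the tile of IMatching that holds the current frame.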
        for (std::vector<vpImagePoint>::const_iterator it = ransacInliers.begin(); it != ransacInliers.end(); ++it) {
          vpImagePoint imPt(*it);
          imPt.set_u(imPt.get_u() + I.getWidth());
          imPt.set_v(imPt.get_v() + I.getHeight());
          vpDisplay::displayCircle(IMatching, imPt, 4, vpColor::green);
        }

        for (std::vector<vpImagePoint>::const_iterator it = ransacOutliers.begin(); it != ransacOutliers.end();
             ++it) {
          vpImagePoint imPt(*it);
          imPt.set_u(imPt.get_u() + I.getWidth());
          imPt.set_v(imPt.get_v() + I.getHeight());
          vpDisplay::displayCircle(IMatching, imPt, 4, vpColor::red);
        }

        keypoints.displayMatching(I, IMatching);

        // Display model in the correct sub-image in IMatching
        vpCameraParameters cam2;
        cam2.initPersProjWithoutDistortion(cam.get_px(), cam.get_py(), cam.get_u0() + I.getWidth(),
                                           cam.get_v0() + I.getHeight());
        tracker.setCameraParameters(cam2);
        tracker.setPose(IMatching, cMo);
        tracker.display(IMatching, cMo, cam2, vpColor::red, 2);
        vpDisplay::displayFrame(IMatching, cMo, cam2, 0.025, vpColor::none, 3);
      }
    }

    if (opt_display) {
      vpDisplay::flush(I);
      vpDisplay::flush(IMatching);
    }

    if (opt_click_allowed && opt_display) {
      // Click requested to process next image
      if (opt_click) {
        vpDisplay::getClick(I, button, true);
        if (button == vpMouseButton::button3) {
          opt_click = false;
        }
      } else {
        // Use right click to enable/disable step by step tracking
        if (vpDisplay::getClick(I, button, false)) {
          if (button == vpMouseButton::button3) {
            opt_click = true;
          } else if (button == vpMouseButton::button1) {
            break;
          }
        }
      }
    }
  }

  if (!times_vec.empty()) {
    std::cout << "Computation time, Mean: " << vpMath::getMean(times_vec)
              << " ms ; Median: " << vpMath::getMedian(times_vec)
              << " ms ; Std: " << vpMath::getStdev(times_vec) << std::endl;
  }
}

int main(int argc, const char **argv)
{
  try {
    std::string env_ipath;
    bool opt_click_allowed = true;
    bool opt_display = true;
    bool use_parallel_ransac = false;

    // Read the command line options
    if (getOptions(argc, argv, opt_click_allowed, opt_display, use_parallel_ransac) == false) {
      exit(-1);
    }

    // Get the visp-images-data package path or VISP_INPUT_IMAGE_PATH
    // environment variable value
    env_ipath = vpIoTools::getViSPImagesDataPath();

    if (env_ipath.empty()) {
      std::cerr << "Please set the VISP_INPUT_IMAGE_PATH environment "
                   "variable value."
                << std::endl;
      return -1;
    }

    {
      vpImage<unsigned char> I, IMatching;

      std::cout << "-- Test on gray level images" << std::endl;

      run_test(env_ipath, opt_click_allowed, opt_display, use_parallel_ransac, I, IMatching);
    }
    {
      vpImage<vpRGBa> I, IMatching;

      std::cout << "-- Test on color images" << std::endl;

      run_test(env_ipath, opt_click_allowed, opt_display, use_parallel_ransac, I, IMatching);
    }

  } catch (const vpException &e) {
    std::cerr << e.what() << std::endl;
    return -1;
  }

  std::cout << "testKeyPoint-2 is ok !" << std::endl;
  return 0;
}
#else
int main()
{
  std::cerr << "You need the OpenCV library." << std::endl;

  return 0;
}

#endif