// Visual Servoing Platform (ViSP) version 3.5.0
// tutorial-apriltag-detector-live.cpp
2#include <visp3/core/vpConfig.h>
3#ifdef VISP_HAVE_MODULE_SENSOR
4#include <visp3/sensor/vpV4l2Grabber.h>
5#include <visp3/sensor/vp1394CMUGrabber.h>
6#include <visp3/sensor/vp1394TwoGrabber.h>
7#include <visp3/sensor/vpFlyCaptureGrabber.h>
8#include <visp3/sensor/vpRealSense2.h>
9#endif
11#include <visp3/detection/vpDetectorAprilTag.h>
13#include <visp3/gui/vpDisplayGDI.h>
14#include <visp3/gui/vpDisplayOpenCV.h>
15#include <visp3/gui/vpDisplayX.h>
16#include <visp3/core/vpXmlParserCamera.h>
17
19//#undef VISP_HAVE_V4L2
20//#undef VISP_HAVE_DC1394
21//#undef VISP_HAVE_CMU1394
22//#undef VISP_HAVE_FLYCAPTURE
23//#undef VISP_HAVE_REALSENSE2
24//#undef VISP_HAVE_OPENCV
26
//! Live AprilTag detection and pose estimation demo.
//! Grabs images from the first available grabber (V4L2, DC1394, CMU1394,
//! FlyCapture, RealSense2 or OpenCV), detects AprilTags, estimates their pose
//! with the selected method and displays the result until the user clicks.
int main(int argc, const char **argv)
{
#if defined(VISP_HAVE_APRILTAG) && \
    (defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_CMU1394) || (VISP_HAVE_OPENCV_VERSION >= 0x020100) || \
    defined(VISP_HAVE_FLYCAPTURE) || defined(VISP_HAVE_REALSENSE2) )

  int opt_device = 0; // For OpenCV and V4l2 grabber to set the camera device
  // NOTE(review): these two declarations were lost in the documentation
  // extraction; defaults restored from the ViSP tutorial sources — confirm
  // against the repository if the advertised "(default: 0)" help text matters.
  vpDetectorAprilTag::vpAprilTagFamily tagFamily = vpDetectorAprilTag::TAG_36h11;
  vpDetectorAprilTag::vpPoseEstimationMethod poseEstimationMethod = vpDetectorAprilTag::HOMOGRAPHY_VIRTUAL_VS;
  double tagSize = 0.053;          // Tag side length in meters
  float quad_decimate = 1.0;       // Image decimation before quad detection (1 = full resolution)
  int nThreads = 1;                // Threads used by the AprilTag detector
  std::string intrinsic_file = ""; // XML file with camera intrinsics
  std::string camera_name = "";    // Camera name inside the XML intrinsics file
  bool display_tag = false;        // Draw the detected tag contours when true
  int color_id = -1;               // -1: default tag colors, otherwise vpColor::getColor(color_id)
  unsigned int thickness = 2;      // Line thickness for overlays
  bool align_frame = false;        // Align the tag frame z-axis with the camera axis when true

#if !(defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_OPENCV))
  bool display_off = true;
  std::cout << "Warning: There is no 3rd party (X11, GDI or openCV) to display images..." << std::endl;
#else
  bool display_off = false;
#endif

  // Command-line parsing: every option with a value checks i + 1 < argc first.
  for (int i = 1; i < argc; i++) {
    if (std::string(argv[i]) == "--pose_method" && i + 1 < argc) {
      poseEstimationMethod = (vpDetectorAprilTag::vpPoseEstimationMethod)atoi(argv[i + 1]);
    } else if (std::string(argv[i]) == "--tag_size" && i + 1 < argc) {
      tagSize = atof(argv[i + 1]);
    } else if (std::string(argv[i]) == "--camera_device" && i + 1 < argc) {
      opt_device = atoi(argv[i + 1]);
    } else if (std::string(argv[i]) == "--quad_decimate" && i + 1 < argc) {
      quad_decimate = (float)atof(argv[i + 1]);
    } else if (std::string(argv[i]) == "--nthreads" && i + 1 < argc) {
      nThreads = atoi(argv[i + 1]);
    } else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) {
      intrinsic_file = std::string(argv[i + 1]);
    } else if (std::string(argv[i]) == "--camera_name" && i + 1 < argc) {
      camera_name = std::string(argv[i + 1]);
    } else if (std::string(argv[i]) == "--display_tag") {
      display_tag = true;
    } else if (std::string(argv[i]) == "--display_off") {
      display_off = true;
    } else if (std::string(argv[i]) == "--color" && i + 1 < argc) {
      color_id = atoi(argv[i + 1]);
    } else if (std::string(argv[i]) == "--thickness" && i + 1 < argc) {
      thickness = (unsigned int)atoi(argv[i + 1]);
    } else if (std::string(argv[i]) == "--tag_family" && i + 1 < argc) {
      tagFamily = (vpDetectorAprilTag::vpAprilTagFamily)atoi(argv[i + 1]);
    } else if (std::string(argv[i]) == "--z_aligned") {
      align_frame = true;
    } else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
      std::cout << "Usage: " << argv[0]
                << " [--camera_device <camera device> (default: 0)]"
                << " [--tag_size <tag_size in m> (default: 0.053)]"
                   " [--quad_decimate <quad_decimate> (default: 1)]"
                   " [--nthreads <nb> (default: 1)]"
                   " [--intrinsic <intrinsic file> (default: empty)]"
                   " [--camera_name <camera name> (default: empty)]"
                   " [--pose_method <method> (0: HOMOGRAPHY, 1: HOMOGRAPHY_VIRTUAL_VS, "
                   " 2: DEMENTHON_VIRTUAL_VS, 3: LAGRANGE_VIRTUAL_VS, "
                   " 4: BEST_RESIDUAL_VIRTUAL_VS, 5: HOMOGRAPHY_ORTHOGONAL_ITERATION) (default: 0)]"
                   " [--tag_family <family> (0: TAG_36h11, 1: TAG_36h10 (DEPRECATED), 2: TAG_36ARTOOLKIT (DEPRECATED),"
                   " 3: TAG_25h9, 4: TAG_25h7 (DEPRECATED), 5: TAG_16h5, 6: TAG_CIRCLE21h7, 7: TAG_CIRCLE49h12,"
                   " 8: TAG_CUSTOM48h12, 9: TAG_STANDARD41h12, 10: TAG_STANDARD52h13) (default: 0)]"
                   " [--display_tag] [--z_aligned]";
#if (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_OPENCV))
      std::cout << " [--display_off] [--color <color id>] [--thickness <line thickness>]";
#endif
      std::cout << " [--help]" << std::endl;
      return EXIT_SUCCESS;
    }
  }

  try {
    // Default intrinsics; overridden by the XML file when --intrinsic and
    // --camera_name are both given, or by the RealSense device below.
    vpCameraParameters cam;
    cam.initPersProjWithoutDistortion(615.1674805, 615.1675415, 312.1889954, 243.4373779);
    vpXmlParserCamera parser;
    if (!intrinsic_file.empty() && !camera_name.empty())
      parser.parse(cam, intrinsic_file, camera_name, vpCameraParameters::perspectiveProjWithoutDistortion);

    vpImage<unsigned char> I;

    // Select the frame grabber at compile time, first available wins.
#if defined(VISP_HAVE_V4L2)
    vpV4l2Grabber g;
    std::ostringstream device;
    device << "/dev/video" << opt_device;
    std::cout << "Use Video 4 Linux grabber on device " << device.str() << std::endl;
    g.setDevice(device.str());
    g.setScale(1);
    g.open(I);
#elif defined(VISP_HAVE_DC1394)
    (void)opt_device; // To avoid non used warning
    std::cout << "Use DC1394 grabber" << std::endl;
    vp1394TwoGrabber g;
    g.open(I);
#elif defined(VISP_HAVE_CMU1394)
    (void)opt_device; // To avoid non used warning
    std::cout << "Use CMU1394 grabber" << std::endl;
    vp1394CMUGrabber g;
    g.open(I);
#elif defined(VISP_HAVE_FLYCAPTURE)
    (void)opt_device; // To avoid non used warning
    std::cout << "Use FlyCapture grabber" << std::endl;
    vpFlyCaptureGrabber g;
    g.open(I);
#elif defined(VISP_HAVE_REALSENSE2)
    (void)opt_device; // To avoid non used warning
    std::cout << "Use Realsense 2 grabber" << std::endl;
    vpRealSense2 g;
    rs2::config config;
    config.disable_stream(RS2_STREAM_DEPTH);
    config.disable_stream(RS2_STREAM_INFRARED);
    config.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
    g.open(config);
    g.acquire(I);

    std::cout << "Read camera parameters from Realsense device" << std::endl;
    cam = g.getCameraParameters(RS2_STREAM_COLOR, vpCameraParameters::perspectiveProjWithoutDistortion);
#elif defined(VISP_HAVE_OPENCV)
    std::cout << "Use OpenCV grabber on device " << opt_device << std::endl;
    cv::VideoCapture g(opt_device); // Open the default camera
    if (!g.isOpened()) {            // Check if we succeeded
      std::cout << "Failed to open the camera" << std::endl;
      return -1;
    }
    cv::Mat frame;
    g >> frame; // get a new frame from camera
    vpImageConvert::convert(frame, I);
#endif

    std::cout << cam << std::endl;
    std::cout << "poseEstimationMethod: " << poseEstimationMethod << std::endl;
    std::cout << "tagFamily: " << tagFamily << std::endl;
    std::cout << "nThreads : " << nThreads << std::endl;
    std::cout << "Z aligned: " << align_frame << std::endl;

    vpDisplay *d = NULL;
    if (!display_off) {
#ifdef VISP_HAVE_X11
      d = new vpDisplayX(I);
#elif defined(VISP_HAVE_GDI)
      d = new vpDisplayGDI(I);
#elif defined(VISP_HAVE_OPENCV)
      d = new vpDisplayOpenCV(I);
#endif
    }

    // Configure the AprilTag detector from the command-line options.
    vpDetectorAprilTag detector(tagFamily);
    detector.setAprilTagQuadDecimate(quad_decimate);
    detector.setAprilTagPoseEstimationMethod(poseEstimationMethod);
    detector.setAprilTagNbThreads(nThreads);
    detector.setDisplayTag(display_tag, color_id < 0 ? vpColor::none : vpColor::getColor(color_id), thickness);
    detector.setZAlignedWithCameraAxis(align_frame);

    std::vector<double> time_vec;
    for (;;) {
      // Acquire a new image from whichever grabber was selected above.
#if defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_CMU1394) || defined(VISP_HAVE_FLYCAPTURE) || defined(VISP_HAVE_REALSENSE2)
      g.acquire(I);
#elif defined(VISP_HAVE_OPENCV)
      g >> frame;
      vpImageConvert::convert(frame, I);
#endif

      vpDisplay::display(I);

      // Detect the tags and time the detection + pose estimation.
      double t = vpTime::measureTimeMs();
      std::vector<vpHomogeneousMatrix> cMo_vec;
      detector.detect(I, tagSize, cam, cMo_vec);
      t = vpTime::measureTimeMs() - t;
      time_vec.push_back(t);

      std::stringstream ss;
      ss << "Detection time: " << t << " ms for " << detector.getNbObjects() << " tags";
      vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);

      // Draw the estimated pose frame of each detected tag.
      for (size_t i = 0; i < cMo_vec.size(); i++) {
        vpDisplay::displayFrame(I, cMo_vec[i], cam, tagSize / 2, vpColor::none, 3);
      }

      vpDisplay::displayText(I, 20, 20, "Click to quit.", vpColor::red);
      vpDisplay::flush(I);
      if (vpDisplay::getClick(I, false))
        break;
    }

    std::cout << "Benchmark computation time" << std::endl;
    std::cout << "Mean / Median / Std: " << vpMath::getMean(time_vec) << " ms"
              << " ; " << vpMath::getMedian(time_vec) << " ms"
              << " ; " << vpMath::getStdev(time_vec) << " ms" << std::endl;

    if (!display_off)
      delete d;

  } catch (const vpException &e) {
    std::cerr << "Catch an exception: " << e.getMessage() << std::endl;
  }

  return EXIT_SUCCESS;
#else
  (void)argc;
  (void)argv;
#ifndef VISP_HAVE_APRILTAG
  std::cout << "Enable Apriltag support, configure and build ViSP to run this tutorial" << std::endl;
#else
  std::cout << "Install a 3rd party dedicated to frame grabbing (dc1394, cmu1394, v4l2, OpenCV, FlyCapture, Realsense2), configure and build ViSP again to use this example" << std::endl;
#endif
#endif
  return EXIT_SUCCESS;
}
Firewire cameras video capture based on CMU 1394 Digital Camera SDK.
void open(vpImage< unsigned char > &I)
Class for firewire ieee1394 video devices using libdc1394-2.x api.
void open(vpImage< unsigned char > &I)
Generic class defining intrinsic camera parameters.
void initPersProjWithoutDistortion(double px, double py, double u0, double v0)
static vpColor getColor(const unsigned int &i)
Definition: vpColor.h:310
static const vpColor red
Definition: vpColor.h:217
static const vpColor none
Definition: vpColor.h:229
@ TAG_36h11
AprilTag 36h11 pattern (recommended)
Display for windows using GDI (available on any windows 32 platform).
Definition: vpDisplayGDI.h:129
The vpDisplayOpenCV allows to display image using the OpenCV library. Thus to enable this class OpenC...
Use the X11 console to display images on unix-like OS. Thus to enable this class X11 should be instal...
Definition: vpDisplayX.h:135
Class that defines generic functionalities for display.
Definition: vpDisplay.h:178
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void flush(const vpImage< unsigned char > &I)
static void displayFrame(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, double size, const vpColor &color=vpColor::none, unsigned int thickness=1, const vpImagePoint &offset=vpImagePoint(0, 0))
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
error that can be emitted by ViSP classes.
Definition: vpException.h:72
const char * getMessage() const
Definition: vpException.cpp:90
void open(vpImage< unsigned char > &I)
static void convert(const vpImage< unsigned char > &src, vpImage< vpRGBa > &dest)
static double getMedian(const std::vector< double > &v)
Definition: vpMath.cpp:261
static double getStdev(const std::vector< double > &v, bool useBesselCorrection=false)
Definition: vpMath.cpp:291
static double getMean(const std::vector< double > &v)
Definition: vpMath.cpp:241
void acquire(vpImage< unsigned char > &grey, double *ts=NULL)
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
bool open(const rs2::config &cfg=rs2::config())
Class that is a wrapper over the Video4Linux2 (V4L2) driver.
void open(vpImage< unsigned char > &I)
void setScale(unsigned scale=vpV4l2Grabber::DEFAULT_SCALE)
void setDevice(const std::string &devname)
XML parser to load and save intrinsic camera parameters.
int parse(vpCameraParameters &cam, const std::string &filename, const std::string &camera_name, const vpCameraParameters::vpCameraParametersProjType &projModel, unsigned int image_width=0, unsigned int image_height=0)
VISP_EXPORT double measureTimeMs()