Visual Servoing Platform version 3.6.0
testRealSense2_T265_images_odometry_async.cpp
/****************************************************************************
 *
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2023 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See https://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * Asynchronous acquisition of images and odometry information with
 * RealSense T265 sensor and librealsense2.
 *
*****************************************************************************/

#include <iostream>

#include <visp3/core/vpMeterPixelConversion.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/sensor/vpRealSense2.h>

#if defined(VISP_HAVE_REALSENSE2) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11) && \
  (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)) && (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))

#include <cstring> // memcpy
#include <functional>
#include <list>
#include <thread>

int main()
{
  vpHomogeneousMatrix cMw, cMw_0;
  vpHomogeneousMatrix cextMw(0, 0, 2, 0, 0, 0); // External camera view for pose visualization.
  vpColVector odo_vel, odo_acc, imu_acc, imu_vel;
  unsigned int confidence = 0;
  vpImagePoint frame_origin;
  std::list<std::pair<unsigned int, vpImagePoint> >
    frame_origins; // Frame origin's history for trajectory visualization.
  unsigned int display_scale = 2;

  try {
    vpRealSense2 g;

    rs2::config config;
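    // Enable the 6-DoF pose stream and both fisheye image streams (on the T265, fisheye index 1 is the left sensor and index 2 the right one).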
    config.enable_stream(RS2_STREAM_POSE, RS2_FORMAT_6DOF);
    config.enable_stream(RS2_STREAM_FISHEYE, 1, RS2_FORMAT_Y8);
    config.enable_stream(RS2_STREAM_FISHEYE, 2, RS2_FORMAT_Y8);

    // Creating images for left and right cameras, and for visualizing trajectory.
    vpImage<unsigned char> I_left, I_right;
    vpImage<unsigned char> I_pose(300, 300, 0);

    vpCameraParameters cam(300., 300., I_pose.getWidth() / 2, I_pose.getHeight() / 2); // For pose visualization.

    // Define frame callback.
    // The callback is executed on a sensor thread and can be called simultaneously from multiple sensors.
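    // Keep the processing in the callback light so that frame delivery from the sensor is not delayed.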
    std::function<void(rs2::frame)> callback = [&](const rs2::frame &frame) {
      if (rs2::frameset fs = frame.as<rs2::frameset>()) {
        // With callbacks, all synchronized streams will arrive in a single frameset.
        rs2::video_frame left_frame = fs.get_fisheye_frame(1);
        size_t size = left_frame.get_width() * left_frame.get_height();
        memcpy(I_left.bitmap, left_frame.get_data(), size);

        rs2::video_frame right_frame = fs.get_fisheye_frame(2);
        size = right_frame.get_width() * right_frame.get_height();
        memcpy(I_right.bitmap, right_frame.get_data(), size);

        rs2_pose pose_data = fs.get_pose_frame().get_pose_data();

        vpTranslationVector ctw(static_cast<double>(pose_data.translation.x),
                                static_cast<double>(pose_data.translation.y),
                                static_cast<double>(pose_data.translation.z));
        vpQuaternionVector cqw(static_cast<double>(pose_data.rotation.x), static_cast<double>(pose_data.rotation.y),
                               static_cast<double>(pose_data.rotation.z), static_cast<double>(pose_data.rotation.w));

        cMw.buildFrom(ctw, cqw);

        odo_vel.resize(6, false);
        odo_vel[0] = static_cast<double>(pose_data.velocity.x);
        odo_vel[1] = static_cast<double>(pose_data.velocity.y);
        odo_vel[2] = static_cast<double>(pose_data.velocity.z);
        odo_vel[3] = static_cast<double>(pose_data.angular_velocity.x);
        odo_vel[4] = static_cast<double>(pose_data.angular_velocity.y);
        odo_vel[5] = static_cast<double>(pose_data.angular_velocity.z);

        odo_acc.resize(6, false);
        odo_acc[0] = static_cast<double>(pose_data.acceleration.x);
        odo_acc[1] = static_cast<double>(pose_data.acceleration.y);
        odo_acc[2] = static_cast<double>(pose_data.acceleration.z);
        odo_acc[3] = static_cast<double>(pose_data.angular_acceleration.x);
        odo_acc[4] = static_cast<double>(pose_data.angular_acceleration.y);
        odo_acc[5] = static_cast<double>(pose_data.angular_acceleration.z);

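        // Tracker confidence reported by the T265: 0 = failed, 1 = low, 2 = medium, 3 = high.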
        confidence = pose_data.tracker_confidence;
      } else {
        // Streams that bypass synchronization (such as IMU, Pose, ...) will produce single frames.
        rs2_pose pose_data = frame.as<rs2::pose_frame>().get_pose_data();
        vpTranslationVector ctw(static_cast<double>(pose_data.translation.x),
                                static_cast<double>(pose_data.translation.y),
                                static_cast<double>(pose_data.translation.z));
        vpQuaternionVector cqw(static_cast<double>(pose_data.rotation.x), static_cast<double>(pose_data.rotation.y),
                               static_cast<double>(pose_data.rotation.z), static_cast<double>(pose_data.rotation.w));

        cMw.buildFrom(ctw, cqw);

        odo_vel.resize(6, false);
        odo_vel[0] = static_cast<double>(pose_data.velocity.x);
        odo_vel[1] = static_cast<double>(pose_data.velocity.y);
        odo_vel[2] = static_cast<double>(pose_data.velocity.z);
        odo_vel[3] = static_cast<double>(pose_data.angular_velocity.x);
        odo_vel[4] = static_cast<double>(pose_data.angular_velocity.y);
        odo_vel[5] = static_cast<double>(pose_data.angular_velocity.z);

        odo_acc.resize(6, false);
        odo_acc[0] = static_cast<double>(pose_data.acceleration.x);
        odo_acc[1] = static_cast<double>(pose_data.acceleration.y);
        odo_acc[2] = static_cast<double>(pose_data.acceleration.z);
        odo_acc[3] = static_cast<double>(pose_data.angular_acceleration.x);
        odo_acc[4] = static_cast<double>(pose_data.angular_acceleration.y);
        odo_acc[5] = static_cast<double>(pose_data.angular_acceleration.z);

        confidence = pose_data.tracker_confidence;
      }

      // Calculate the frame's origin to be projected on the image I_pose and append it to frame_origins
      vpHomogeneousMatrix cextMc = cextMw * cMw.inverse();
      vpMeterPixelConversion::convertPoint(cam, cextMc[0][3] / cextMc[2][3], cextMc[1][3] / cextMc[2][3], frame_origin);
      frame_origins.push_back(std::make_pair(confidence, frame_origin));
    };

    // Open vpRealSense2 object according to configuration and with the callback to be called.
    g.open(config, callback);

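    // Resize the images to the resolution of each fisheye stream so that the callback can copy pixel data into them.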
    I_left.resize(g.getIntrinsics(RS2_STREAM_FISHEYE, 1).height, g.getIntrinsics(RS2_STREAM_FISHEYE, 1).width);

    I_right.resize(g.getIntrinsics(RS2_STREAM_FISHEYE, 2).height, g.getIntrinsics(RS2_STREAM_FISHEYE, 2).width);

#if defined(VISP_HAVE_X11)
    vpDisplayX display_left;    // Left image
    vpDisplayX display_right;   // Right image
    vpDisplayX display_pose;    // Pose visualization
#elif defined(VISP_HAVE_GDI)
    vpDisplayGDI display_left;  // Left image
    vpDisplayGDI display_right; // Right image
    vpDisplayGDI display_pose;  // Pose visualization
#endif

#if defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)
    display_left.setDownScalingFactor(display_scale);
    display_right.setDownScalingFactor(display_scale);
    display_left.init(I_left, 10, 10, "Left image");
    display_right.init(I_right, static_cast<int>(I_left.getWidth() / display_scale) + 80, 10, "Right image");
    display_pose.init(I_pose, 10, static_cast<int>(I_left.getHeight() / display_scale) + 80, "Pose visualizer");
#endif

    vpHomogeneousMatrix cextMc_0 = cextMw * cMw_0.inverse();
    vpMeterPixelConversion::convertPoint(cam, cextMc_0[0][3] / cextMc_0[2][3], cextMc_0[1][3] / cextMc_0[2][3],
                                         frame_origin);
    frame_origins.push_back(std::make_pair(confidence, frame_origin));

    while (true) {
      // Sleep for 1 millisecond to reduce the number of iterations
      std::this_thread::sleep_for(std::chrono::milliseconds(1));

      vpDisplay::display(I_left);
      vpDisplay::display(I_right);
      vpDisplay::display(I_pose);

      vpHomogeneousMatrix cextMc = cextMw * cMw.inverse();
      vpMeterPixelConversion::convertPoint(cam, cextMc[0][3] / cextMc[2][3], cextMc[1][3] / cextMc[2][3], frame_origin);
      frame_origins.push_back(std::make_pair(confidence, frame_origin));

      vpDisplay::displayText(I_left, 15 * display_scale, 15 * display_scale, "Click to quit", vpColor::red);
      vpDisplay::displayText(I_right, 15 * display_scale, 15 * display_scale, "Click to quit", vpColor::red);
      vpDisplay::displayText(I_pose, 15, 15, "Click to quit", vpColor::red);

      vpDisplay::displayFrame(I_pose, cextMc_0, cam, 0.1, vpColor::none, 2); // First frame
      vpDisplay::displayFrame(I_pose, cextMc, cam, 0.1, vpColor::none, 2);   // Current frame

      // Display frame origin trajectory
      {
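        // Each segment is colored according to the tracker confidence stored with the point:
        // green = high, yellow = medium, red = low or failed.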
        std::list<std::pair<unsigned int, vpImagePoint> >::const_iterator it = frame_origins.begin();
        std::pair<unsigned int, vpImagePoint> frame_origin_pair_prev = *(it++);
        for (; it != frame_origins.end(); ++it) {
          if (vpImagePoint::distance(frame_origin_pair_prev.second, (*it).second) > 1) {
            vpDisplay::displayLine(
                I_pose, frame_origin_pair_prev.second, (*it).second,
                (*it).first == 3 ? vpColor::green : ((*it).first == 2 ? vpColor::yellow : vpColor::red), 2);
            frame_origin_pair_prev = *it;
          }
        }
      }
      if (vpDisplay::getClick(I_left, false) || vpDisplay::getClick(I_right, false) ||
          vpDisplay::getClick(I_pose, false)) {
        break;
      }
      vpDisplay::flush(I_left);
      vpDisplay::flush(I_right);
      vpDisplay::flush(I_pose);
    }
  } catch (const vpException &e) {
    std::cerr << "RealSense error " << e.what() << std::endl;
  } catch (const std::exception &e) {
    std::cerr << e.what() << std::endl;
  }

  return EXIT_SUCCESS;
}
#else
int main()
{
#if !defined(VISP_HAVE_REALSENSE2)
  std::cout << "You do not have realsense2 SDK functionality enabled..." << std::endl;
  std::cout << "Tip:" << std::endl;
  std::cout << "- Install librealsense2, configure ViSP again using cmake and rebuild this example" << std::endl;
  return EXIT_SUCCESS;
#elif (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
  std::cout << "You did not build ViSP with c++11 or higher compiler flag" << std::endl;
  std::cout << "Tip:" << std::endl;
  std::cout << "- Configure ViSP again using cmake -DUSE_CXX_STANDARD=11, and rebuild this example" << std::endl;
#elif !(defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI))
  std::cout << "You don't have X11 or GDI display capabilities" << std::endl;
#elif !(RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
  std::cout << "Install librealsense version > 2.31.0" << std::endl;
#endif
  return EXIT_SUCCESS;
}
#endif