Visual Servoing Platform version 3.6.0
tutorial-ibvs-4pts-ogre-tracking.cpp
#include <visp3/core/vpConfig.h>
#ifdef VISP_HAVE_MODULE_AR
#include <visp3/ar/vpAROgre.h>
#endif
#include <visp3/blob/vpDot2.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/robot/vpSimulatorCamera.h>
#include <visp3/vision/vpPose.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/vs/vpServo.h>
#include <visp3/vs/vpServoDisplay.h>

void display_trajectory(const vpImage<unsigned char> &I, const std::vector<vpDot2> &dot, unsigned int thickness);
#if defined(VISP_HAVE_OGRE)
void ogre_get_render_image(vpAROgre &ogre, const vpImage<unsigned char> &background, const vpHomogeneousMatrix &cMo,
                           vpImage<unsigned char> &I);
#endif

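// Overlay the trajectory followed by the center of gravity of each tracked
// dot; the trajectories are accumulated in static vectors across successive
// calls.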
void display_trajectory(const vpImage<unsigned char> &I, const std::vector<vpDot2> &dot, unsigned int thickness)
{
  static std::vector<vpImagePoint> traj[4];
  for (unsigned int i = 0; i < 4; i++) {
    traj[i].push_back(dot[i].getCog());
  }
  for (unsigned int i = 0; i < 4; i++) {
    for (unsigned int j = 1; j < traj[i].size(); j++) {
      vpDisplay::displayLine(I, traj[i][j - 1], traj[i][j], vpColor::green, thickness);
    }
  }
}

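// Render the Ogre scene seen from the camera pose cMo over the given
// background image, then convert the rendered color image to grayscale and
// threshold it so that the spheres can be tracked as blobs.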
#if defined(VISP_HAVE_OGRE)
void ogre_get_render_image(vpAROgre &ogre, const vpImage<unsigned char> &background, const vpHomogeneousMatrix &cMo,
                           vpImage<unsigned char> &I)
{
  static vpImage<vpRGBa> Irender; // Image from ogre scene rendering
  ogre.display(background, cMo);
  ogre.getRenderingOutput(Irender, cMo);

  vpImageConvert::convert(Irender, I);
  // Due to the light that was added to the scene, we need to threshold the image
  vpImageTools::binarise(I, (unsigned char)254, (unsigned char)255, (unsigned char)0, (unsigned char)255,
                         (unsigned char)255);
}
#endif

int main()
{
#if defined(VISP_HAVE_OGRE) && (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_OPENCV))
  try {
    unsigned int thickness = 3;

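    // Desired camera-to-object pose cdMo (object 0.75 m in front of the camera)
    // and initial pose cMo; translations in meters, rotations as theta-u
    // axis-angle values in radians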
    vpHomogeneousMatrix cdMo(0, 0, 0.75, 0, 0, 0);
    vpHomogeneousMatrix cMo(0.15, -0.1, 1., vpMath::rad(10), vpMath::rad(-10), vpMath::rad(50));

    // Grayscale image used as background texture, initialized to white (255)
    vpImage<unsigned char> background(480, 640, 255);

    // Parameters of our camera
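    // (px = py = 840 pixel focal lengths, principal point at the image center)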
    vpCameraParameters cam(840, 840, background.getWidth() / 2, background.getHeight() / 2);

    // Define the target as 4 points
    std::vector<vpPoint> point;
    point.push_back(vpPoint(-0.1, -0.1, 0));
    point.push_back(vpPoint(0.1, -0.1, 0));
    point.push_back(vpPoint(0.1, 0.1, 0));
    point.push_back(vpPoint(-0.1, 0.1, 0));

    // Our object
    // A simulator with the camera parameters defined above,
    // and the background image size
    vpAROgre ogre;
    ogre.setCameraParameters(cam);
    ogre.setShowConfigDialog(false);
    ogre.addResource("./"); // Add the path to the Sphere.mesh resource
    ogre.init(background, false, true);
    // ogre.setWindowPosition(680, 400);

    // Create the scene that contains 4 spheres
    // Sphere.mesh contains a sphere with 1 meter radius
    std::vector<std::string> name(4);
    for (unsigned int i = 0; i < 4; i++) {
      std::ostringstream s;
      s << "Sphere" << i;
      name[i] = s.str();
      ogre.load(name[i], "Sphere.mesh");
      ogre.setScale(name[i], 0.02f, 0.02f, 0.02f); // Rescale the sphere to 2 cm radius
      // Set the position of each sphere in the object frame
      ogre.setPosition(name[i], vpTranslationVector(point[i].get_oX(), point[i].get_oY(), point[i].get_oZ()));
      ogre.setRotation(name[i], vpRotationMatrix(M_PI / 2, 0, 0));
    }

    // Add an optional point light source
    Ogre::Light *light = ogre.getSceneManager()->createLight();
    light->setDiffuseColour(1, 1, 1);  // scaled RGB values
    light->setSpecularColour(1, 1, 1); // scaled RGB values
    light->setPosition((Ogre::Real)cdMo[0][3], (Ogre::Real)cdMo[1][3], (Ogre::Real)(-cdMo[2][3]));
    light->setType(Ogre::Light::LT_POINT);

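    // Build the eye-in-hand visual servo task: with the CURRENT interaction
    // matrix option, computeControlLaw() will return the camera velocity
    // v = -lambda * L^+ * (s - s*) evaluated from the current features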
    vpServo task;
    task.setServo(vpServo::EYEINHAND_CAMERA);
    task.setInteractionMatrixType(vpServo::CURRENT);
    task.setLambda(0.5);

    // Image used for the image processing
    vpImage<unsigned char> I;

    // Render the scene at the desired position
    ogre_get_render_image(ogre, background, cdMo, I);

// Display the image in which we will do the tracking
#if defined(VISP_HAVE_X11)
    vpDisplayX d(I, 0, 0, "Camera view at desired position");
#elif defined(VISP_HAVE_GDI)
    vpDisplayGDI d(I, 0, 0, "Camera view at desired position");
#elif defined(HAVE_OPENCV_HIGHGUI)
    vpDisplayOpenCV d(I, 0, 0, "Camera view at desired position");
#else
    std::cout << "No image viewer is available..." << std::endl;
#endif

    vpDisplay::display(I);
    vpDisplay::displayText(I, 10, 10, "Click in the 4 dots to learn their positions", vpColor::red);
    vpDisplay::flush(I);

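    // One blob tracker per target point, with the corresponding current (p)
    // and desired (pd) point features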
    std::vector<vpDot2> dot(4);
    vpFeaturePoint p[4], pd[4];

    for (unsigned int i = 0; i < 4; i++) {
      // Compute the desired feature at the desired position
      dot[i].setGraphics(true);
      dot[i].setGraphicsThickness(thickness);
      dot[i].initTracking(I);
      vpDisplay::flush(I);
      vpFeatureBuilder::create(pd[i], cam, dot[i].getCog());
    }

    // Render the scene at the initial position
    ogre_get_render_image(ogre, background, cMo, I);

    vpDisplay::display(I);
    vpDisplay::setTitle(I, "Current camera view");
    vpDisplay::displayText(I, 10, 10, "Click in the 4 dots to initialise the tracking and start the servo",
                           vpColor::red);
    vpDisplay::flush(I);

    for (unsigned int i = 0; i < 4; i++) {
      // We notice that if we project the scene at a given pose, the pose
      // estimated from the rendered image differs a little. That's why we
      // cannot simply compute the desired feature from the desired pose using
      // the next two lines. We will rather compute the desired position of
      // the features from a learning stage.
      // point[i].project(cdMo);
      // vpFeatureBuilder::create(pd[i], point[i]);

      // Compute the current feature at the initial position
      dot[i].setGraphics(true);
      dot[i].initTracking(I);
      vpDisplay::flush(I);
      vpFeatureBuilder::create(p[i], cam, dot[i].getCog());
    }

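    // The interaction matrix of a point feature depends on its depth Z, which
    // is available here from the known pose cMo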
    for (unsigned int i = 0; i < 4; i++) {
      // Set the feature Z coordinate from the pose
      vpColVector cP;
      point[i].changeFrame(cMo, cP);
      p[i].set_Z(cP[2]);

      task.addFeature(p[i], pd[i]);
    }

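    // Free-flying camera simulator; since the object does not move, its pose
    // wMo in the world frame is computed once from the initial camera position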
    vpHomogeneousMatrix wMc, wMo;
    vpSimulatorCamera robot;
    robot.setSamplingTime(0.040);
    robot.getPosition(wMc);
    wMo = wMc * cMo;

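    // Servo loop: at each iteration the scene is rendered from the new camera
    // position, the four dots are tracked to update the current features, the
    // control law is computed and the resulting velocity is applied to the
    // simulated camera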
    for (;;) {
      // From the camera position in the world frame we retrieve the object
      // position
      robot.getPosition(wMc);
      cMo = wMc.inverse() * wMo;

      // Update the scene from the new camera position
      ogre_get_render_image(ogre, background, cMo, I);

      vpDisplay::display(I);

      for (unsigned int i = 0; i < 4; i++) {
        dot[i].track(I);
        vpFeatureBuilder::create(p[i], cam, dot[i].getCog());
      }

      for (unsigned int i = 0; i < 4; i++) {
        // Set the feature Z coordinate from the pose
        vpColVector cP;
        point[i].changeFrame(cMo, cP);
        p[i].set_Z(cP[2]);
      }

      vpColVector v = task.computeControlLaw();
      robot.setVelocity(vpRobot::CAMERA_FRAME, v);

      display_trajectory(I, dot, thickness);
      vpServoDisplay::display(task, cam, I, vpColor::green, vpColor::red, thickness + 2);
      vpDisplay::flush(I);

      if (vpDisplay::getClick(I, false))
        break;

      vpTime::wait(robot.getSamplingTime() * 1000);
    }
  } catch (const vpException &e) {
    std::cout << "Catch a ViSP exception: " << e << std::endl;
    return EXIT_FAILURE;
  } catch (...) {
    std::cout << "Catch an exception " << std::endl;
    return EXIT_FAILURE;
  }
  return EXIT_SUCCESS;
#endif
}