JeVoisBase  1.5
JeVois Smart Embedded Machine Vision Toolkit Base Modules
ArUco.C
// ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//
// JeVois Smart Embedded Machine Vision Toolkit - Copyright (C) 2016 by Laurent Itti, the University of Southern
// California (USC), and iLab at USC. See http://iLab.usc.edu and http://jevois.org for information about this project.
//
// This file is part of the JeVois Smart Embedded Machine Vision Toolkit. This program is free software; you can
// redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software
// Foundation, version 2. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
// without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
// License for more details. You should have received a copy of the GNU General Public License along with this program;
// if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
//
// Contact information: Laurent Itti - 3641 Watt Way, HNB-07A - Los Angeles, CA 90089-2520 - USA.
// Tel: +1 213 740 3527 - itti@pollux.usc.edu - http://iLab.usc.edu - http://jevois.org
// ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/*! \file */

#include <jevoisbase/Components/ArUco/ArUco.H>
#include <jevois/Image/RawImageOps.H>

#include <jevois/Core/Module.H>
#include <opencv2/calib3d.hpp> // for projectPoints()
#include <opencv2/imgproc/imgproc.hpp>

#include <Eigen/Geometry> // for AngleAxis and Quaternion

// ##############################################################################################################
ArUco::~ArUco()
{ }

// ##############################################################################################################
void ArUco::postInit()
{
  // Defer reading camera parameters to first processed frame, so we know the resolution:
  aruco::camparams::freeze();

  // Initialize default detector parameters:
  itsDetectorParams = cv::aruco::DetectorParameters::create();
  itsDetectorParams->cornerRefinementMethod = cv::aruco::CORNER_REFINE_SUBPIX;

  // Read detector parameters if any:
  std::string const dpf = aruco::detparams::get();
  if (dpf.empty() == false)
  {
    cv::FileStorage fs(dpf, cv::FileStorage::READ);
    if (fs.isOpened())
    {
      fs["adaptiveThreshWinSizeMin"] >> itsDetectorParams->adaptiveThreshWinSizeMin;
      fs["adaptiveThreshWinSizeMax"] >> itsDetectorParams->adaptiveThreshWinSizeMax;
      fs["adaptiveThreshWinSizeStep"] >> itsDetectorParams->adaptiveThreshWinSizeStep;
      fs["adaptiveThreshConstant"] >> itsDetectorParams->adaptiveThreshConstant;
      fs["minMarkerPerimeterRate"] >> itsDetectorParams->minMarkerPerimeterRate;
      fs["maxMarkerPerimeterRate"] >> itsDetectorParams->maxMarkerPerimeterRate;
      fs["polygonalApproxAccuracyRate"] >> itsDetectorParams->polygonalApproxAccuracyRate;
      fs["minCornerDistanceRate"] >> itsDetectorParams->minCornerDistanceRate;
      fs["minDistanceToBorder"] >> itsDetectorParams->minDistanceToBorder;
      fs["minMarkerDistanceRate"] >> itsDetectorParams->minMarkerDistanceRate;
      fs["cornerRefinementMethod"] >> itsDetectorParams->cornerRefinementMethod;
      fs["cornerRefinementWinSize"] >> itsDetectorParams->cornerRefinementWinSize;
      fs["cornerRefinementMaxIterations"] >> itsDetectorParams->cornerRefinementMaxIterations;
      fs["cornerRefinementMinAccuracy"] >> itsDetectorParams->cornerRefinementMinAccuracy;
      fs["markerBorderBits"] >> itsDetectorParams->markerBorderBits;
      fs["perspectiveRemovePixelPerCell"] >> itsDetectorParams->perspectiveRemovePixelPerCell;
      fs["perspectiveRemoveIgnoredMarginPerCell"] >> itsDetectorParams->perspectiveRemoveIgnoredMarginPerCell;
      fs["maxErroneousBitsInBorderRate"] >> itsDetectorParams->maxErroneousBitsInBorderRate;
      fs["minOtsuStdDev"] >> itsDetectorParams->minOtsuStdDev;
      fs["errorCorrectionRate"] >> itsDetectorParams->errorCorrectionRate;
    }
    else LERROR("Failed to read detector parameters from file [" << dpf << "] -- IGNORED");
  }

  // Instantiate the dictionary:
  switch (aruco::dictionary::get())
  {
  case aruco::Dict::Original: itsDictionary = cv::aruco::getPredefinedDictionary(cv::aruco::DICT_ARUCO_ORIGINAL); break;
  case aruco::Dict::D4X4_50: itsDictionary = cv::aruco::getPredefinedDictionary(cv::aruco::DICT_4X4_50); break;
  case aruco::Dict::D4X4_100: itsDictionary = cv::aruco::getPredefinedDictionary(cv::aruco::DICT_4X4_100); break;
  case aruco::Dict::D4X4_250: itsDictionary = cv::aruco::getPredefinedDictionary(cv::aruco::DICT_4X4_250); break;
  case aruco::Dict::D4X4_1000: itsDictionary = cv::aruco::getPredefinedDictionary(cv::aruco::DICT_4X4_1000); break;
  case aruco::Dict::D5X5_50: itsDictionary = cv::aruco::getPredefinedDictionary(cv::aruco::DICT_5X5_50); break;
  case aruco::Dict::D5X5_100: itsDictionary = cv::aruco::getPredefinedDictionary(cv::aruco::DICT_5X5_100); break;
  case aruco::Dict::D5X5_250: itsDictionary = cv::aruco::getPredefinedDictionary(cv::aruco::DICT_5X5_250); break;
  case aruco::Dict::D5X5_1000: itsDictionary = cv::aruco::getPredefinedDictionary(cv::aruco::DICT_5X5_1000); break;
  case aruco::Dict::D6X6_50: itsDictionary = cv::aruco::getPredefinedDictionary(cv::aruco::DICT_6X6_50); break;
  case aruco::Dict::D6X6_100: itsDictionary = cv::aruco::getPredefinedDictionary(cv::aruco::DICT_6X6_100); break;
  case aruco::Dict::D6X6_250: itsDictionary = cv::aruco::getPredefinedDictionary(cv::aruco::DICT_6X6_250); break;
  case aruco::Dict::D6X6_1000: itsDictionary = cv::aruco::getPredefinedDictionary(cv::aruco::DICT_6X6_1000); break;
  case aruco::Dict::D7X7_50: itsDictionary = cv::aruco::getPredefinedDictionary(cv::aruco::DICT_7X7_50); break;
  case aruco::Dict::D7X7_100: itsDictionary = cv::aruco::getPredefinedDictionary(cv::aruco::DICT_7X7_100); break;
  case aruco::Dict::D7X7_250: itsDictionary = cv::aruco::getPredefinedDictionary(cv::aruco::DICT_7X7_250); break;
  case aruco::Dict::D7X7_1000: itsDictionary = cv::aruco::getPredefinedDictionary(cv::aruco::DICT_7X7_1000); break;
  }
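
  // Side note (not done by this component): printable markers from the selected dictionary can be
  // generated with OpenCV if desired, e.g. cv::Mat m; cv::aruco::drawMarker(itsDictionary, 42, 200, m);
  // then save m with cv::imwrite(). Marker id 42 and size 200 pixels are just example values here.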
}

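// For reference, the file named by the "detparams" parameter and read in postInit() above is a
// cv::FileStorage YAML file. Since every key is read unconditionally above, such a file would normally
// define all of them; a hypothetical example (values are illustrative only, not recommendations):
//
//   %YAML:1.0
//   adaptiveThreshWinSizeMin: 3
//   adaptiveThreshWinSizeMax: 23
//   adaptiveThreshWinSizeStep: 10
//   adaptiveThreshConstant: 7
//   minMarkerPerimeterRate: 0.03
//   maxMarkerPerimeterRate: 4.0
//   polygonalApproxAccuracyRate: 0.05
//   minCornerDistanceRate: 0.05
//   minDistanceToBorder: 3
//   minMarkerDistanceRate: 0.05
//   cornerRefinementMethod: 1
//   cornerRefinementWinSize: 5
//   cornerRefinementMaxIterations: 30
//   cornerRefinementMinAccuracy: 0.1
//   markerBorderBits: 1
//   perspectiveRemovePixelPerCell: 4
//   perspectiveRemoveIgnoredMarginPerCell: 0.13
//   maxErroneousBitsInBorderRate: 0.35
//   minOtsuStdDev: 5.0
//   errorCorrectionRate: 0.6
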
// ##############################################################################################################
void ArUco::postUninit()
{
  aruco::camparams::unFreeze();
  itsDictionary.release();
  itsDetectorParams.release();
  itsCamMatrix = cv::Mat();
  itsDistCoeffs = cv::Mat();
}

// ##############################################################################################################
void ArUco::detectMarkers(cv::InputArray image, cv::OutputArray ids, cv::OutputArrayOfArrays corners)
{
  if (itsCamMatrix.empty())
  {
    std::string const cpf = absolutePath(aruco::camparams::get() + std::to_string(image.cols()) + 'x' +
                                         std::to_string(image.rows()) + ".yaml");

    cv::FileStorage fs(cpf, cv::FileStorage::READ);
    if (fs.isOpened())
    {
      fs["camera_matrix"] >> itsCamMatrix;
      fs["distortion_coefficients"] >> itsDistCoeffs;
      LINFO("Loaded camera calibration from " << cpf);
    }
    else LERROR("Failed to read camera parameters from file [" << cpf << "] -- IGNORED");
  }

  cv::aruco::detectMarkers(image, itsDictionary, corners, ids, itsDetectorParams);
}

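// For reference, the calibration file loaded above is named by appending the video resolution and ".yaml"
// to the "camparams" parameter value (e.g., with camparams set to "calibration" and 640x480 video, the
// file would be calibration640x480.yaml). It is a standard OpenCV cv::FileStorage calibration file; a
// hypothetical example with made-up intrinsics:
//
//   %YAML:1.0
//   camera_matrix: !!opencv-matrix
//     rows: 3
//     cols: 3
//     dt: d
//     data: [ 600., 0., 320., 0., 600., 240., 0., 0., 1. ]
//   distortion_coefficients: !!opencv-matrix
//     rows: 5
//     cols: 1
//     dt: d
//     data: [ 0., 0., 0., 0., 0. ]
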
// ##############################################################################################################
void ArUco::estimatePoseSingleMarkers(cv::InputArrayOfArrays corners, cv::OutputArray rvecs, cv::OutputArray tvecs)
{
  cv::aruco::estimatePoseSingleMarkers(corners, markerlen::get(), itsCamMatrix, itsDistCoeffs, rvecs, tvecs);
}

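// Note: for each marker, cv::aruco::estimatePoseSingleMarkers() returns a Rodrigues rotation vector and a
// translation vector mapping points from the marker frame (origin at the marker center, Z axis pointing
// out of the marker plane) to the camera frame; translations are expressed in the same units as the
// markerlen parameter.
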
// ##############################################################################################################
void ArUco::sendSerial(jevois::StdModule * mod, std::vector<int> ids, std::vector<std::vector<cv::Point2f> > corners,
                       unsigned int w, unsigned int h, std::vector<cv::Vec3d> const & rvecs,
                       std::vector<cv::Vec3d> const & tvecs)
{
  if (rvecs.empty() == false)
  {
    float const siz = markerlen::get();

    // If we have rvecs and tvecs, we are doing 3D pose estimation, so send a 3D message:
    for (size_t i = 0; i < corners.size(); ++i)
    {
      cv::Vec3d const & rv = rvecs[i];
      cv::Vec3d const & tv = tvecs[i];

      // Compute quaternion from the Rodrigues rotation vector rv, whose norm is the rotation angle and
      // whose direction is the rotation axis; Eigen::AngleAxis expects a unit-length axis, so normalize:
      float theta = std::sqrt(rv[0] * rv[0] + rv[1] * rv[1] + rv[2] * rv[2]);
      Eigen::Vector3f axis(rv[0], rv[1], rv[2]);
      if (theta > 0.0F) axis /= theta;
      Eigen::Quaternion<float> q(Eigen::AngleAxis<float>(theta, axis));
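      // (For a rotation by angle theta about unit axis (ax, ay, az), the unit quaternion computed here is
      //  q = [ cos(theta/2), sin(theta/2)*ax, sin(theta/2)*ay, sin(theta/2)*az ].)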

      mod->sendSerialStd3D(tv[0], tv[1], tv[2],           // position
                           siz, siz, 1.0F,                // size
                           q.w(), q.x(), q.y(), q.z(),    // pose
                           "U" + std::to_string(ids[i])); // decoded ID with "U" prefix for ArUco
    }
  }
  else
  {
    // Send one 2D message per marker:
    for (size_t i = 0; i < corners.size(); ++i)
    {
      std::vector<cv::Point2f> const & currentMarker = corners[i];
      mod->sendSerialContour2D(w, h, currentMarker, "U" + std::to_string(ids[i]));
    }
  }
}

// ##############################################################################################################
void ArUco::drawDetections(jevois::RawImage & outimg, int txtx, int txty, std::vector<int> ids,
                           std::vector<std::vector<cv::Point2f> > corners, std::vector<cv::Vec3d> const & rvecs,
                           std::vector<cv::Vec3d> const & tvecs)
{
  // This code is like drawDetectedMarkers() in cv::aruco, but for YUYV output image:
  int nMarkers = int(corners.size());
  for (int i = 0; i < nMarkers; ++i)
  {
    std::vector<cv::Point2f> const & currentMarker = corners[i];

    // draw marker sides:
    for (int j = 0; j < 4; ++j)
    {
      cv::Point2f const & p0 = currentMarker[j];
      cv::Point2f const & p1 = currentMarker[ (j+1) % 4 ];
      jevois::rawimage::drawLine(outimg, int(p0.x + 0.5F), int(p0.y + 0.5F),
                                 int(p1.x + 0.5F), int(p1.y + 0.5F), 1, jevois::yuyv::LightGreen);
    }

    // draw first corner mark
    jevois::rawimage::drawDisk(outimg, int(currentMarker[0].x + 0.5F), int(currentMarker[0].y + 0.5F),
                               3, jevois::yuyv::LightGreen);

    // draw ID
    if (ids.empty() == false)
    {
      cv::Point2f cent(0.0F, 0.0F); for (int p = 0; p < 4; ++p) cent += currentMarker[p] * 0.25F;
      jevois::rawimage::writeText(outimg, std::string("id=") + std::to_string(ids[i]),
                                  int(cent.x + 0.5F), int(cent.y + 0.5F) - 5, jevois::yuyv::LightGreen);
    }
  }

  // This code is like drawAxis() in cv::aruco, but for YUYV output image:
  if (dopose::get() && ids.empty() == false)
  {
    float const length = markerlen::get() * 0.4F;

    for (size_t i = 0; i < ids.size(); ++i)
    {
      // Project axis points:
      std::vector<cv::Point3f> axisPoints;
      axisPoints.push_back(cv::Point3f(0.0F, 0.0F, 0.0F));
      axisPoints.push_back(cv::Point3f(length, 0.0F, 0.0F));
      axisPoints.push_back(cv::Point3f(0.0F, length, 0.0F));
      axisPoints.push_back(cv::Point3f(0.0F, 0.0F, length));

      std::vector<cv::Point2f> imagePoints;
      cv::projectPoints(axisPoints, rvecs[i], tvecs[i], itsCamMatrix, itsDistCoeffs, imagePoints);

      // Draw axis lines
      jevois::rawimage::drawLine(outimg, int(imagePoints[0].x + 0.5F), int(imagePoints[0].y + 0.5F),
                                 int(imagePoints[1].x + 0.5F), int(imagePoints[1].y + 0.5F),
                                 2, jevois::yuyv::MedPurple);
      jevois::rawimage::drawLine(outimg, int(imagePoints[0].x + 0.5F), int(imagePoints[0].y + 0.5F),
                                 int(imagePoints[2].x + 0.5F), int(imagePoints[2].y + 0.5F),
                                 2, jevois::yuyv::MedGreen);
      jevois::rawimage::drawLine(outimg, int(imagePoints[0].x + 0.5F), int(imagePoints[0].y + 0.5F),
                                 int(imagePoints[3].x + 0.5F), int(imagePoints[3].y + 0.5F),
                                 2, jevois::yuyv::MedGrey);

      // Also draw a cube if requested:
      if (showcube::get())
      {
        float const len = markerlen::get() * 0.5F;

        std::vector<cv::Point3f> cubePoints;
        cubePoints.push_back(cv::Point3f(-len, -len, 0.0F));
        cubePoints.push_back(cv::Point3f(len, -len, 0.0F));
        cubePoints.push_back(cv::Point3f(len, len, 0.0F));
        cubePoints.push_back(cv::Point3f(-len, len, 0.0F));
        cubePoints.push_back(cv::Point3f(-len, -len, len * 2.0F));
        cubePoints.push_back(cv::Point3f(len, -len, len * 2.0F));
        cubePoints.push_back(cv::Point3f(len, len, len * 2.0F));
        cubePoints.push_back(cv::Point3f(-len, len, len * 2.0F));

        std::vector<cv::Point2f> cuf;
        cv::projectPoints(cubePoints, rvecs[i], tvecs[i], itsCamMatrix, itsDistCoeffs, cuf);

        // Round all the coordinates:
        std::vector<cv::Point> cu;
        for (auto const & p : cuf) cu.push_back(cv::Point(int(p.x + 0.5F), int(p.y + 0.5F)));

        // Draw cube lines:
        jevois::rawimage::drawLine(outimg, cu[0].x, cu[0].y, cu[1].x, cu[1].y, 2, jevois::yuyv::LightGreen);
        jevois::rawimage::drawLine(outimg, cu[1].x, cu[1].y, cu[2].x, cu[2].y, 2, jevois::yuyv::LightGreen);
        jevois::rawimage::drawLine(outimg, cu[2].x, cu[2].y, cu[3].x, cu[3].y, 2, jevois::yuyv::LightGreen);
        jevois::rawimage::drawLine(outimg, cu[3].x, cu[3].y, cu[0].x, cu[0].y, 2, jevois::yuyv::LightGreen);
        jevois::rawimage::drawLine(outimg, cu[4].x, cu[4].y, cu[5].x, cu[5].y, 2, jevois::yuyv::LightGreen);
        jevois::rawimage::drawLine(outimg, cu[5].x, cu[5].y, cu[6].x, cu[6].y, 2, jevois::yuyv::LightGreen);
        jevois::rawimage::drawLine(outimg, cu[6].x, cu[6].y, cu[7].x, cu[7].y, 2, jevois::yuyv::LightGreen);
        jevois::rawimage::drawLine(outimg, cu[7].x, cu[7].y, cu[4].x, cu[4].y, 2, jevois::yuyv::LightGreen);
        jevois::rawimage::drawLine(outimg, cu[0].x, cu[0].y, cu[4].x, cu[4].y, 2, jevois::yuyv::LightGreen);
        jevois::rawimage::drawLine(outimg, cu[1].x, cu[1].y, cu[5].x, cu[5].y, 2, jevois::yuyv::LightGreen);
        jevois::rawimage::drawLine(outimg, cu[2].x, cu[2].y, cu[6].x, cu[6].y, 2, jevois::yuyv::LightGreen);
        jevois::rawimage::drawLine(outimg, cu[3].x, cu[3].y, cu[7].x, cu[7].y, 2, jevois::yuyv::LightGreen);
      }

    }
  }

  jevois::rawimage::writeText(outimg, "Detected " + std::to_string(ids.size()) + " ArUco markers.",
                              txtx, txty, jevois::yuyv::White);
}
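
For context, here is a minimal usage sketch (not part of ArUco.C) showing how a JeVois module might drive this component from its process() function. It assumes the module derives from jevois::StdModule and holds the component in a member named itsArUco; that name, the text coordinates, and the surrounding module boilerplate are hypothetical. See the DemoArUco module in jevoisbase for a complete, real example.

    std::vector<int> ids;
    std::vector<std::vector<cv::Point2f> > corners;
    std::vector<cv::Vec3d> rvecs, tvecs;

    // cvimg is a cv::Mat view of the current camera frame; w and h are its dimensions:
    itsArUco->detectMarkers(cvimg, ids, corners);

    // Optionally estimate the 3D pose of each detected marker (requires a valid camera calibration):
    itsArUco->estimatePoseSingleMarkers(corners, rvecs, tvecs);

    // Send standardized serial messages (3D if rvecs/tvecs were computed, 2D contours otherwise):
    itsArUco->sendSerial(this, ids, corners, w, h, rvecs, tvecs);

    // Overlay the detections onto a YUYV output image:
    itsArUco->drawDetections(outimg, 3, h + 2, ids, corners, rvecs, tvecs);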