27 #include <opencv2/core/core.hpp>
28 #include <opencv2/imgproc/imgproc.hpp>
29 #include <opencv2/calib3d/calib3d.hpp>
31 #include <Eigen/Geometry>
// ---------------------------------------------------------------------------
// Module parameter declarations (JeVois JEVOIS_DECLARE_PARAMETER macros).
// NOTE(review): this chunk is a partial extraction -- most declarations are
// missing their opening macro line and/or their default-value/ParamCateg line.
// The stray leading integers (40, 54, 77, ...) are the original file's line
// numbers fused into the text by the extraction; they are left untouched here.
// ---------------------------------------------------------------------------
// Fragment of the hue-cue help string (reference points on the OpenCV 0-179
// hue wheel):
40 "wraparound, 30=yellow, 45=light green, 60=green, 75=green cyan, 90=cyan, "
41 "105=light blue, 120=blue, 135=purple, 150=pink)",
// Fragment of maxnumobj help: frames with too many blobs are skipped outright.
54 "detected in a frame, we skip that frame before we even try to analyze shapes of the blobs",
// Fragment of hullarea help: size gate applied before shape analysis.
59 "if you want to skip shape analysis of very large or very small blobs",
// Fragment of hullfill help: hullfill is an UPPER bound on fill percentage;
// shapes fuller than it are rejected (see the `hufill > hullfill::get()` test
// in detect() below).
64 "occupies a smaller fraction of its convex hull. This parameter sets an upper bound, "
65 "fuller shapes will be rejected.",
// epsilon: contour-smoothing factor fed to cv::approxPolyDP (scaled by the
// perimeter in detect()). Default value / ParamCateg line is missing from
// this extraction.
77 JEVOIS_DECLARE_PARAMETER(epsilon,
double,
"Shape smoothing factor (higher for smoother). Shape smoothing is applied "
78 "to remove small contour defects before the shape is analyzed.",
// threads: number of parallel detect() workers; each thread uses a different
// HSV window (see the cue-spread loop further down). Default/closing missing.
86 JEVOIS_DECLARE_PARAMETER(threads,
size_t,
"Number of parallel vision processing threads. Thread 0 uses the HSV values "
87 "provided by user parameters; thread 1 broadens that fixed range a bit; threads 2-3 use a "
88 "narrow and broader learned HSV window over time",
// Fragment of dopose help: selects 3D vs 2D serial messages (see sendAllSerial).
101 "When dopose is true, 3D serial messages are sent out, otherwise 2D serial messages.",
// camparams: stem of the calibration file name; resolution and ".yaml" are
// appended at load time (see the calibration-loading code below).
106 "will be appended, as well as a .yaml extension. For example, specifying 'calibration' "
107 "here and running the camera sensor at 320x240 will attempt to load "
108 "calibration320x240.yaml from within directory " JEVOIS_SHARE_PATH
"/camera/",
109 "calibration", ParamCateg);
// objsize default: physical target size in user units (0.28 x 0.175 -- units
// not stated in this extraction; presumably meters -- TODO confirm upstream).
117 cv::Size_<float>(0.28
F, 0.175
F), ParamCateg);
// margin: border-exclusion zone for detections. NOTE(review): the help text
// contains a typo, "from from" -- it is a runtime string, so it is flagged
// here rather than edited.
120 JEVOIS_DECLARE_PARAMETER(margin,
size_t,
"Margin from from frame borders (pixels). If any corner of a detected shape "
121 "gets closer than the margin to the frame borders, the shape will be rejected. This is to "
122 "avoid possibly bogus 6D pose estimation when the shape starts getting truncated as it "
123 "partially exits the camera's field of view.",
// Module class's parameter pack: every JEVOIS_DECLARE_PARAMETER above must be
// listed here for the module to expose it. The class header itself is outside
// this extraction.
293 public jevois::Parameter<hcue, scue, vcue, maxnumobj, hullarea, hullfill, erodesize,
294 dilatesize, epsilon, debug, threads, showthread, ethresh,
295 dopose, camparams, iou, objsize, margin>
// hsvcue: a mean/sigma HSV color window used by each detection thread.
// Members (declared outside this extraction): muh/mus/muv = mean H,S,V;
// sih/sis/siv = half-width (sigma) per channel. Members appear to be float
// (they are assigned float expressions below) even though the ctor takes
// unsigned char -- TODO confirm member declarations upstream.
// Constructor: seed the window from explicit mean + sigma per channel.
318 hsvcue(
unsigned char h,
unsigned char hsig,
unsigned char s,
unsigned char ssig,
319 unsigned char v,
unsigned char vsig) :
muh(
h),
sih(hsig),
mus(s),
sis(ssig),
muv(v),
siv(vsig)
// NOTE(review): extraction gap here (original lines 320-324) -- presumably
// the ctor body and the header of a range-fixing helper; the clamping
// statements below are that helper's body.
// Clamp hue mean to [1,179] (OpenCV 8-bit hue range is 0-179) and hue sigma
// to [1,360]. NOTE(review): a sigma cap of 360 exceeds the 0-179 hue range;
// presumably intentional to allow "match any hue" -- confirm upstream.
325 muh = std::min(179.0
F, std::max(1.0
F,
muh));
sih = std::max(1.0
F, std::min(
sih, 360.0
F));
// Clamp saturation mean to [1,254], sigma to [1,512].
326 mus = std::min(254.0
F, std::max(1.0
F,
mus));
sis = std::max(1.0
F, std::min(
sis, 512.0
F));
// Clamp value mean to [1,254], sigma to [1,512].
327 muv = std::min(254.0
F, std::max(1.0
F,
muv));
siv = std::max(1.0
F, std::min(
siv, 512.0
F));
// Lower bound of the window (mean - sigma per channel, floored at 0), as a
// cv::Scalar for cv::inRange(). The method signature is missing from this
// extraction (presumably `cv::Scalar rmin() const` -- TODO confirm).
332 {
return cv::Scalar(std::max(0.0
F,
muh -
sih), std::max(0.0
F,
mus -
sis), std::max(0.0
F,
muv -
siv)); }
// Upper bound of the window: hue capped at 179, but S and V are hard-wired
// to 255 (full range) -- only the lower S/V bounds are window-dependent.
// Signature missing (presumably `cv::Scalar rmax() const`).
336 {
return cv::Scalar(std::min(179.0
F,
muh +
sih), 255, 255); }
// Parallel-for body that runs cv::solvePnP independently on each detected
// quadrilateral (patterned after OpenCV's ArUco pose-estimation helper).
// The class header and first ctor parameters are missing from this extraction.
373 cv::InputArray _cameraMatrix, cv::InputArray _distCoeffs,
374 cv::Mat & _rvecs, cv::Mat & _tvecs) :
375 objPoints(_objPoints), corners(_corners), cameraMatrix(_cameraMatrix),
376 distCoeffs(_distCoeffs), rvecs(_rvecs), tvecs(_tvecs)
// operator() body (its signature, presumably
// `void operator()(cv::Range const & range) const`, is missing): solve the
// PnP problem for every detection index in the assigned range. Each index
// writes a disjoint Vec3d slot, so no locking is needed.
381 int const begin = range.start;
382 int const end = range.end;
384 for (
int i = begin; i <
end; ++i)
385 cv::solvePnP(objPoints, corners.getMat(i), cameraMatrix, distCoeffs,
386 rvecs.at<cv::Vec3d>(i), tvecs.at<cv::Vec3d>(i));
// Members. NOTE(review): in `cv::Mat & rvecs, tvecs;` the `&` binds only to
// rvecs -- tvecs is a by-VALUE cv::Mat. This still works because copying a
// cv::Mat shares the underlying pixel buffer, so writes through tvecs reach
// the caller's matrix, but it is fragile and easy to misread; worth
// confirming intent upstream.
391 cv::InputArrayOfArrays corners;
392 cv::InputArray cameraMatrix, distCoeffs;
393 cv::Mat & rvecs, tvecs;
// Module constructor fragment: instantiate one 1-D Kalman filter per HSV
// channel, used (per the threads parameter help above) to smooth the learned
// HSV window over time.
405 itsKalH = addSubComponent<Kalman1D>(
"kalH");
406 itsKalS = addSubComponent<Kalman1D>(
"kalS");
407 itsKalV = addSubComponent<Kalman1D>(
"kalV");
// Compute 6-DoF pose for each detected quadrilateral via solvePnP.
// In: corners -- filled here with each detection's 4 hull points (the loop
//     header over the detection list is missing from this extraction).
// Out: _rvecs/_tvecs -- per-detection rotation/translation vectors.
// The tail of the function (presumably a cv::parallel_for_ dispatch to
// SinglePoseEstimationParallel above) is also missing.
418 void estimatePose(std::vector<std::vector<cv::Point2f> > & corners, cv::OutputArray _rvecs,
419 cv::OutputArray _tvecs)
421 auto const osiz = objsize::get();
// Copy the integer hull points of each detection into a float point vector.
427 corners.push_back(std::vector<cv::Point2f>());
428 std::vector<cv::Point2f> & v = corners.back();
429 for (
auto const & p : d.hull) v.push_back(cv::Point2f(p));
// 3D model of the target: a rectangle of objsize centered on the origin in
// the z=0 plane, corners listed counter-clockwise starting bottom-left.
// Must match the hull-corner ordering produced by detect().
435 cv::Mat objPoints(4, 1, CV_32FC3);
436 objPoints.ptr< cv::Vec3f >(0)[0] = cv::Vec3f(-osiz.width * 0.5F, -osiz.height * 0.5F, 0);
437 objPoints.ptr< cv::Vec3f >(0)[1] = cv::Vec3f(-osiz.width * 0.5F, osiz.height * 0.5F, 0);
438 objPoints.ptr< cv::Vec3f >(0)[2] = cv::Vec3f(osiz.width * 0.5F, osiz.height * 0.5F, 0);
439 objPoints.ptr< cv::Vec3f >(0)[3] = cv::Vec3f(osiz.width * 0.5F, -osiz.height * 0.5F, 0);
// One Vec3d output slot per detection for the parallel solvePnP pass.
441 int nobj = (
int)corners.size();
442 _rvecs.create(nobj, 1, CV_64FC3); _tvecs.create(nobj, 1, CV_64FC3);
443 cv::Mat rvecs = _rvecs.getMat(), tvecs = _tvecs.getMat();
// Camera-calibration loading fragment: build the calibration file path from
// JEVOIS_SHARE_PATH + "/camera/" + camparams + (resolution suffix + ".yaml",
// appended in a line missing from this extraction), then read it. A failed
// read is logged and IGNORED -- the module carries on without calibration.
455 std::string
const cpf = std::string(JEVOIS_SHARE_PATH) +
"/camera/" + camparams::get() +
458 cv::FileStorage fs(cpf, cv::FileStorage::READ);
463 LINFO(
"Loaded camera calibration from " << cpf);
467 LERROR(
"Failed to read camera parameters from file [" << cpf <<
"] -- IGNORED");
// detect(): per-thread detection pipeline (function signature missing from
// this extraction). Thresholds the HSV image with this thread's color window,
// then runs a cascade of cheap-to-expensive shape filters on each blob;
// survivors are appended to the shared detection list under a mutex.
// Threshold into binary mask imgth; rmin/rmax presumably come from this
// thread's hsvcue (rmin()/rmax() above) -- the fetch is not visible here.
480 cv::inRange(imghsv, rmin, rmax, imgth);
// Human-readable summary of this thread's HSV window, grown below as filters
// pass; the longest string (deepest filter reached) is kept for display.
481 std::string
str =
jevois::sformat(
"T%zu: H=%03d-%03d S=%03d-%03d V=%03d-%03d ", tnum,
int(rmin.val[0]),
482 int(rmax.val[0]),
int(rmin.val[1]),
int(rmax.val[1]),
483 int(rmin.val[2]),
int(rmax.val[2]));
// Extract blob outlines. RETR_CCOMP gives a two-level hierarchy so the loop
// below can walk only top-level (outer) contours via hierarchy[index][0].
490 std::vector<std::vector<cv::Point> > contours; std::vector<cv::Vec4i> hierarchy;
491 cv::findContours(imgth, contours, hierarchy, cv::RETR_CCOMP, cv::CHAIN_APPROX_SIMPLE);
494 double const epsi = epsilon::get();
495 int const m = margin::get();
// Give up early on frames with too many blobs (too noisy to bother).
498 std::string str2, beststr2;
499 if (hierarchy.size() > 0 && hierarchy.size() <= maxnumobj::get())
// Iterate over top-level contours only (next-sibling links).
501 for (
int index = 0; index >= 0; index = hierarchy[index][0])
504 if (str2.length() > beststr2.length()) beststr2 = str2;
508 std::vector<cv::Point>
const & c = contours[index];
// Unsigned (oriented=false) contour area.
512 double const area = cv::contourArea(c,
false);
// Filter 1: the smoothed convex hull must be a quadrilateral. The hull is
// smoothed with 3x the contour epsilon (hulls have fewer, longer edges).
515 std::vector<cv::Point> rawhull;
516 cv::convexHull(c, rawhull,
true);
517 double const rawhullperi = cv::arcLength(rawhull,
true);
518 cv::approxPolyDP(rawhull, d.
hull, epsi * rawhullperi * 3.0,
true);
521 if (d.
hull.size() != 4)
continue;
// Filter 2: hull area within the configured [min,max] range (+0.4999 rounds
// to nearest int).
524 double const huarea = cv::contourArea(d.
hull,
false);
525 if ( ! hullarea::get().contains(
int(huarea + 0.4999)))
continue;
// Filter 3: fill ratio (contour area / hull area, in percent) must not
// EXCEED hullfill -- the target is a hollow U shape, so overly "full" blobs
// are rejected (matches the hullfill help text above).
528 int const hufill =
int(
area / huarea * 100.0 + 0.4999);
529 if (hufill > hullfill::get())
continue;
// Filter 4: the smoothed contour itself must have 7-9 vertices (the
// expected vertex count of the U-shaped target outline).
533 double const peri = cv::arcLength(c,
true);
534 cv::approxPolyDP(c, d.
approx, epsi * peri,
true);
535 if (d.
approx.size() < 7 || d.
approx.size() > 9)
continue;
// Filter 5: Hu-moment shape dissimilarity between contour and its polygon
// approximation, scaled to percent, must be below ethresh.
539 d.
serr = 100.0 * cv::matchShapes(c, d.
approx, cv::CONTOURS_MATCH_I1, 0.0);
540 if (d.
serr > ethresh::get())
continue;
// Filter 6: reject shapes whose contour comes within `m` pixels of any
// frame border (truncated shapes would give bogus pose estimates).
// `reject` is declared in a line missing from this extraction.
547 for (
size_t i = 0; i < c.size(); ++i)
548 if (c[i].x < m || c[i].x >= imghsv.cols - m || c[i].y < m || c[i].y >= imghsv.rows - m)
549 { reject =
true;
break; }
550 if (reject)
continue;
// Orientation disambiguation: represent 2D direction sums as complex
// numbers. v10p23 = (hull[0]-hull[1]) + (hull[3]-hull[2]) sums one pair of
// opposite edges; v03p12 sums the other pair. (The imaginary parts are on
// lines missing from this extraction.)
562 std::complex<float> v10p23(
float(d.
hull[0].x - d.
hull[1].x + d.
hull[3].x - d.
hull[2].x),
564 float const len10p23 = std::abs(v10p23);
565 std::complex<float> v03p12(
float(d.
hull[3].x - d.
hull[0].x + d.
hull[2].x - d.
hull[1].x),
567 float const len03p12 = std::abs(v03p12);
// vCH = hull centroid minus contour centroid; for a U-shaped target this
// points from the mass of the U toward its opening.
570 cv::Moments
const momC = cv::moments(c);
571 cv::Moments
const momH = cv::moments(d.
hull);
572 std::complex<float> vCH(momH.m10 / momH.m00 - momC.m10 / momC.m00, momH.m01 / momH.m00 - momC.m01 / momC.m00);
573 float const lenCH = std::abs(vCH);
// Degenerate-vector guard before normalizing by the lengths below.
575 if (len10p23 < 0.1
F || len03p12 < 0.1
F || lenCH < 0.1
F)
continue;
// Cosine similarity of each edge-pair direction with vCH: the hull corner
// ordering should start on the edge pair aligned with the opening.
578 float const good = (v10p23.real() * vCH.real() + v10p23.imag() * vCH.imag()) / (len10p23 * lenCH);
579 float const bad = (v03p12.real() * vCH.real() + v03p12.imag() * vCH.imag()) / (len03p12 * lenCH);
// Require vCH to point up by at least 2px (image y grows downward) --
// i.e. the opening of the U must face up. NOTE(review): assumption inferred
// from the geometry above; confirm against upstream docs.
582 if (vCH.imag() >= -2.0F)
continue;
// If the wrong edge pair aligns best with vCH, rotate the hull ordering by
// one: move the last corner to the front.
586 if (bad > good) { d.
hull.insert(d.
hull.begin(), d.
hull.back()); d.
hull.pop_back(); }
// Publish the surviving detection into the shared list (the push itself is
// on a line missing from this extraction).
591 std::lock_guard<std::mutex> _(
itsDetMtx);
594 if (str2.length() > beststr2.length()) beststr2 = str2;
// Debug display: only the thread selected by showthread draws its threshold
// mask, and only when the output image is double-width (side-by-side view).
598 if (outimg && outimg->valid())
600 if (tnum == showthread::get() &&
int(outimg->width) == 2 * imgth.cols)
// Per-thread HSV window setup: thread 0 uses the user-set hcue/scue/vcue
// window; each higher-numbered thread widens all three sigmas by 20% per
// thread index, so the threads scan progressively broader color windows.
// NOTE(review): this hsvcue ctor takes 3 args while the one visible above
// takes 6 -- presumably an overload with default sigmas exists in the missing
// portion of the file; confirm upstream.
610 float const spread = 0.2F;
616 for (
size_t i = 0; i < nthreads; ++i)
618 hsvcue cue(hcue::get(), scue::get(), vcue::get());
619 cue.
sih *= (1.0F + spread * i); cue.
sis *= (1.0F + spread * i); cue.
siv *= (1.0F + spread * i);
// Later pass: re-widen the LEARNED windows of threads 3 and up by the same
// per-index factor (threads 2-3 track a learned window per the threads
// parameter help above).
639 for (
size_t i = 3; i <
itsHSV.size(); ++i)
642 itsHSV[i].sih *= (1.0F + spread * i);
643 itsHSV[i].sis *= (1.0F + spread * i);
644 itsHSV[i].siv *= (1.0F + spread * i);
// Duplicate suppression across threads: repeatedly find a pair of detections
// whose intersection-over-union exceeds the iou threshold, delete one, and
// restart, until no overlapping pair remains. (The deletion and the
// intersection-area computation `iarea` are on lines missing from this
// extraction.)
655 bool keepgoing =
true;
656 double const iouth = iou::get();
661 keepgoing =
false;
int delidx = -1;
// All unordered pairs (j < i) of current detections.
665 for (
size_t i = 0; i < siz; ++i)
667 for (
size_t j = 0; j < i; ++j)
// Union area approximated by the convex hull of both hulls' points.
// NOTE(review): a convex hull OVER-estimates the true union area, which
// makes `inoun` an under-estimate of the true IoU -- presumably an
// intentional cheap approximation; confirm upstream.
670 for (cv::Point
const & p :
itsDetections[j].hull) pts.push_back(p);
671 std::vector<cv::Point> hull;
672 cv::convexHull(pts, hull);
673 double uarea = cv::contourArea(hull);
677 double const inoun = iarea / uarea;
// A pair was flagged for deletion: break out and restart the scan, since
// indices shift after erasing.
684 if (delidx != -1)
break;
// HSV learning fragment: measure the mean/std-dev color inside the detected
// contours and blend it into thread 2's learned window.
694 int const w = imgbgr.cols,
h = imgbgr.rows;
// Denoise in parallel with the contour work below (3x3 median).
698 auto median_fut =
jevois::async([&](){ cv::medianBlur(imgbgr, medimgbgr, 3); } );
701 std::vector<std::vector<cv::Point> > contours;
// Optional debug overlay: wrap the raw YUYV output buffer as a 2-channel Mat
// so drawContours can write directly into the display image.
705 std::future<void> drawc_fut;
706 if (debug::get() && outimg && outimg->valid())
709 cv::Mat outuc2(outimg->height, outimg->width, CV_8UC2, outimg->pixelsw<
unsigned char>());
710 cv::drawContours(outuc2, contours, -1, jevois::yuyv::LightPink, 2);
// Binary mask of all contour interiors (thickness -1 = filled).
714 cv::Mat mask(
h,
w, CV_8UC1, (
unsigned char)0);
715 cv::drawContours(mask, contours, -1, 255, -1);
// Masked mean and std-dev of the median-filtered BGR image. NOTE(review):
// the output variable is named `std`, shadowing namespace std within this
// scope -- legal but fragile (its declaration is on a missing line).
722 cv::meanStdDev(medimgbgr, mean, std, mask);
// Pack mean and std-dev into a 2x1 BGR image and convert to HSV in one call.
// NOTE(review): running a std-dev triple through a BGR->HSV color conversion
// is not a mathematically meaningful variance transform -- presumably a
// deliberate cheap heuristic; confirm upstream.
725 cv::Mat bgrmean(2, 1, CV_8UC3); bgrmean.at<cv::Vec3b>(0, 0) = mean; bgrmean.at<cv::Vec3b>(1, 0) = std;
726 cv::Mat hsvmean; cv::cvtColor(bgrmean, hsvmean, cv::COLOR_BGR2HSV);
// Unpack learned HSV mean...
728 cv::Vec3b hsv = hsvmean.at<cv::Vec3b>(0, 0);
729 int H = hsv.val[0], S = hsv.val[1],
V = hsv.val[2];
// ...and learned HSV spread.
731 cv::Vec3b sighsv = hsvmean.at<cv::Vec3b>(1, 0);
732 int sH = sighsv.val[0], sS = sighsv.val[1], sV = sighsv.val[2];
// Exponential moving average (eta = learning rate) of thread 2's window
// sigmas toward the measured spread. The means are presumably updated via
// the Kalman1D subcomponents created above -- those lines are missing here.
739 float const eta = 0.4F;
740 itsHSV[2].sih = (1.0F - eta) *
itsHSV[2].sih + eta * sH;
741 itsHSV[2].sis = (1.0F - eta) *
itsHSV[2].sis + eta * sS;
742 itsHSV[2].siv = (1.0F - eta) *
itsHSV[2].siv + eta * sV;
// Emit one serial message per detection: 3D (pose) messages when rvecs are
// available, 2D messages otherwise (matches the dopose parameter help).
// The actual sendSerial*() calls are on lines missing from this extraction.
751 void sendAllSerial(
int w,
int h, std::vector<std::vector<cv::Point2f> >
const & corners,
752 std::vector<cv::Vec3d>
const & rvecs, std::vector<cv::Vec3d>
const & tvecs)
// 3D branch: a pose was estimated for each detection.
754 if (rvecs.empty() ==
false)
757 auto const osiz = objsize::get();
758 for (
size_t i = 0; i < corners.size(); ++i)
760 cv::Vec3d
const & rv = rvecs[i];
761 cv::Vec3d
const & tv = tvecs[i];
// Convert the Rodrigues rotation vector to a quaternion: theta = |rv| is
// the rotation angle, rv/|rv| the axis.
764 float theta = std::sqrt(rv[0] * rv[0] + rv[1] * rv[1] + rv[2] * rv[2]);
765 Eigen::Vector3f axis(rv[0], rv[1], rv[2]);
// NOTE(review): Eigen::AngleAxis requires a UNIT axis, but |axis| == theta
// here (rv is not normalized), so q is generally not a unit quaternion; a
// theta of 0 would also yield a zero axis. Verify against upstream -- the
// conventional form is AngleAxis(theta, axis / theta) with a theta > 0 guard.
766 Eigen::Quaternion<float>
q(Eigen::AngleAxis<float>(theta, axis));
// Fragment of the 3D serial message payload: object size, depth scale, and
// orientation quaternion components.
769 osiz.width, osiz.height, 1.0F,
770 q.w(),
q.x(),
q.y(),
q.z(),
// 2D branch (no pose): one message per detection from corners only; body is
// on lines missing from this extraction.
777 for (
size_t i = 0; i < corners.size(); ++i)
// Rebuild the morphology kernels from the current parameter values; a size
// of 0 disables the corresponding operation (element left unset).
786 int e = erodesize::get();
789 if (e)
itsErodeElement = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(e, e));
793 int d = dilatesize::get();
796 if (d)
itsDilateElement = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(d, d));
// Headless processing fragment: convert to HSV, fan detect() out across
// nthreads-1 async workers plus the current thread.
817 cv::Mat imghsv; cv::cvtColor(imgbgr, imghsv, cv::COLOR_BGR2HSV);
818 size_t const nthreads = threads::get();
// NOTE(review): if threads could ever be 0, `nthreads - 1` underflows
// size_t; presumably the parameter declaration (missing here) enforces a
// minimum of 1 -- confirm upstream.
// The lambda takes the thread index BY VALUE (tn) while capturing the rest
// by reference, so each worker sees its own index. The extra detect()
// arguments (3, h+2) are presumably text-overlay coordinates -- the detect()
// signature is not visible in this extraction.
828 std::vector<std::future<void> > dfut;
829 for (
size_t i = 0; i < nthreads - 1; ++i)
830 dfut.push_back(
jevois::async([&](
size_t tn) { detect(imghsv, tn, 3, h+2); }, i));
// Run the last thread's share of the work on the calling thread.
831 detect(imghsv, nthreads - 1, 3,
h+2);
// Outputs of the subsequent pose-estimation step (missing from extraction).
846 std::vector<std::vector<cv::Point2f> > corners; std::vector<cv::Vec3d> rvecs, tvecs;
// USB-output processing fragment: same pipeline as the headless path, plus
// input-format validation and an on-screen overlay.
// Enforce the expected camera format (YUYV at w x h).
867 inimg.
require(
"input",
w,
h, V4L2_PIX_FMT_YUYV);
877 outimg = outframe.get();
// The display may be same-width (overlay only) or double-width (input +
// debug threshold view side by side; see the showthread check in detect()).
879 if (outimg.
width !=
w && outimg.
width !=
w * 2)
LFATAL(
"Output image width should be 1x or 2x input width");
887 cv::Mat imghsv; cv::cvtColor(imgbgr, imghsv, cv::COLOR_BGR2HSV);
888 size_t const nthreads = threads::get();
// Same async fan-out as the headless path, but each worker also gets the
// output image for debug drawing.
898 std::vector<std::future<void> > dfut;
899 for (
size_t i = 0; i < nthreads - 1; ++i)
900 dfut.push_back(
jevois::async([&](
size_t tn) { detect(imghsv, tn, 3, h+2, &outimg); }, i));
901 detect(imghsv, nthreads - 1, 3,
h+2, &outimg);
919 std::vector<std::vector<cv::Point2f> > corners; std::vector<cv::Vec3d> rvecs, tvecs;
// Overlay text position/color fragment (the writeText call itself is on a
// missing line).
930 w + 3, 3, jevois::yuyv::White);
// Frames/s and CPU load string from the module's timer, for the overlay.
936 std::string
const & fpscpu = timer.
stop();
// Drawing fragment: render each detected quad, its pose axes, and a 3D cube
// into the YUYV output image. The function name and first parameters are
// missing from this extraction.
945 std::vector<cv::Vec3d>
const & rvecs, std::vector<cv::Vec3d>
const & tvecs)
947 auto const osiz = objsize::get();
float const w = osiz.width,
h = osiz.height;
948 int nobj =
int(corners.size());
// Pass 1: outline each detected quadrilateral by connecting consecutive
// corners (wrapping via (j+1) % 4). +0.5F rounds to nearest pixel.
954 for (
int i = 0; i < nobj; ++i)
956 std::vector<cv::Point2f>
const & obj = corners[i];
959 for (
int j = 0; j < 4; ++j)
961 cv::Point2f
const & p0 = obj[j];
962 cv::Point2f
const & p1 = obj[ (j+1) % 4 ];
964 int(p1.x + 0.5F),
int(p1.y + 0.5F), 1, jevois::yuyv::LightPink);
// Pass 2 (pose available): draw axes of half the object size; dd is the
// z-extent, negative so the cube extends toward the camera (the projection
// via cv::projectPoints is on lines missing from this extraction).
973 float const hw =
w * 0.5F, hh =
h * 0.5F, dd = -0.5F * std::max(
w,
h);
975 for (
int i = 0; i < nobj; ++i)
// Axis endpoints in object coordinates: origin, +x, +y, and -z.
978 std::vector<cv::Point3f> axisPoints;
979 axisPoints.push_back(cv::Point3f(0.0
F, 0.0
F, 0.0
F));
980 axisPoints.push_back(cv::Point3f(hw, 0.0
F, 0.0
F));
981 axisPoints.push_back(cv::Point3f(0.0
F, hh, 0.0
F));
982 axisPoints.push_back(cv::Point3f(0.0
F, 0.0
F, dd));
984 std::vector<cv::Point2f> imagePoints;
// Draw the three projected axes from the origin: x purple, y green, z grey
// (the drawLine calls' first arguments are on missing lines).
989 int(imagePoints[1].x + 0.5
F),
int(imagePoints[1].y + 0.5
F),
990 2, jevois::yuyv::MedPurple);
992 int(imagePoints[2].x + 0.5
F),
int(imagePoints[2].y + 0.5
F),
993 2, jevois::yuyv::MedGreen);
995 int(imagePoints[3].x + 0.5
F),
int(imagePoints[3].y + 0.5
F),
996 2, jevois::yuyv::MedGrey);
// Cube model: base rectangle in the z=0 plane (indices 0-3) and the same
// rectangle shifted to z=dd (indices 4-7).
999 std::vector<cv::Point3f> cubePoints;
1000 cubePoints.push_back(cv::Point3f(-hw, -hh, 0.0
F));
1001 cubePoints.push_back(cv::Point3f(hw, -hh, 0.0
F));
1002 cubePoints.push_back(cv::Point3f(hw, hh, 0.0
F));
1003 cubePoints.push_back(cv::Point3f(-hw, hh, 0.0
F));
1004 cubePoints.push_back(cv::Point3f(-hw, -hh, dd));
1005 cubePoints.push_back(cv::Point3f(hw, -hh, dd));
1006 cubePoints.push_back(cv::Point3f(hw, hh, dd));
1007 cubePoints.push_back(cv::Point3f(-hw, hh, dd));
// Projected cube corners, rounded to integer pixels for line drawing.
1009 std::vector<cv::Point2f> cuf;
1013 std::vector<cv::Point> cu;
1014 for (
auto const & p : cuf) cu.push_back(cv::Point(
int(p.x + 0.5F),
int(p.y + 0.5F)));