diff --git a/example/tracking/mbtEdgeTracking.cpp b/example/tracking/mbtEdgeTracking.cpp index 5381e916c9..2589e7ad46 100644 --- a/example/tracking/mbtEdgeTracking.cpp +++ b/example/tracking/mbtEdgeTracking.cpp @@ -60,7 +60,7 @@ #include #include -#define GETOPTARGS "x:m:i:n:de:chtfColwvp" +#define GETOPTARGS "x:m:i:n:de:chtfColwvpj:" void usage(const char *name, const char *badparam) { @@ -153,7 +153,7 @@ OPTIONS: \n\ bool getOptions(int argc, const char **argv, std::string &ipath, std::string &configFile, std::string &modelFile, std::string &initFile, long &lastFrame, bool &displayFeatures, bool &click_allowed, bool &display, bool &cao3DModel, bool &trackCylinder, bool &useOgre, bool &showOgreConfigDialog, bool &useScanline, - bool &computeCovariance, bool &projectionError) + bool &computeCovariance, bool &projectionError, int &jump) { const char *optarg_; int c; @@ -205,6 +205,9 @@ bool getOptions(int argc, const char **argv, std::string &ipath, std::string &co case 'p': projectionError = true; break; + case 'j': + jump = atoi(optarg_); + break; case 'h': usage(argv[0], nullptr); return false; @@ -252,6 +255,7 @@ int main(int argc, const char **argv) bool computeCovariance = false; bool projectionError = false; bool quit = false; + int jump = 1; #if VISP_HAVE_DATASET_VERSION >= 0x030600 std::string ext("png"); @@ -269,8 +273,8 @@ int main(int argc, const char **argv) // Read the command line options if (!getOptions(argc, argv, opt_ipath, opt_configFile, opt_modelFile, opt_initFile, opt_lastFrame, displayFeatures, - opt_click_allowed, opt_display, cao3DModel, trackCylinder, useOgre, showOgreConfigDialog, - useScanline, computeCovariance, projectionError)) { + opt_click_allowed, opt_display, cao3DModel, trackCylinder, useOgre, showOgreConfigDialog, + useScanline, computeCovariance, projectionError, jump)) { return EXIT_FAILURE; } @@ -391,6 +395,10 @@ int main(int argc, const char **argv) vpMbEdgeTracker tracker; vpHomogeneousMatrix cMo; + std::vector pyramids = { true, true }; + // TODO: + tracker.setScales(pyramids); + // Initialise the tracker: camera parameters, moving edge and KLT settings vpCameraParameters cam; // From the xml file @@ -481,73 +489,76 @@ int main(int argc, const char **argv) while (!reader.end()) { // acquire a new image - reader.acquire(I); + for (int cpt = 0; cpt < jump; cpt++) { + reader.acquire(I); + } // display the image if (opt_display) vpDisplay::display(I); - // Test to reset the tracker - if (reader.getFrameIndex() == reader.getFirstFrameIndex() + 10) { - vpTRACE("Test reset tracker"); - if (opt_display) - vpDisplay::display(I); - tracker.resetTracker(); - tracker.loadConfigFile(configFile); -#if 0 - // Corresponding parameters manually set to have an example code - // By setting the parameters: - cam.initPersProjWithoutDistortion(547, 542, 338, 234); - - vpMe me; - me.setMaskSize(5); - me.setMaskNumber(180); - me.setRange(7); - me.setLikelihoodThresholdType(vpMe::NORMALIZED_THRESHOLD); - me.setThreshold(10); - me.setMu1(0.5); - me.setMu2(0.5); - me.setSampleStep(4); - - tracker.setCameraParameters(cam); - tracker.setMovingEdge(me); - - // Specify the clipping to use - tracker.setNearClippingDistance(0.01); - tracker.setFarClippingDistance(0.90); - tracker.setClipping(tracker.getClipping() | vpMbtPolygon::FOV_CLIPPING); - // tracker.setClipping(tracker.getClipping() | vpMbtPolygon::LEFT_CLIPPING | - // vpMbtPolygon::RIGHT_CLIPPING | vpMbtPolygon::UP_CLIPPING | - // vpMbtPolygon::DOWN_CLIPPING); // Equivalent to FOV_CLIPPING -#endif - 
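A note on the setScales() call introduced in mbtEdgeTracking.cpp above: vpMbEdgeTracker::setScales() expects a std::vector<bool> with one flag per pyramid level, so the two-entry vector enables tracking at full resolution (level 0) and at half resolution (level 1). A minimal sketch of that usage, assuming the stock vpMbEdgeTracker API:

    #include <visp3/mbt/vpMbEdgeTracker.h>

    vpMbEdgeTracker tracker;
    // One entry per pyramid level: level 0 = full resolution, level 1 = half resolution.
    // Disabling level 0 (e.g. { false, true }) tracks only on the subsampled image,
    // which is faster but less precise.
    std::vector<bool> pyramids = { true, true };
    tracker.setScales(pyramids);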
tracker.loadModel(modelFile); - tracker.setCameraParameters(cam); - tracker.setOgreVisibilityTest(useOgre); - tracker.setScanLineVisibilityTest(useScanline); - tracker.setCovarianceComputation(computeCovariance); - tracker.setProjectionErrorComputation(projectionError); - tracker.initFromPose(I, cMo); - } - - // Test to set an initial pose - if (reader.getFrameIndex() == reader.getFirstFrameIndex() + 50) { - cMo.buildFrom(0.0439540832, 0.0845870108, 0.5477322481, 2.179498458, 0.8611798108, -0.3491961946); - vpTRACE("Test set pose"); - tracker.setPose(I, cMo); - // if (opt_display) { - // // display the 3D model - // tracker.display(I, cMo, cam, vpColor::darkRed); - // // display the frame - // vpDisplay::displayFrame (I, cMo, cam, 0.05); - //// if (opt_click_allowed) { - //// vpDisplay::flush(I); - //// vpDisplay::getClick(I); - //// } - // } - } - - // track the object: stop tracking from frame 40 to 50 - if (reader.getFrameIndex() - reader.getFirstFrameIndex() < 40 || - reader.getFrameIndex() - reader.getFirstFrameIndex() >= 50) { +// // Test to reset the tracker +// if (reader.getFrameIndex() == reader.getFirstFrameIndex() + 10) { +// vpTRACE("Test reset tracker"); +// if (opt_display) +// vpDisplay::display(I); +// tracker.resetTracker(); +// tracker.loadConfigFile(configFile); +// #if 0 +// // Corresponding parameters manually set to have an example code +// // By setting the parameters: +// cam.initPersProjWithoutDistortion(547, 542, 338, 234); + +// vpMe me; +// me.setMaskSize(5); +// me.setMaskNumber(180); +// me.setRange(7); +// me.setLikelihoodThresholdType(vpMe::NORMALIZED_THRESHOLD); +// me.setThreshold(10); +// me.setMu1(0.5); +// me.setMu2(0.5); +// me.setSampleStep(4); + +// tracker.setCameraParameters(cam); +// tracker.setMovingEdge(me); + +// // Specify the clipping to use +// tracker.setNearClippingDistance(0.01); +// tracker.setFarClippingDistance(0.90); +// tracker.setClipping(tracker.getClipping() | vpMbtPolygon::FOV_CLIPPING); +// // tracker.setClipping(tracker.getClipping() | vpMbtPolygon::LEFT_CLIPPING | +// // vpMbtPolygon::RIGHT_CLIPPING | vpMbtPolygon::UP_CLIPPING | +// // vpMbtPolygon::DOWN_CLIPPING); // Equivalent to FOV_CLIPPING +// #endif +// tracker.loadModel(modelFile); +// tracker.setCameraParameters(cam); +// tracker.setOgreVisibilityTest(useOgre); +// tracker.setScanLineVisibilityTest(useScanline); +// tracker.setCovarianceComputation(computeCovariance); +// tracker.setProjectionErrorComputation(projectionError); +// tracker.initFromPose(I, cMo); +// } + + // // Test to set an initial pose + // if (reader.getFrameIndex() == reader.getFirstFrameIndex() + 50) { + // cMo.buildFrom(0.0439540832, 0.0845870108, 0.5477322481, 2.179498458, 0.8611798108, -0.3491961946); + // vpTRACE("Test set pose"); + // tracker.setPose(I, cMo); + // // if (opt_display) { + // // // display the 3D model + // // tracker.display(I, cMo, cam, vpColor::darkRed); + // // // display the frame + // // vpDisplay::displayFrame (I, cMo, cam, 0.05); + // //// if (opt_click_allowed) { + // //// vpDisplay::flush(I); + // //// vpDisplay::getClick(I); + // //// } + // // } + // } + + // // track the object: stop tracking from frame 40 to 50 + // if (reader.getFrameIndex() - reader.getFirstFrameIndex() < 40 || + // reader.getFrameIndex() - reader.getFirstFrameIndex() >= 50) + { tracker.track(I); tracker.getPose(cMo); if (opt_display) { @@ -560,9 +571,9 @@ int main(int argc, const char **argv) if (opt_click_allowed) { vpDisplay::displayText(I, 10, 10, "Click to quit", vpColor::red); - if 
(vpDisplay::getClick(I, false)) { - quit = true; - break; + if (vpDisplay::getClick(I, true)) { + // quit = true; + // break; } } diff --git a/example/tracking/mbtGenericTracking.cpp b/example/tracking/mbtGenericTracking.cpp index 5249b137da..cc881f78c8 100644 --- a/example/tracking/mbtGenericTracking.cpp +++ b/example/tracking/mbtGenericTracking.cpp @@ -276,8 +276,8 @@ int main(int argc, const char **argv) // Read the command line options if (!getOptions(argc, argv, opt_ipath, opt_configFile, opt_modelFile, opt_initFile, opt_lastFrame, displayFeatures, - opt_click_allowed, opt_display, cao3DModel, trackCylinder, useOgre, showOgreConfigDialog, - useScanline, computeCovariance, projectionError, trackerType)) { + opt_click_allowed, opt_display, cao3DModel, trackCylinder, useOgre, showOgreConfigDialog, + useScanline, computeCovariance, projectionError, trackerType)) { return EXIT_FAILURE; } @@ -521,6 +521,15 @@ int main(int argc, const char **argv) while (!reader.end() && !quit) { // acquire a new image reader.acquire(I1); + reader.acquire(I1); + reader.acquire(I1); + reader.acquire(I1); + reader.acquire(I1); + reader.acquire(I1); + reader.acquire(I1); + reader.acquire(I1); + reader.acquire(I1); + reader.acquire(I1); I2 = I1; // display the image if (opt_display) { diff --git a/example/tracking/mbtGenericTracking2.cpp b/example/tracking/mbtGenericTracking2.cpp index 357dc9cf56..ac11881dcf 100644 --- a/example/tracking/mbtGenericTracking2.cpp +++ b/example/tracking/mbtGenericTracking2.cpp @@ -60,7 +60,7 @@ #include #include -#define GETOPTARGS "x:m:i:n:de:chtfColwvpT:" +#define GETOPTARGS "x:m:i:n:de:chtfColwvpT:zyj:" #define USE_XML 0 @@ -158,7 +158,7 @@ OPTIONS: \n\ bool getOptions(int argc, const char **argv, std::string &ipath, std::string &configFile, std::string &modelFile, std::string &initFile, long &lastFrame, bool &displayFeatures, bool &click_allowed, bool &display, bool &cao3DModel, bool &trackCylinder, bool &useOgre, bool &showOgreConfigDialog, bool &useScanline, - bool &computeCovariance, bool &projectionError, int &trackerType) + bool &computeCovariance, bool &projectionError, int &trackerType, bool &experimentation, bool &forceInitME, int &jump) { const char *optarg_; int c; @@ -213,6 +213,15 @@ bool getOptions(int argc, const char **argv, std::string &ipath, std::string &co case 'T': trackerType = atoi(optarg_); break; + case 'z': + experimentation = true; + break; + case 'y': + forceInitME = true; + break; + case 'j': + jump = atoi(optarg_); + break; case 'h': usage(argv[0], nullptr); return false; @@ -259,6 +268,9 @@ int main(int argc, const char **argv) bool computeCovariance = false; bool projectionError = false; int trackerType = vpMbGenericTracker::EDGE_TRACKER; + bool experimentation = false; + bool forceInitME = false; + int jump = 1; #if VISP_HAVE_DATASET_VERSION >= 0x030600 std::string ext("png"); @@ -276,8 +288,8 @@ int main(int argc, const char **argv) // Read the command line options if (!getOptions(argc, argv, opt_ipath, opt_configFile, opt_modelFile, opt_initFile, opt_lastFrame, displayFeatures, - opt_click_allowed, opt_display, cao3DModel, trackCylinder, useOgre, showOgreConfigDialog, - useScanline, computeCovariance, projectionError, trackerType)) { + opt_click_allowed, opt_display, cao3DModel, trackCylinder, useOgre, showOgreConfigDialog, + useScanline, computeCovariance, projectionError, trackerType, experimentation, forceInitME, jump)) { return EXIT_FAILURE; } @@ -443,12 +455,12 @@ int main(int argc, const char **argv) vpMe me; me.setMaskSize(5); 
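The vpMe settings being built up here are the ones this patch retunes (setRange raised from 7 to 20, setSampleStep lowered from 4 to 1). Roughly, the range is how far, in pixels, each moving-edge site searches along the edge normal, and the sample step is the spacing between sites along the contour, so a wider range with denser sampling tolerates the larger inter-frame motion introduced by the new -j frame-skipping option, at the cost of CPU time. A sketch of the full moving-edge configuration with the role of each parameter, as I read the vpMe API:

    #include <visp3/me/vpMe.h>

    vpMe me;
    me.setMaskSize(5);      // size of the convolution masks used for edge detection
    me.setMaskNumber(180);  // number of mask orientations
    me.setRange(20);        // search +/- 20 pixels along the normal of each edge site
    me.setLikelihoodThresholdType(vpMe::NORMALIZED_THRESHOLD);
    me.setThreshold(10);    // minimum normalized contrast to keep a site
    me.setMu1(0.5);         // minimum allowed contrast variation
    me.setMu2(0.5);         // maximum allowed contrast variation
    me.setSampleStep(1);    // distance in pixels between two consecutive edge sites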
me.setMaskNumber(180); - me.setRange(7); + me.setRange(20); // TODO: me.setLikelihoodThresholdType(vpMe::NORMALIZED_THRESHOLD); me.setThreshold(10); me.setMu1(0.5); me.setMu2(0.5); - me.setSampleStep(4); + me.setSampleStep(1); std::map mapOfMe; mapOfMe["Camera1"] = me; mapOfMe["Camera2"] = me; @@ -481,6 +493,10 @@ int main(int argc, const char **argv) tracker->setNearClippingDistance(0.01); tracker->setFarClippingDistance(0.90); + // TODO: + dynamic_cast(tracker)->setForceInitMovingEdge(forceInitME); + dynamic_cast(tracker)->setGoodMovingEdgesRatioThreshold(0.2); + std::map mapOfClippingFlags; dynamic_cast(tracker)->getClipping(mapOfClippingFlags); for (std::map::iterator it = mapOfClippingFlags.begin(); it != mapOfClippingFlags.end(); @@ -561,10 +577,31 @@ int main(int argc, const char **argv) vpDisplay::flush(I3); } - bool quit = false, click = false; + bool quit = false, click = true; while (!reader.end() && !quit) { // acquire a new image - reader.acquire(I1); + for (int cpt = 0; cpt < jump; cpt++) { + reader.acquire(I1); + } + + // TODO: + if (experimentation) { + using namespace cv; + Mat grad_x, grad_y; + Mat abs_grad_x, abs_grad_y; + Mat image, src_gray; + vpImageConvert::convert(I1, src_gray); + GaussianBlur(src_gray, image, Size(3, 3), 0, 0, BORDER_DEFAULT); + Sobel(image, grad_x, CV_16SC1, 1, 0, 3); + Sobel(image, grad_y, CV_16SC1, 0, 1, 3); + // converting back to CV_8U + convertScaleAbs(grad_x, abs_grad_x); + convertScaleAbs(grad_y, abs_grad_y); + Mat grad; + addWeighted(abs_grad_x, 0.5, abs_grad_y, 0.5, 0, grad); + vpImageConvert::convert(grad, I1); + } + I2 = I1; I3 = I1; mapOfImages["Camera1"] = &I1; @@ -582,98 +619,102 @@ int main(int argc, const char **argv) vpDisplay::displayText(I1, 40, 20, ss.str(), vpColor::red); } - // Test to reset the tracker - if (reader.getFrameIndex() == reader.getFirstFrameIndex() + 10) { - std::cout << "----------Test reset tracker----------" << std::endl; - if (opt_display) { - vpDisplay::display(I1); - vpDisplay::display(I2); - vpDisplay::display(I3); - } - - tracker->resetTracker(); -#if USE_XML - dynamic_cast(tracker)->loadConfigFile(mapOfConfigFiles); -#else - // By setting the parameters: - cam.initPersProjWithoutDistortion(547, 542, 338, 234); - mapOfCameraParams["Camera1"] = cam; - mapOfCameraParams["Camera2"] = cam; - mapOfCameraParams["Camera3"] = cam; - - me.setMaskSize(5); - me.setMaskNumber(180); - me.setRange(7); - me.setLikelihoodThresholdType(vpMe::NORMALIZED_THRESHOLD); - me.setThreshold(10); - me.setMu1(0.5); - me.setMu2(0.5); - me.setSampleStep(4); - - mapOfMe["Camera1"] = me; - mapOfMe["Camera2"] = me; - mapOfMe["Camera3"] = me; - -#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO) - klt.setMaxFeatures(10000); - klt.setWindowSize(5); - klt.setQuality(0.01); - klt.setMinDistance(5); - klt.setHarrisFreeParameter(0.01); - klt.setBlockSize(3); - klt.setPyramidLevels(3); - - mapOfKlt["Camera1"] = klt; - mapOfKlt["Camera2"] = klt; - mapOfKlt["Camera3"] = klt; - - dynamic_cast(tracker)->setKltOpencv(mapOfKlt); - dynamic_cast(tracker)->setKltMaskBorder(5); -#endif - - dynamic_cast(tracker)->setCameraParameters(mapOfCameraParams); - dynamic_cast(tracker)->setMovingEdge(mapOfMe); - tracker->setAngleAppear(vpMath::rad(65)); - tracker->setAngleDisappear(vpMath::rad(75)); - - // Specify the clipping to - tracker->setNearClippingDistance(0.01); - tracker->setFarClippingDistance(0.90); - - dynamic_cast(tracker)->getClipping(mapOfClippingFlags); - for 
(std::map::iterator it = mapOfClippingFlags.begin(); - it != mapOfClippingFlags.end(); ++it) { - it->second = (it->second | vpMbtPolygon::FOV_CLIPPING); - } - - dynamic_cast(tracker)->setClipping(mapOfClippingFlags); - // tracker->setClipping(tracker->getClipping() | vpMbtPolygon::LEFT_CLIPPING - // | vpMbtPolygon::RIGHT_CLIPPING | vpMbtPolygon::UP_CLIPPING | - // vpMbtPolygon::DOWN_CLIPPING); // Equivalent to FOV_CLIPPING -#endif - tracker->loadModel(modelFile); - dynamic_cast(tracker)->setCameraParameters(mapOfCameraParams); - tracker->setOgreVisibilityTest(useOgre); - tracker->setScanLineVisibilityTest(useScanline); - tracker->setCovarianceComputation(computeCovariance); - tracker->setProjectionErrorComputation(projectionError); - dynamic_cast(tracker)->initFromPose(mapOfImages, mapOfCameraPoses); - } - - // Test to set an initial pose - if (reader.getFrameIndex() == reader.getFirstFrameIndex() + 50) { - vpHomogeneousMatrix c1Moi; - c1Moi.buildFrom(0.0439540832, 0.0845870108, 0.5477322481, 2.179498458, 0.8611798108, -0.3491961946); - std::map mapOfSetPoses; - mapOfSetPoses["Camera1"] = c1Moi; - - std::cout << "Test set pose" << std::endl; - dynamic_cast(tracker)->setPose(mapOfImages, mapOfSetPoses); - } - + // TODO: +// // Test to reset the tracker +// if (reader.getFrameIndex() == reader.getFirstFrameIndex() + 10) { +// std::cout << "----------Test reset tracker----------" << std::endl; +// if (opt_display) { +// vpDisplay::display(I1); +// vpDisplay::display(I2); +// vpDisplay::display(I3); +// } + +// tracker->resetTracker(); +// #if USE_XML +// dynamic_cast(tracker)->loadConfigFile(mapOfConfigFiles); +// #else +// // By setting the parameters: +// cam.initPersProjWithoutDistortion(547, 542, 338, 234); +// mapOfCameraParams["Camera1"] = cam; +// mapOfCameraParams["Camera2"] = cam; +// mapOfCameraParams["Camera3"] = cam; + +// me.setMaskSize(5); +// me.setMaskNumber(180); +// me.setRange(7); +// me.setLikelihoodThresholdType(vpMe::NORMALIZED_THRESHOLD); +// me.setThreshold(10); +// me.setMu1(0.5); +// me.setMu2(0.5); +// me.setSampleStep(4); + +// mapOfMe["Camera1"] = me; +// mapOfMe["Camera2"] = me; +// mapOfMe["Camera3"] = me; + +// #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO) +// klt.setMaxFeatures(10000); +// klt.setWindowSize(5); +// klt.setQuality(0.01); +// klt.setMinDistance(5); +// klt.setHarrisFreeParameter(0.01); +// klt.setBlockSize(3); +// klt.setPyramidLevels(3); + +// mapOfKlt["Camera1"] = klt; +// mapOfKlt["Camera2"] = klt; +// mapOfKlt["Camera3"] = klt; + +// dynamic_cast(tracker)->setKltOpencv(mapOfKlt); +// dynamic_cast(tracker)->setKltMaskBorder(5); +// #endif + +// dynamic_cast(tracker)->setCameraParameters(mapOfCameraParams); +// dynamic_cast(tracker)->setMovingEdge(mapOfMe); +// tracker->setAngleAppear(vpMath::rad(65)); +// tracker->setAngleDisappear(vpMath::rad(75)); + +// // Specify the clipping to +// tracker->setNearClippingDistance(0.01); +// tracker->setFarClippingDistance(0.90); + +// dynamic_cast(tracker)->getClipping(mapOfClippingFlags); +// for (std::map::iterator it = mapOfClippingFlags.begin(); +// it != mapOfClippingFlags.end(); ++it) { +// it->second = (it->second | vpMbtPolygon::FOV_CLIPPING); +// } + +// dynamic_cast(tracker)->setClipping(mapOfClippingFlags); +// // tracker->setClipping(tracker->getClipping() | vpMbtPolygon::LEFT_CLIPPING +// // | vpMbtPolygon::RIGHT_CLIPPING | vpMbtPolygon::UP_CLIPPING | +// // vpMbtPolygon::DOWN_CLIPPING); // Equivalent to 
FOV_CLIPPING +// #endif +// tracker->loadModel(modelFile); +// dynamic_cast(tracker)->setCameraParameters(mapOfCameraParams); +// tracker->setOgreVisibilityTest(useOgre); +// tracker->setScanLineVisibilityTest(useScanline); +// tracker->setCovarianceComputation(computeCovariance); +// tracker->setProjectionErrorComputation(projectionError); +// dynamic_cast(tracker)->initFromPose(mapOfImages, mapOfCameraPoses); +// } + + // TODO: + // // Test to set an initial pose + // if (reader.getFrameIndex() == reader.getFirstFrameIndex() + 50) { + // vpHomogeneousMatrix c1Moi; + // c1Moi.buildFrom(0.0439540832, 0.0845870108, 0.5477322481, 2.179498458, 0.8611798108, -0.3491961946); + // std::map mapOfSetPoses; + // mapOfSetPoses["Camera1"] = c1Moi; + + // std::cout << "Test set pose" << std::endl; + // dynamic_cast(tracker)->setPose(mapOfImages, mapOfSetPoses); + // } + + // TODO: // track the object: stop tracking from frame 40 to 50 - if (reader.getFrameIndex() - reader.getFirstFrameIndex() < 40 || - reader.getFrameIndex() - reader.getFirstFrameIndex() >= 50) { + // if (reader.getFrameIndex() - reader.getFirstFrameIndex() < 40 || + // reader.getFrameIndex() - reader.getFirstFrameIndex() >= 50) + { dynamic_cast(tracker)->track(mapOfImages); dynamic_cast(tracker)->getPose(mapOfCameraPoses); if (opt_display) { @@ -684,11 +725,11 @@ int main(int argc, const char **argv) mapOfSubImages["Camera2"] = &I2; dynamic_cast(tracker)->display(mapOfSubImages, mapOfCameraPoses, mapOfCameraParams, - vpColor::red, 3); + vpColor::red, 5); } else { dynamic_cast(tracker)->display(mapOfImages, mapOfCameraPoses, mapOfCameraParams, - vpColor::red, 3); + vpColor::red, 5); } // display the frame vpDisplay::displayFrame(I1, mapOfCameraPoses["Camera1"], mapOfCameraParams["Camera1"], 0.05); diff --git a/modules/tracker/mbt/include/visp3/mbt/vpMbEdgeTracker.h b/modules/tracker/mbt/include/visp3/mbt/vpMbEdgeTracker.h index 4f5073230f..214a187bf1 100644 --- a/modules/tracker/mbt/include/visp3/mbt/vpMbEdgeTracker.h +++ b/modules/tracker/mbt/include/visp3/mbt/vpMbEdgeTracker.h @@ -308,6 +308,9 @@ class VISP_EXPORT vpMbEdgeTracker : public virtual vpMbTracker //! 
Display features
   std::vector<std::vector<double> > m_featuresToBeDisplayedEdge;
 
+  // TODO:
+  bool m_forceInitMovingEdge;
+
 public:
   vpMbEdgeTracker();
   virtual ~vpMbEdgeTracker() override;
@@ -463,6 +466,8 @@ class VISP_EXPORT vpMbEdgeTracker : public virtual vpMbTracker
   void setUseEdgeTracking(const std::string &name, const bool &useEdgeTracking);
 
+  void setForceInitMovingEdge(bool force);
+
   virtual void track(const vpImage<unsigned char> &I) override;
   virtual void track(const vpImage<vpRGBa> &I) override;
   //@}
diff --git a/modules/tracker/mbt/include/visp3/mbt/vpMbGenericTracker.h b/modules/tracker/mbt/include/visp3/mbt/vpMbGenericTracker.h
index 3175dcd0a1..e62b553a43 100644
--- a/modules/tracker/mbt/include/visp3/mbt/vpMbGenericTracker.h
+++ b/modules/tracker/mbt/include/visp3/mbt/vpMbGenericTracker.h
@@ -574,6 +574,8 @@ class VISP_EXPORT vpMbGenericTracker : public vpMbTracker
   virtual void setUseDepthDenseTracking(const std::string &name, const bool &useDepthDenseTracking);
   virtual void setUseDepthNormalTracking(const std::string &name, const bool &useDepthNormalTracking);
   virtual void setUseEdgeTracking(const std::string &name, const bool &useEdgeTracking);
+  // TODO:
+  virtual void setForceInitMovingEdge(bool force);
 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
   virtual void setUseKltTracking(const std::string &name, const bool &useKltTracking);
 #endif
diff --git a/modules/tracker/mbt/src/edge/vpMbEdgeTracker.cpp b/modules/tracker/mbt/src/edge/vpMbEdgeTracker.cpp
index 663fa223f0..9512454478 100644
--- a/modules/tracker/mbt/src/edge/vpMbEdgeTracker.cpp
+++ b/modules/tracker/mbt/src/edge/vpMbEdgeTracker.cpp
@@ -52,6 +52,9 @@
 #include
 #include
 
+// TODO:
+#include
+
 #include
 #include
 #include
@@ -63,10 +66,10 @@
 */
 vpMbEdgeTracker::vpMbEdgeTracker()
   : me(), lines(1), circles(1), cylinders(1), nline(0), ncircle(0), ncylinder(0), nbvisiblepolygone(0),
-  percentageGdPt(0.4), scales(1), Ipyramid(0), scaleLevel(0), nbFeaturesForProjErrorComputation(0), m_factor(),
-  m_robustLines(), m_robustCylinders(), m_robustCircles(), m_wLines(), m_wCylinders(), m_wCircles(), m_errorLines(),
-  m_errorCylinders(), m_errorCircles(), m_L_edge(), m_error_edge(), m_w_edge(), m_weightedError_edge(),
-  m_robust_edge(), m_featuresToBeDisplayedEdge()
+  percentageGdPt(0.4), scales(1), Ipyramid(0), scaleLevel(0), nbFeaturesForProjErrorComputation(0), m_factor(),
+  m_robustLines(), m_robustCylinders(), m_robustCircles(), m_wLines(), m_wCylinders(), m_wCircles(), m_errorLines(),
+  m_errorCylinders(), m_errorCircles(), m_L_edge(), m_error_edge(), m_w_edge(), m_weightedError_edge(),
+  m_robust_edge(), m_featuresToBeDisplayedEdge(), m_forceInitMovingEdge(false)
 {
   scales[0] = true;
@@ -272,6 +275,9 @@ void vpMbEdgeTracker::computeVVS(const vpImage<unsigned char> &_I, unsigned int
       cMoPrev = m_cMo;
       m_cMo = vpExponentialMap::direct(v).inverse() * m_cMo;
 
+      // TODO:
+      std::cerr << "computeVVSPoseEstimation ; m_cMo:\n" << m_cMo << std::endl;
+
     } // endif(!restartFromLast)
 
     iter++;
@@ -1021,19 +1027,36 @@ void vpMbEdgeTracker::track(const vpImage<unsigned char> &I)
     initPyramid(I, Ipyramid);
 
   unsigned int lvl = (unsigned int)scales.size();
+
+  // TODO:
+  std::cerr << "\n\ntrack()" << std::endl;
+
   do {
     lvl--;
+    // TODO:
+    std::cerr << "\nlvl=" << lvl << " ; m_forceInitMovingEdge=" << m_forceInitMovingEdge << std::endl;
     projectionError = 90.0;
     if (scales[lvl]) {
       vpHomogeneousMatrix cMo_1 = m_cMo;
+      // TODO:
+      std::cerr << "cMo_1:\n" << cMo_1 << std::endl;
       try {
+        // TODO:
+        std::cerr << "downScale" << std::endl;
         downScale(lvl);
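For reference, the way the examples above consume the new flag (setForceInitMovingEdge() and the tighter good-moving-edges ratio are additions of this patch, not upstream ViSP; the caller-side names below are illustrative): when set, initMovingEdge(), trackMovingEdge() and reinitMovingEdge() rebuild the moving-edge sites of lines, cylinders and circles from the projected model at every frame, instead of only when a site list is empty or flagged for re-initialization. A caller-side sketch:

    #include <visp3/mbt/vpMbGenericTracker.h>

    bool forceInitME = true;  // e.g. driven by the new '-y' command line flag
    vpMbTracker *tracker = new vpMbGenericTracker(1, vpMbGenericTracker::EDGE_TRACKER);
    // The setter only exists on the derived classes, hence the cast.
    dynamic_cast<vpMbGenericTracker *>(tracker)->setForceInitMovingEdge(forceInitME);
    dynamic_cast<vpMbGenericTracker *>(tracker)->setGoodMovingEdgesRatioThreshold(0.2);
    // ... configure, initialize and track as usual ...
    delete tracker;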
+ std::ostringstream output_filename; + output_filename << "debug_pyramid_lvl_" << lvl << ".png"; + vpImageIo::write(*Ipyramid[lvl], output_filename.str()); + try { + // TODO: + std::cerr << "trackMovingEdge" << std::endl; trackMovingEdge(*Ipyramid[lvl]); - } catch (...) { + } + catch (...) { vpTRACE("Error in moving edge tracking"); throw; } @@ -1063,12 +1086,16 @@ void vpMbEdgeTracker::track(const vpImage &I) */ try { + // TODO: + std::cerr << "computeVVS" << std::endl; computeVVS(*Ipyramid[lvl], lvl); } catch (...) { covarianceMatrix = -1; throw; // throw the original exception } + // TODO: + std::cerr << "testTracking" << std::endl; testTracking(); if (displayFeatures) { @@ -1077,7 +1104,7 @@ void vpMbEdgeTracker::track(const vpImage &I) // Looking for new visible face bool newvisibleface = false; - visibleFace(I, m_cMo, newvisibleface); + visibleFace(*Ipyramid[lvl], m_cMo, newvisibleface); // cam.computeFov(I.getWidth(), I.getHeight()); if (useScanLine) { @@ -1085,15 +1112,17 @@ void vpMbEdgeTracker::track(const vpImage &I) faces.computeScanLineRender(m_cam, I.getWidth(), I.getHeight()); } - updateMovingEdge(I); + updateMovingEdge(*Ipyramid[lvl]); - initMovingEdge(I, m_cMo); + initMovingEdge(*Ipyramid[lvl], m_cMo); // Reinit the moving edge for the lines which need it. - reinitMovingEdge(I, m_cMo); + reinitMovingEdge(*Ipyramid[lvl], m_cMo); if (computeProjError) computeProjectionError(I); + // TODO: + std::cerr << "upScale" << std::endl; upScale(lvl); } catch (const vpException &e) { if (lvl != 0) { @@ -1540,7 +1569,7 @@ void vpMbEdgeTracker::initMovingEdge(const vpImage &I, const vpHo if (isvisible) { l->setVisible(true); l->updateTracked(); - if (l->meline.empty() && l->isTracked()) + if ((l->meline.empty() && l->isTracked()) || m_forceInitMovingEdge) // TODO: l->initMovingEdge(I, _cMo, doNotTrack, m_mask); } else { l->setVisible(false); @@ -1574,7 +1603,8 @@ void vpMbEdgeTracker::initMovingEdge(const vpImage &I, const vpHo if (isvisible) { cy->setVisible(true); - if (cy->meline1 == nullptr || cy->meline2 == nullptr) { + if (cy->meline1 == nullptr || cy->meline2 == nullptr || m_forceInitMovingEdge) // TODO: + { if (cy->isTracked()) cy->initMovingEdge(I, _cMo, doNotTrack, m_mask); } @@ -1607,7 +1637,8 @@ void vpMbEdgeTracker::initMovingEdge(const vpImage &I, const vpHo if (isvisible) { ci->setVisible(true); - if (ci->meEllipse == nullptr) { + if (ci->meEllipse == nullptr || m_forceInitMovingEdge) // TODO: + { if (ci->isTracked()) ci->initMovingEdge(I, _cMo, doNotTrack, m_mask); } @@ -1634,7 +1665,8 @@ void vpMbEdgeTracker::trackMovingEdge(const vpImage &I) ++it) { vpMbtDistanceLine *l = *it; if (l->isVisible() && l->isTracked()) { - if (l->meline.empty()) { + if (l->meline.empty() || m_forceInitMovingEdge) // TODO: + { l->initMovingEdge(I, m_cMo, doNotTrack, m_mask); } l->trackMovingEdge(I); @@ -1645,7 +1677,8 @@ void vpMbEdgeTracker::trackMovingEdge(const vpImage &I) it != cylinders[scaleLevel].end(); ++it) { vpMbtDistanceCylinder *cy = *it; if (cy->isVisible() && cy->isTracked()) { - if (cy->meline1 == nullptr || cy->meline2 == nullptr) { + if (cy->meline1 == nullptr || cy->meline2 == nullptr || m_forceInitMovingEdge) // TODO: + { cy->initMovingEdge(I, m_cMo, doNotTrack, m_mask); } cy->trackMovingEdge(I, m_cMo); @@ -1655,8 +1688,9 @@ void vpMbEdgeTracker::trackMovingEdge(const vpImage &I) for (std::list::const_iterator it = circles[scaleLevel].begin(); it != circles[scaleLevel].end(); ++it) { vpMbtDistanceCircle *ci = *it; - if (ci->isVisible() && ci->isTracked()) { - if 
(ci->meEllipse == nullptr) { + if ((ci->isVisible() && ci->isTracked()) || m_forceInitMovingEdge) { + if (ci->meEllipse == nullptr) // TODO: + { ci->initMovingEdge(I, m_cMo, doNotTrack, m_mask); } ci->trackMovingEdge(I, m_cMo); @@ -1876,7 +1910,7 @@ void vpMbEdgeTracker::reinitMovingEdge(const vpImage &I, const vp ++it) { if ((*it)->isTracked()) { l = *it; - if (l->Reinit && l->isVisible()) + if ((l->Reinit || m_forceInitMovingEdge) && l->isVisible()) // TODO: l->reinitMovingEdge(I, _cMo, m_mask); } } @@ -1886,7 +1920,7 @@ void vpMbEdgeTracker::reinitMovingEdge(const vpImage &I, const vp it != cylinders[scaleLevel].end(); ++it) { if ((*it)->isTracked()) { cy = *it; - if (cy->Reinit && cy->isVisible()) + if ((cy->Reinit || m_forceInitMovingEdge) && cy->isVisible()) // TODO: cy->reinitMovingEdge(I, _cMo, m_mask); } } @@ -1896,7 +1930,7 @@ void vpMbEdgeTracker::reinitMovingEdge(const vpImage &I, const vp it != circles[scaleLevel].end(); ++it) { if ((*it)->isTracked()) { ci = *it; - if (ci->Reinit && ci->isVisible()) + if ((ci->Reinit || m_forceInitMovingEdge) && ci->isVisible()) // TODO: ci->reinitMovingEdge(I, _cMo, m_mask); } } @@ -2954,3 +2988,9 @@ void vpMbEdgeTracker::setUseEdgeTracking(const std::string &name, const bool &us } } } + +// TODO! +void vpMbEdgeTracker::setForceInitMovingEdge(bool force) +{ + m_forceInitMovingEdge = force; +} diff --git a/modules/tracker/mbt/src/vpMbGenericTracker.cpp b/modules/tracker/mbt/src/vpMbGenericTracker.cpp index 830ff1577b..c6a7bfbb23 100644 --- a/modules/tracker/mbt/src/vpMbGenericTracker.cpp +++ b/modules/tracker/mbt/src/vpMbGenericTracker.cpp @@ -5242,6 +5242,16 @@ void vpMbGenericTracker::setUseEdgeTracking(const std::string &name, const bool } } +// TODO: +void vpMbGenericTracker::setForceInitMovingEdge(bool force) +{ + for (std::map::const_iterator it = m_mapOfTrackers.begin(); + it != m_mapOfTrackers.end(); ++it) { + TrackerWrapper *tracker = it->second; + tracker->setForceInitMovingEdge(force); + } +} + #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO) /*! 
Set if the polygon that has the given name has to be considered during @@ -6157,6 +6167,7 @@ void vpMbGenericTracker::TrackerWrapper::display(const vpImage &I const vpCameraParameters &cam, const vpColor &col, unsigned int thickness, bool displayFullModel) { + displayFullModel = true; if (displayFeatures) { std::vector > features = getFeaturesForDisplay(); for (size_t i = 0; i < features.size(); i++) { @@ -6208,22 +6219,22 @@ void vpMbGenericTracker::TrackerWrapper::display(const vpImage &I } } - std::vector > models = - getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel); - for (size_t i = 0; i < models.size(); i++) { - if (vpMath::equal(models[i][0], 0)) { - vpImagePoint ip1(models[i][1], models[i][2]); - vpImagePoint ip2(models[i][3], models[i][4]); - vpDisplay::displayLine(I, ip1, ip2, col, thickness); - } - else if (vpMath::equal(models[i][0], 1)) { - vpImagePoint center(models[i][1], models[i][2]); - double n20 = models[i][3]; - double n11 = models[i][4]; - double n02 = models[i][5]; - vpDisplay::displayEllipse(I, center, n20, n11, n02, true, col, thickness); - } - } + // std::vector > models = + // getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel); + // for (size_t i = 0; i < models.size(); i++) { + // if (vpMath::equal(models[i][0], 0)) { + // vpImagePoint ip1(models[i][1], models[i][2]); + // vpImagePoint ip2(models[i][3], models[i][4]); + // vpDisplay::displayLine(I, ip1, ip2, col, thickness); + // } + // else if (vpMath::equal(models[i][0], 1)) { + // vpImagePoint center(models[i][1], models[i][2]); + // double n20 = models[i][3]; + // double n11 = models[i][4]; + // double n02 = models[i][5]; + // vpDisplay::displayEllipse(I, center, n20, n11, n02, true, col, thickness); + // } + // } #ifdef VISP_HAVE_OGRE if ((m_trackerType & EDGE_TRACKER) @@ -6777,6 +6788,12 @@ void vpMbGenericTracker::TrackerWrapper::postTracking(const vpImage std::unique_ptr make_unique_compat(Args&&... 
args) { @@ -113,6 +114,8 @@ int main(int argc, char *argv[]) double start = vpTime::measureTimeMs(), end = -1; if (opencv_backend) { vpImageIo::readPNGfromMem(vec_img, I, vpImageIo::IO_OPENCV_BACKEND); + end = vpTime::measureTimeMs(); + vpImageConvert::convert(I, I_display); } else { if (channel > 1) { diff --git a/tutorial/tracking/model-based/generic/tutorial-mb-generic-tracker-save.cpp b/tutorial/tracking/model-based/generic/tutorial-mb-generic-tracker-save.cpp index ee6df94ce6..d2b245a749 100755 --- a/tutorial/tracking/model-based/generic/tutorial-mb-generic-tracker-save.cpp +++ b/tutorial/tracking/model-based/generic/tutorial-mb-generic-tracker-save.cpp @@ -38,7 +38,7 @@ int main(int argc, char **argv) if (std::string(argv[i]) == "--cv-backend") { opencv_backend = true; } - else if ((std::string(argv[i]) == "--save" || std::string(argv[i]) == "-i") && i+1 < argc) { + else if ((std::string(argv[i]) == "--save" || std::string(argv[i]) == "-s") && i+1 < argc) { npz_filename = argv[i+1]; } else if (std::string(argv[i]) == "--color" || std::string(argv[i]) == "-c") { @@ -124,7 +124,27 @@ int main(int argc, char **argv) int iter = 0; while (!g.end()) { g.acquire(I); + + // TODO: + using namespace cv; + Mat grad_x, grad_y; + Mat abs_grad_x, abs_grad_y; + Mat image, src_gray; + vpImageConvert::convert(I, src_gray); + GaussianBlur(src_gray, image, Size(3, 3), 0, 0, BORDER_DEFAULT); + Sobel(image, grad_x, CV_16SC1, 1, 0, 3); + Sobel(image, grad_y, CV_16SC1, 0, 1, 3); + // converting back to CV_8U + convertScaleAbs(grad_x, abs_grad_x); + convertScaleAbs(grad_y, abs_grad_y); + Mat grad; + addWeighted(abs_grad_x, 0.5, abs_grad_y, 0.5, 0, grad); + vpImageConvert::convert(grad, I); + // imshow("grad", grad); + // char key = (char)waitKey(30); + tracker.track(I); + // tracker.track(I); tracker.getPose(cMo); // std::cout << "\ncMo:\n" << cMo << std::endl;
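Both mbtGenericTracking2.cpp (behind the new -z / experimentation flag) and the tutorial above feed the tracker a Sobel gradient-magnitude image instead of the raw grayscale frame. A self-contained sketch of that preprocessing step, factored into a helper; the function name is illustrative, and it assumes OpenCV's imgproc module plus ViSP's vpImageConvert are available:

    #include <opencv2/imgproc.hpp>
    #include <visp3/core/vpImage.h>
    #include <visp3/core/vpImageConvert.h>

    // Replace a grayscale frame by an approximation of its Sobel gradient magnitude.
    void toGradientMagnitude(vpImage<unsigned char> &I)
    {
      cv::Mat src_gray, blurred, grad_x, grad_y, abs_grad_x, abs_grad_y, grad;
      vpImageConvert::convert(I, src_gray);                        // vpImage -> cv::Mat
      cv::GaussianBlur(src_gray, blurred, cv::Size(3, 3), 0);      // smooth before differentiating
      cv::Sobel(blurred, grad_x, CV_16S, 1, 0, 3);                 // horizontal derivative
      cv::Sobel(blurred, grad_y, CV_16S, 0, 1, 3);                 // vertical derivative
      cv::convertScaleAbs(grad_x, abs_grad_x);                     // back to 8-bit
      cv::convertScaleAbs(grad_y, abs_grad_y);
      cv::addWeighted(abs_grad_x, 0.5, abs_grad_y, 0.5, 0, grad);  // 0.5*|gx| + 0.5*|gy|
      vpImageConvert::convert(grad, I);                            // cv::Mat -> vpImage
    }

Whether the moving-edge tracker really behaves better on a gradient image than on the raw frame is precisely what the experimentation flag is there to evaluate; the helper only isolates the conversion from the acquisition loop.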