changeset 802:d3e8dd9f3ca4 dev

current dev for drone stabilization
author Nicolas Saunier <nicolas.saunier@polymtl.ca>
date Tue, 31 May 2016 17:59:35 -0400
parents 2cade72d75ad
children f7cf43b5ad3b
files c/cvutils.cpp c/feature-based-tracking.cpp
diffstat 2 files changed, 23 insertions(+), 38 deletions(-)
--- a/c/cvutils.cpp	Tue May 31 17:06:41 2016 -0400
+++ b/c/cvutils.cpp	Tue May 31 17:59:35 2016 -0400
@@ -49,35 +49,6 @@
     pts.push_back(kpts[i].pt);
 }
 
-// IplImage* allocateImage(const int& width, const int& height, const int& depth, const int& channels) { return ::allocateImage(cvSize(width, height), depth, channels);}
-
-// IplImage* allocateImage(const CvSize& size, const int& depth, const int& channels) {
-//   IplImage* image = cvCreateImage(size, depth, channels);
-
-//   if (!image) {
-//     cerr << "Error: Couldn't allocate image.  Out of memory?\n" << endl;
-//     exit(-1);
-//   }
-
-//   return image;
-// }
-
-// int goToFrameNum(CvCapture* inputVideo, const int& currentFrameNum, const int& targetFrameNum) {
-//   int frameNum = currentFrameNum;
-//   if (currentFrameNum > targetFrameNum) {
-//     cerr << "Current frame number " << currentFrameNum << " is after the target frame number " << targetFrameNum << "." << endl;
-//   } else if (currentFrameNum < targetFrameNum) {
-//     IplImage* frame = cvQueryFrame(inputVideo);
-//     frameNum++;
-//     while (frame && frameNum<targetFrameNum) {
-//       frame = cvQueryFrame(inputVideo);
-//       frameNum++;
-//     }
-//   }
-  
-//   return frameNum;
-// }
-
 const Scalar Colors::color[] = {Colors::red(),
 				Colors::green(),
 				Colors::blue(),
--- a/c/feature-based-tracking.cpp	Tue May 31 17:06:41 2016 -0400
+++ b/c/feature-based-tracking.cpp	Tue May 31 17:59:35 2016 -0400
@@ -136,9 +136,9 @@
   trajectoryDB->beginTransaction();
 
   std::vector<KeyPoint> prevKpts, currKpts;
-  std::vector<Point2f> prevPts, currPts, newPts;
-  std::vector<uchar> status;
-  std::vector<float> errors;
+  std::vector<Point2f> prevPts, currPts, newPts, homographyRefPts, homographyCurrPts;
+  std::vector<uchar> status, homographyPtsStatus;
+  std::vector<float> errors, homographyErrors;
   Mat prevDesc, currDesc;
 
   std::vector<FeatureTrajectoryPtr> lostFeatures;
@@ -146,7 +146,16 @@
 
   int key = '?';
   unsigned int savedFeatureId=0;
-  Mat frame = Mat::zeros(1, 1, CV_8UC1), currentFrameBW, previousFrameBW, undistortedFrame;
+  Mat frame = Mat::zeros(1, 1, CV_8UC1), currentFrameBW, previousFrameBW, undistortedFrame, homographyFrameBW;
+
+  Mat homographyFrame = imread("/media/disk1/Research/Data/unb/Drone/S23-10-frame.png"); // reference frame for the homography, if option turned on
+  if (params.undistort) {
+    remap(homographyFrame, undistortedFrame, map1, map2, interpolationMethod, BORDER_CONSTANT, 0.);
+    homographyFrame = undistortedFrame;
+  }
+
+  cvtColor(homographyFrame, homographyFrameBW, CV_RGB2GRAY);  
+  goodFeaturesToTrack(homographyFrameBW, homographyRefPts, params.maxNFeatures, MAX(0.01, params.featureQuality), params.minFeatureDistanceKLT, mask, params.blockSize, params.useHarrisDetector, params.k); // take higher quality features?
 
   unsigned int lastFrameNum = nFrames;
   if (params.nFrames > 0)
@@ -161,23 +170,25 @@
     } else if (frameNum%50 ==0)
       cout << "frame " << frameNum << endl;
 
+    cvtColor(frame, currentFrameBW, CV_RGB2GRAY);
+    
     if (params.undistort) {
-      remap(frame, undistortedFrame, map1, map2, interpolationMethod, BORDER_CONSTANT, 0.);
-      frame = undistortedFrame;
-
+      remap(currentFrameBW, undistortedFrame, map1, map2, interpolationMethod, BORDER_CONSTANT, 0.);
+      currentFrameBW = undistortedFrame;
       if (frame.size() != videoSize) {
 	cout << "Different frame size " << frameNum << ", breaking ([" << frame.size().width << "x" << frame.size().height << "])" << endl;
 	break;
       }
     }
     
-    cvtColor(frame, currentFrameBW, CV_RGB2GRAY);
-    
     if (!prevPts.empty()) {
       currPts.clear();
       calcOpticalFlowPyrLK(previousFrameBW, currentFrameBW, prevPts, currPts, status, errors, window, params.pyramidLevel, TermCriteria(static_cast<int>(TermCriteria::COUNT)+static_cast<int>(TermCriteria::EPS) /* = 3 */, params.maxNumberTrackingIterations, params.minTrackingError), /* int flags = */ 0, params.minFeatureEigThreshold);
       /// \todo try calcOpticalFlowFarneback
 
+      // TODO: run this reference-point tracking only when the stabilization option is enabled
+      calcOpticalFlowPyrLK(homographyFrameBW, currentFrameBW, homographyRefPts, homographyCurrPts, homographyPtsStatus, homographyErrors, window, params.pyramidLevel, TermCriteria(static_cast<int>(TermCriteria::COUNT)+static_cast<int>(TermCriteria::EPS) /* = 3 */, params.maxNumberTrackingIterations, params.minTrackingError), /* int flags = */ 0, params.minFeatureEigThreshold);
+
       std::vector<Point2f> trackedPts;
       std::vector<FeaturePointMatch>::iterator iter = featurePointMatches.begin();
       while (iter != featurePointMatches.end()) {
@@ -214,6 +225,8 @@
       if (params.display) {
 	BOOST_FOREACH(FeaturePointMatch fp, featurePointMatches)
 	  fp.feature->draw(frame, invHomography, Colors::red());
+	Mat homoFeatures = frame.clone();
+	// TODO: merge the reference frame and the current frame for display, as in the Python example
       }
     }
     
@@ -233,6 +246,7 @@
     if (params.display) {
       imshow("mask", featureMask*256);
       imshow("frame", frame);
+      //imshow("ref homography", )
       key = waitKey(2);
     }
     previousFrameBW = currentFrameBW.clone();
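
A minimal sketch (not part of this changeset) of the stabilization step the TODO comments above point toward: filter the reference/current point pairs by tracking status, estimate a homography with RANSAC, and warp the current frame back onto the reference frame. The helper name stabilizeFrame and the filtering loop are assumptions; only homographyRefPts, homographyCurrPts and homographyPtsStatus come from the diff. It assumes OpenCV 2.x, matching the CV_* constants used in the changeset.

// Hypothetical helper, not in the repository: estimate and apply a stabilizing homography.
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <vector>

cv::Mat stabilizeFrame(const cv::Mat& frame,
                       const std::vector<cv::Point2f>& homographyRefPts,
                       const std::vector<cv::Point2f>& homographyCurrPts,
                       const std::vector<uchar>& homographyPtsStatus) {
  // keep only the point pairs that calcOpticalFlowPyrLK tracked successfully
  std::vector<cv::Point2f> refPts, currPts;
  for (size_t i = 0; i < homographyPtsStatus.size(); ++i)
    if (homographyPtsStatus[i]) {
      refPts.push_back(homographyRefPts[i]);
      currPts.push_back(homographyCurrPts[i]);
    }

  cv::Mat stabilized = frame.clone();
  if (refPts.size() >= 4) {
    // homography mapping the current frame back onto the reference frame (RANSAC rejects outliers)
    cv::Mat homography = cv::findHomography(currPts, refPts, cv::RANSAC, 3.);
    if (!homography.empty())
      cv::warpPerspective(frame, stabilized, homography, frame.size());
  }
  return stabilized;
}

A call right after the second calcOpticalFlowPyrLK above would give a warped frame that could, for instance, be blended with the reference frame for the display mentioned in the TODO.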