comparison c/feature-based-tracking.cpp @ 614:5e09583275a4

Merged Nicolas/trafficintelligence into default
author Mohamed Gomaa <eng.m.gom3a@gmail.com>
date Fri, 05 Dec 2014 12:13:53 -0500
parents a3add9f751ef
children 045d05cef9d0
--- c/feature-based-tracking.cpp (598:11f96bd08552)
+++ c/feature-based-tracking.cpp (614:5e09583275a4)
1 #include "Motion.hpp" 1 #include "Motion.hpp"
2 #include "Parameters.hpp" 2 #include "Parameters.hpp"
3 #include "cvutils.hpp" 3 #include "cvutils.hpp"
4 #include "utils.hpp" 4 #include "utils.hpp"
5 #include "InputVideoFileModule.h"
6 #include "InputFrameListModule.h"
5 7
6 #include "src/Trajectory.h" 8 #include "src/Trajectory.h"
7 #include "src/TrajectoryDBAccessList.h" 9 #include "src/TrajectoryDBAccessList.h"
8 #include "src/TrajectoryDBAccessBlob.h" 10 #include "src/TrajectoryDBAccessBlob.h"
9 11
14 #include "opencv2/highgui/highgui.hpp" 16 #include "opencv2/highgui/highgui.hpp"
15 #include "opencv2/objdetect/objdetect.hpp" 17 #include "opencv2/objdetect/objdetect.hpp"
16 18
17 #include <boost/shared_ptr.hpp> 19 #include <boost/shared_ptr.hpp>
18 #include <boost/foreach.hpp> 20 #include <boost/foreach.hpp>
21 #include <boost/filesystem.hpp>
19 22
20 #include <iostream> 23 #include <iostream>
21 #include <vector> 24 #include <vector>
22 #include <ctime> 25 #include <ctime>
26 #include <cmath>
23 27
24 using namespace std; 28 using namespace std;
25 using namespace cv; 29 using namespace cv;
30 namespace fs = boost::filesystem;
26 31
27 void drawMatchesRelative(const vector<KeyPoint>& train, const vector<KeyPoint>& query, std::vector<cv::DMatch>& matches, Mat& img) { 32 void drawMatchesRelative(const vector<KeyPoint>& train, const vector<KeyPoint>& query, std::vector<cv::DMatch>& matches, Mat& img) {
28 for (int i = 0; i < (int)matches.size(); i++) 33 for (int i = 0; i < (int)matches.size(); i++)
29 { 34 {
30 Point2f pt_new = query[matches[i].queryIdx].pt; 35 Point2f pt_new = query[matches[i].queryIdx].pt;
@@ -52,16 +57,13 @@

   FeaturePointMatch(FeatureTrajectoryPtr _feature, const int& _pointNum):
     feature(_feature), pointNum(_pointNum) {}
 };

-inline void saveFeatures(vector<FeatureTrajectoryPtr>& features, TrajectoryDBAccess<Point2f>& db, const string& positionsTableName, const string& velocitiesTableName, const unsigned int& minNFeatures = 0) {
-  /// \todo smoothing
-  if (features.size() >= minNFeatures) {
-    BOOST_FOREACH(FeatureTrajectoryPtr f, features) f->write(db, positionsTableName, velocitiesTableName);
-    features.clear();
-  }
+inline void saveFeatures(vector<FeatureTrajectoryPtr>& features, TrajectoryDBAccess<Point2f>& db, const string& positionsTableName, const string& velocitiesTableName) {
+  BOOST_FOREACH(FeatureTrajectoryPtr f, features) f->write(db, positionsTableName, velocitiesTableName);
+  features.clear();
 }

 void trackFeatures(const KLTFeatureTrackingParameters& params) {
   // BriefDescriptorExtractor brief(32);
   // const int DESIRED_FTRS = 500;
@@ -73,30 +75,53 @@
   invHomography = homography.inv();

   float minTotalFeatureDisplacement = params.nDisplacements*params.minFeatureDisplacement;
   Size window = Size(params.windowSize, params.windowSize);

+  int interpolationMethod = -1;
+  if (params.interpolationMethod == 0)
+    interpolationMethod = INTER_NEAREST;
+  else if (params.interpolationMethod == 1)
+    interpolationMethod = INTER_LINEAR;
+  else if (params.interpolationMethod == 2)
+    interpolationMethod = INTER_CUBIC;
+  else if (params.interpolationMethod == 3)
+    interpolationMethod = INTER_LANCZOS4;
+  else
+    cout << "Unsupported option " << interpolationMethod << " for interpolation method" << endl;
+
   // BruteForceMatcher<Hamming> descMatcher;
   // vector<DMatch> matches;

-  VideoCapture capture;
-  Size videoSize;
-  unsigned int nFrames = 0;
-  capture.open(params.videoFilename);
-  if(capture.isOpened()) {
-    videoSize = Size(capture.get(CV_CAP_PROP_FRAME_WIDTH), capture.get(CV_CAP_PROP_FRAME_HEIGHT));
-    nFrames = capture.get(CV_CAP_PROP_FRAME_COUNT);
-    cout << "Video " << params.videoFilename <<
-      ": width=" << videoSize.width <<
-      ", height=" << videoSize.height <<
-      ", nframes=" << nFrames << endl;
-  } else {
+  boost::shared_ptr<InputFrameProviderIface> capture;
+  if (fs::is_directory(fs::path(params.videoFilename)))
+    capture = boost::shared_ptr<InputFrameListModule>(new InputFrameListModule(params.videoFilename));
+  else if(!params.videoFilename.empty())
+    capture = boost::shared_ptr<InputVideoFileModule>(new InputVideoFileModule(params.videoFilename));
+  else
+    cout << "No valid input parameters" << endl;
+
+  if(!capture->isOpen()) {
     cout << "Video filename " << params.videoFilename << " could not be opened. Exiting." << endl;
     exit(0);
   }
-  // if (!capture.isOpened())
-  //   {
-  //     //help(argv);
-  //     cout << "capture device " << argv[1] << " failed to open!" << endl;
-  //     return 1;
-  //   }

+  Size videoSize = capture->getSize();
+  unsigned int nFrames = capture->getNbFrames();
+  cout << "Video " << params.videoFilename <<
+    ": width=" << videoSize.width <<
+    ", height=" << videoSize.height <<
+    ", nframes=" << nFrames << endl;
+
+  Mat map1, map2;
+  if (params.undistort) {
+    Mat intrinsicCameraMatrix = ::loadMat(params.intrinsicCameraFilename, " ");
+    Mat newIntrinsicCameraMatrix = intrinsicCameraMatrix.clone();
+    videoSize = Size(static_cast<int>(round(videoSize.width*params.undistortedImageMultiplication)), static_cast<int>(round(videoSize.height*params.undistortedImageMultiplication)));
+    newIntrinsicCameraMatrix.at<float>(0,2) = videoSize.width/2.;
+    newIntrinsicCameraMatrix.at<float>(1,2) = videoSize.height/2.;
+    initUndistortRectifyMap(intrinsicCameraMatrix, params.distortionCoefficients, Mat::eye(3,3, CV_32FC1), newIntrinsicCameraMatrix, videoSize, CV_32FC1, map1, map2);
+
+    cout << "Undistorted width=" << videoSize.width <<
+      ", height=" << videoSize.height << endl;
+  }
+
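Note on the undistortion added above: the two remap lookup tables are built once with initUndistortRectifyMap before the frame loop, so per-frame correction reduces to a single remap call. Below is a minimal self-contained sketch of that pattern; the camera matrix, distortion coefficients and frame size are invented placeholder values, not this project's calibration (which is loaded from params.intrinsicCameraFilename).

// sketch only: placeholder calibration values, OpenCV 2.x-style headers
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <iostream>

int main() {
    cv::Size frameSize(640, 480);                       // assumed frame size
    cv::Mat intrinsic = (cv::Mat_<float>(3,3) << 500.f, 0.f, 320.f,
                                                 0.f, 500.f, 240.f,
                                                 0.f,   0.f,   1.f);
    cv::Mat distortion = (cv::Mat_<float>(1,5) << -0.3f, 0.1f, 0.f, 0.f, 0.f);

    // computed once, before the frame loop
    cv::Mat map1, map2;
    cv::initUndistortRectifyMap(intrinsic, distortion, cv::Mat::eye(3,3,CV_32FC1),
                                intrinsic, frameSize, CV_32FC1, map1, map2);

    // inside the loop, only the cheap per-pixel lookup remains
    cv::Mat frame = cv::Mat::zeros(frameSize, CV_8UC3), undistorted;
    cv::remap(frame, undistorted, map1, map2, cv::INTER_LINEAR, cv::BORDER_CONSTANT, 0.);
    std::cout << "undistorted: " << undistorted.cols << "x" << undistorted.rows << std::endl;
    return 0;
}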
@@ -103,8 +128,10 @@
   Mat mask = imread(params.maskFilename, 0);
-  if (mask.empty())
+  if (mask.empty()) {
+    cout << "Mask filename " << params.maskFilename << " could not be opened." << endl;
     mask = Mat::ones(videoSize, CV_8UC1);
+  }

   boost::shared_ptr<TrajectoryDBAccess<Point2f> > trajectoryDB = boost::shared_ptr<TrajectoryDBAccess<Point2f> >(new TrajectoryDBAccessList<Point2f>());
   //TrajectoryDBAccess<Point2f>* trajectoryDB = new TrajectoryDBAccessBlob<Point2f>();
   trajectoryDB->connect(params.databaseFilename.c_str());
   trajectoryDB->createTable("positions");
@@ -118,71 +145,65 @@
   Mat prevDesc, currDesc;

   std::vector<FeatureTrajectoryPtr> lostFeatures;
   std::vector<FeaturePointMatch> featurePointMatches;

-  HOGDescriptor hog;
-  hog.setSVMDetector(HOGDescriptor::getDefaultPeopleDetector());
-
   int key = '?';
   unsigned int savedFeatureId=0;
-  Mat frame = Mat::zeros(1, 1, CV_8UC1), currentFrameBW, previousFrameBW;
+  Mat frame = Mat::zeros(1, 1, CV_8UC1), currentFrameBW, previousFrameBW, undistortedFrame;

   unsigned int lastFrameNum = nFrames;
   if (params.nFrames > 0)
     lastFrameNum = MIN(params.frame1+static_cast<unsigned int>(params.nFrames), nFrames);

-  //capture.set(CV_CAP_PROP_POS_FRAMES, params.frame1);
+  capture->setFrameNumber(params.frame1);
   for (unsigned int frameNum = params.frame1; (frameNum < lastFrameNum) && !::interruptionKey(key); frameNum++) {
-    capture >> frame;
-
-    if (frame.empty() || frame.size() != videoSize)
+    bool success = capture->getNextFrame(frame);
+    if (!success || frame.empty()) {
+      cout << "Empty frame " << frameNum << ", breaking (" << success << " " << frame.empty() << " [" << frame.size().width << "x" << frame.size().height << "])" << endl;
       break;
-
-    if (frameNum%50 ==0)
+    } else if (frameNum%50 ==0)
       cout << "frame " << frameNum << endl;

-    //capture.get(CV_CAP_PROP_POS_FRAMES) << " " << prevPts.size() << endl;
-
-    // int emptyFrameNum = 0;
-    // while (frame.empty()) {
-    //   cerr << "empty frame " << emptyFrameNum << " " << capture.get(CV_CAP_PROP_POS_FRAMES)<< endl;
-    //   capture >> frame;//break;
-    //   emptyFrameNum++;
-    //   if (emptyFrameNum>=3000)
-    //     exit(0);
-    // }
+    if (params.undistort) {
+      remap(frame, undistortedFrame, map1, map2, interpolationMethod, BORDER_CONSTANT, 0.);
+      frame = undistortedFrame;
+
+      if (frame.size() != videoSize) {
+        cout << "Different frame size " << frameNum << ", breaking ([" << frame.size().width << "x" << frame.size().height << "])" << endl;
+        break;
+      }
+    }
+

     cvtColor(frame, currentFrameBW, CV_RGB2GRAY);
-    // "normal" feature detectors: detect features here
-    // detector.detect(currentFrameBW, currKpts); // see video_homography c++ sample

     if (!prevPts.empty()) {
-      //::keyPoints2Points(prevKpts, prevPts);
       currPts.clear();
-      calcOpticalFlowPyrLK(previousFrameBW, currentFrameBW, prevPts, currPts, status, errors, window, params.pyramidLevel, TermCriteria(3 /*static_cast<int>(TermCriteria::COUNT)+static_cast<int>(TermCriteria::EPS)*/, params.maxNumberTrackingIterations, params.minTrackingError), 0.5 /* unused */, 0); // OPTFLOW_USE_INITIAL_FLOW
+      calcOpticalFlowPyrLK(previousFrameBW, currentFrameBW, prevPts, currPts, status, errors, window, params.pyramidLevel, TermCriteria(static_cast<int>(TermCriteria::COUNT)+static_cast<int>(TermCriteria::EPS) /* = 3 */, params.maxNumberTrackingIterations, params.minTrackingError), /* int flags = */ 0, params.minFeatureEigThreshold);
       /// \todo try calcOpticalFlowFarneback

       std::vector<Point2f> trackedPts;
       std::vector<FeaturePointMatch>::iterator iter = featurePointMatches.begin();
       while (iter != featurePointMatches.end()) {
         bool deleteFeature = false;

         if (status[iter->pointNum]) {
           iter->feature->addPoint(frameNum, currPts[iter->pointNum], homography);

-          deleteFeature |= iter->feature->isDisplacementSmall(params.nDisplacements, minTotalFeatureDisplacement)
+          deleteFeature = iter->feature->isDisplacementSmall(params.nDisplacements, minTotalFeatureDisplacement)
             || !iter->feature->isMotionSmooth(params.accelerationBound, params.deviationBound);
           if (deleteFeature)
             iter->feature->shorten();
         } else
           deleteFeature = true;

         if (deleteFeature) {
           if (iter->feature->length() >= params.minFeatureTime) {
             iter->feature->setId(savedFeatureId);
             savedFeatureId++;
+            iter->feature->movingAverage(params.nFramesSmoothing);
             lostFeatures.push_back(iter->feature);
           }
           iter = featurePointMatches.erase(iter);
         } else {
           trackedPts.push_back(currPts[iter->pointNum]);
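The corrected calcOpticalFlowPyrLK call above combines COUNT and EPS in the termination criterion and passes the two trailing parameters in their documented order: the flags integer, then the minimum-eigenvalue threshold (the previous call had a stray 0.5 in that position). A small self-contained illustration of that call, with arbitrary example values, tracking the corners of a square that shifts between two synthetic frames:

// sketch only: synthetic frames and arbitrary parameter values
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/video/tracking.hpp>
#include <iostream>
#include <vector>

int main() {
    // a bright square that moves a few pixels between the two frames
    cv::Mat prev = cv::Mat::zeros(240, 320, CV_8UC1), curr = cv::Mat::zeros(240, 320, CV_8UC1);
    cv::rectangle(prev, cv::Rect(100, 80, 60, 60), cv::Scalar(255), -1);
    cv::rectangle(curr, cv::Rect(104, 83, 60, 60), cv::Scalar(255), -1);

    // detect corners to track in the first frame
    std::vector<cv::Point2f> prevPts, currPts;
    cv::goodFeaturesToTrack(prev, prevPts, 100, 0.1, 5.);

    // pyramidal Lucas-Kanade: criteria, then flags, then minEigThreshold
    std::vector<uchar> status;
    std::vector<float> errors;
    cv::calcOpticalFlowPyrLK(prev, curr, prevPts, currPts, status, errors,
                             cv::Size(7, 7), 3,
                             cv::TermCriteria(cv::TermCriteria::COUNT + cv::TermCriteria::EPS, 20, 0.3),
                             0 /* flags */, 1e-4 /* minEigThreshold */);

    for (std::size_t i = 0; i < prevPts.size(); ++i)
        if (status[i])
            std::cout << prevPts[i].x << "," << prevPts[i].y << " -> "
                      << currPts[i].x << "," << currPts[i].y << std::endl;
    return 0;
}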
@@ -201,45 +222,45 @@
       // vector<Rect> locations;
       // hog.detectMultiScale(frame, locations, 0, Size(8,8), Size(32,32), 1.05, 2);
       // BOOST_FOREACH(Rect r, locations)
       //   rectangle(frame, r.tl(), r.br(), cv::Scalar(0,255,0), 3);
       }
-      //drawOpticalFlow(prevPts, currPts, status, frame);
-
-      // cout << matches.size() << " matches" << endl;
-      // descMatcher.match(currDesc, prevDesc, matches);
-      // cout << matches.size() << " matches" << endl;
-      //drawMatchesRelative(prevKpts, currKpts, matches, frame);
     }

     // adding new features, using mask around existing feature positions
     Mat featureMask = mask.clone();
     for (unsigned int n=0;n<currPts.size(); n++)
       for (int j=MAX(0, currPts[n].x-params.minFeatureDistanceKLT); j<MIN(videoSize.width, currPts[n].x+params.minFeatureDistanceKLT+1); j++)
         for (int i=MAX(0, currPts[n].y-params.minFeatureDistanceKLT); i<MIN(videoSize.height, currPts[n].y+params.minFeatureDistanceKLT+1); i++)
           featureMask.at<uchar>(i,j)=0;
-    goodFeaturesToTrack(currentFrameBW, newPts, params.maxNFeatures, params.featureQuality, params.minFeatureDistanceKLT, featureMask, params.windowSize, params.useHarrisDetector, params.k);
+    goodFeaturesToTrack(currentFrameBW, newPts, params.maxNFeatures, params.featureQuality, params.minFeatureDistanceKLT, featureMask, params.blockSize, params.useHarrisDetector, params.k);
     BOOST_FOREACH(Point2f p, newPts) { //for (unsigned int i=0; i<newPts.size(); i++) {
       FeatureTrajectoryPtr f = FeatureTrajectoryPtr(new FeatureTrajectory(frameNum, p, homography));
       featurePointMatches.push_back(FeaturePointMatch(f, currPts.size()));
       currPts.push_back(p);
     }
-    // currPts.insert(currPts.end(), newPts.begin(), newPts.end());
-    //::keyPoints2Points(currKpts, currPts, false);
-
-    //brief.compute(currentFrameBW, currKpts, currDesc); //Compute brief descriptors at each keypoint location

     if (params.display) {
+      imshow("mask", featureMask*256);
       imshow("frame", frame);
-      imshow("mask", featureMask*256);
       key = waitKey(2);
     }
     previousFrameBW = currentFrameBW.clone();
     prevPts = currPts;
-    //prevKpts = currKpts;
-    //currDesc.copyTo(prevDesc);
   }
+
+  // save the remaining currently tracked features
+  std::vector<FeaturePointMatch>::iterator iter = featurePointMatches.begin();
+  while (iter != featurePointMatches.end()) {
+    if (iter->feature->length() >= params.minFeatureTime) {
+      iter->feature->setId(savedFeatureId);
+      savedFeatureId++;
+      iter->feature->movingAverage(params.nFramesSmoothing);
+      iter->feature->write(*trajectoryDB, "positions", "velocities");
+    }
+    iter++;
+  }

   trajectoryDB->endTransaction();
   trajectoryDB->disconnect();
 }

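On the goodFeaturesToTrack change above: the argument that follows the mask is blockSize, the neighborhood over which the corner covariance matrix is averaged, not the Lucas-Kanade search window, hence the switch from params.windowSize to params.blockSize. A short sketch of the same mask-then-detect pattern with made-up values, suppressing detection around a point that is already tracked:

// sketch only: invented frame content and parameter values
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <iostream>
#include <vector>

int main() {
    // synthetic frame with two bright squares so that corners exist
    cv::Mat frame = cv::Mat::zeros(240, 320, CV_8UC1);
    cv::rectangle(frame, cv::Rect(40, 40, 50, 50), cv::Scalar(255), -1);
    cv::rectangle(frame, cv::Rect(200, 120, 50, 50), cv::Scalar(255), -1);

    // zero the mask around an already-tracked point so it is not re-detected
    cv::Mat featureMask = cv::Mat::ones(frame.size(), CV_8UC1);
    int minDistance = 10;
    cv::Point tracked(65, 65);
    cv::Rect neighborhood(tracked.x - minDistance, tracked.y - minDistance,
                          2*minDistance + 1, 2*minDistance + 1);
    featureMask(neighborhood & cv::Rect(0, 0, frame.cols, frame.rows)) = 0;

    // 7th argument is blockSize (corner covariance neighborhood), not the tracking window
    std::vector<cv::Point2f> newPts;
    cv::goodFeaturesToTrack(frame, newPts, 50, 0.1, minDistance, featureMask,
                            3 /* blockSize */, false /* useHarrisDetector */, 0.04 /* k */);
    std::cout << newPts.size() << " new features detected" << std::endl;
    return 0;
}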
@@ -310,26 +331,34 @@

     // check for connected components
     int lastInstant = frameNum+params.minFeatureTime-maxTrajectoryLength;
     if (lastInstant > 0 && frameNum%10==0) {
       featureGraph.connectedComponents(lastInstant);
-      vector<vector<unsigned int> > featureGroups = featureGraph.getFeatureGroups();
+      vector<vector<FeatureTrajectoryPtr> > featureGroups;
+      featureGraph.getFeatureGroups(featureGroups);
       for (unsigned int i=0; i<featureGroups.size(); ++i) {
-        trajectoryDB->writeObject(savedObjectId, featureGroups[i], -1, 1, string("objects"), string("objects_features"));
+        vector<unsigned int> featureNumbers;
+        for (unsigned int j=0; j<featureGroups[i].size(); ++j)
+          featureNumbers.push_back(featureGroups[i][j]->getId());
+        trajectoryDB->writeObject(savedObjectId, featureNumbers, 0 /* unknown */, 1, string("objects"), string("objects_features"));
         savedObjectId++;
       }
     }

     if (frameNum%100 ==0)
       cout << featureGraph.informationString() << endl;
   }

   // save remaining objects
   featureGraph.connectedComponents(frameNum+maxTrajectoryLength+1);
-  vector<vector<unsigned int> > featureGroups = featureGraph.getFeatureGroups();
+  vector<vector<FeatureTrajectoryPtr> > featureGroups;
+  featureGraph.getFeatureGroups(featureGroups);
   for (unsigned int i=0; i<featureGroups.size(); ++i) {
-    trajectoryDB->writeObject(savedObjectId, featureGroups[i], -1, 1, string("objects"), string("objects_features"));
+    vector<unsigned int> featureNumbers;
+    for (unsigned int j=0; j<featureGroups[i].size(); ++j)
+      featureNumbers.push_back(featureGroups[i][j]->getId());
+    trajectoryDB->writeObject(savedObjectId, featureNumbers, 0 /* unknown */, 1, string("objects"), string("objects_features"));
     savedObjectId++;
   }

   trajectoryDB->endTransaction();
   trajectoryDB->disconnect();
343 cout << "The program tracks features" << endl; 372 cout << "The program tracks features" << endl;
344 trackFeatures(params); 373 trackFeatures(params);
345 } else if (params.groupFeatures) { 374 } else if (params.groupFeatures) {
346 cout << "The program groups features" << endl; 375 cout << "The program groups features" << endl;
347 groupFeatures(params); 376 groupFeatures(params);
377 } else {
378 cout << "Main option missing or misspelt" << endl;
348 } 379 }
349 380
350 return 0; 381 return 0;
351 } 382 }
352 383