comparison c/feature-based-tracking.cpp @ 924:a71455bd8367

work in progress on undistortion acceleration
author Nicolas Saunier <nicolas.saunier@polymtl.ca>
date Fri, 07 Jul 2017 18:01:45 -0400
parents daa992ac6b44
children dbd81710d515
comparison of 923:238008f81c16 and 924:a71455bd8367
@@ -11,10 +11,11 @@
 #include "opencv2/imgproc/imgproc.hpp"
 #include "opencv2/video/tracking.hpp"
 #include "opencv2/features2d/features2d.hpp"
 #include "opencv2/highgui/highgui.hpp"
 #include "opencv2/objdetect/objdetect.hpp"
+#include "opencv2/calib3d/calib3d.hpp"
 
 #include <boost/foreach.hpp>
 
 #include <iostream>
 #include <vector>
@@ -108,20 +109,23 @@
 ": width=" << videoSize.width <<
 ", height=" << videoSize.height <<
 ", nframes=" << nFrames << endl;
 
 Mat map1, map2;
+Mat intrinsicCameraMatrix, newIntrinsicCameraMatrix;
 if (params.undistort) {
-Mat intrinsicCameraMatrix = ::loadMat(params.intrinsicCameraFilename, " ");
-Mat newIntrinsicCameraMatrix = intrinsicCameraMatrix.clone();
-videoSize = Size(static_cast<int>(round(videoSize.width*params.undistortedImageMultiplication)), static_cast<int>(round(videoSize.height*params.undistortedImageMultiplication)));
-newIntrinsicCameraMatrix.at<float>(0,2) = videoSize.width/2.;
-newIntrinsicCameraMatrix.at<float>(1,2) = videoSize.height/2.;
-initUndistortRectifyMap(intrinsicCameraMatrix, params.distortionCoefficients, Mat::eye(3,3, CV_32FC1), newIntrinsicCameraMatrix, videoSize, CV_32FC1, map1, map2);
-
-cout << "Undistorted width=" << videoSize.width <<
-", height=" << videoSize.height << endl;
+intrinsicCameraMatrix = ::loadMat(params.intrinsicCameraFilename, " ");
+//videoSize = Size(static_cast<int>(round(videoSize.width*params.undistortedImageMultiplication)), static_cast<int>(round(videoSize.height*params.undistortedImageMultiplication)));
+// newIntrinsicCameraMatrix = intrinsicCameraMatrix.clone();
+// newIntrinsicCameraMatrix.at<float>(0,2) = undistortedVideoSize.width/2.;
+// newIntrinsicCameraMatrix.at<float>(1,2) = undistortedVideoSize.height/2.;
+Size undistortedVideoSize = Size(static_cast<int>(round(videoSize.width*params.undistortedImageMultiplication)), static_cast<int>(round(videoSize.height*params.undistortedImageMultiplication)));
+newIntrinsicCameraMatrix = getDefaultNewCameraMatrix(intrinsicCameraMatrix, undistortedVideoSize, true);//getOptimalNewCameraMatrix(intrinsicCameraMatrix, params.distortionCoefficients, videoSize, 1, undistortedVideoSize);//, 0, true);
+initUndistortRectifyMap(intrinsicCameraMatrix, params.distortionCoefficients, Mat::eye(3,3, CV_32FC1) /* 0 ?*/, newIntrinsicCameraMatrix, undistortedVideoSize, CV_32FC1, map1, map2);
+
+cout << "Undistorted width=" << undistortedVideoSize.width <<
+", height=" << undistortedVideoSize.height << endl;
 }
 
 Mat mask = imread(params.maskFilename, 0);
 if (mask.empty()) {
 cout << "Mask filename " << params.maskFilename << " could not be opened." << endl;
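
The rewritten block above stops resizing videoSize in place and shifting the principal point by hand; it computes a separate undistortedVideoSize and lets getDefaultNewCameraMatrix recenter the principal point, keeping map1/map2 only for producing a full undistorted frame when one is actually needed. A minimal sketch of that setup, assuming the intrinsic matrix and distortion coefficients are CV_32F as in the rest of the file (setupUndistortion is an illustrative name, not a function of this code):

    // Sketch (not the project's code): build the enlarged "new" camera matrix
    // and the remap tables the way the hunk above does.
    #include "opencv2/core/core.hpp"
    #include "opencv2/imgproc/imgproc.hpp"
    #include "opencv2/calib3d/calib3d.hpp"

    static void setupUndistortion(const cv::Mat& intrinsicCameraMatrix,
                                  const cv::Mat& distortionCoefficients,
                                  const cv::Size& videoSize,
                                  float undistortedImageMultiplication,
                                  cv::Mat& newIntrinsicCameraMatrix,
                                  cv::Mat& map1, cv::Mat& map2) {
        // enlarge the output so the warped image is not cropped at the borders
        cv::Size undistortedVideoSize(cvRound(videoSize.width*undistortedImageMultiplication),
                                      cvRound(videoSize.height*undistortedImageMultiplication));
        // same focal lengths, principal point recentered in the larger image
        newIntrinsicCameraMatrix = cv::getDefaultNewCameraMatrix(intrinsicCameraMatrix, undistortedVideoSize, true);
        // the maps are only needed when a full undistorted frame has to be produced (display)
        cv::initUndistortRectifyMap(intrinsicCameraMatrix, distortionCoefficients,
                                    cv::Mat::eye(3, 3, CV_32FC1), newIntrinsicCameraMatrix,
                                    undistortedVideoSize, CV_32FC1, map1, map2);
    }
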
@@ -134,21 +138,21 @@
 trajectoryDB->createTable("positions");
 trajectoryDB->createTable("velocities");
 trajectoryDB->beginTransaction();
 
 std::vector<KeyPoint> prevKpts, currKpts;
-std::vector<Point2f> prevPts, currPts, newPts;
+std::vector<Point2f> prevPts, currPts, newPts, undistortedPts;
 std::vector<uchar> status;
 std::vector<float> errors;
 Mat prevDesc, currDesc;
 
 std::vector<FeatureTrajectoryPtr> lostFeatures;
 std::vector<FeaturePointMatch> featurePointMatches;
 
 int key = '?';
 unsigned int savedFeatureId=0;
-Mat frame = Mat::zeros(1, 1, CV_8UC1), currentFrameBW, previousFrameBW, undistortedFrame;
+Mat frame, currentFrameBW, previousFrameBW, displayFrame; // = Mat::zeros(1, 1, CV_8UC1)
 
 unsigned int lastFrameNum = nFrames;
 if (params.nFrames > 0)
 lastFrameNum = MIN(params.frame1+static_cast<unsigned int>(params.nFrames), nFrames);
 
@@ -159,39 +163,47 @@
 cout << "Empty frame " << frameNum << ", breaking (" << frame.empty() << " [" << frame.size().width << "x" << frame.size().height << "])" << endl;
 break;
 } else if (frameNum%50 ==0)
 cout << "frame " << frameNum << endl;
 
-if (params.undistort) {
-remap(frame, undistortedFrame, map1, map2, interpolationMethod, BORDER_CONSTANT, 0.);
-frame = undistortedFrame;
+// if (params.undistort) {
+// remap(frame, undistortedFrame, map1, map2, interpolationMethod, BORDER_CONSTANT, 0.);
+// frame = undistortedFrame;
 
-if (frame.size() != videoSize) {
-cout << "Different frame size " << frameNum << ", breaking ([" << frame.size().width << "x" << frame.size().height << "])" << endl;
-break;
-}
-}
+// if (frame.size() != videoSize) {
+// cout << "Different frame size " << frameNum << ", breaking ([" << frame.size().width << "x" << frame.size().height << "])" << endl;
+// break;
+// }
+// }
 
 cvtColor(frame, currentFrameBW, CV_RGB2GRAY);
 
 if (!prevPts.empty()) {
 currPts.clear();
 calcOpticalFlowPyrLK(previousFrameBW, currentFrameBW, prevPts, currPts, status, errors, window, params.pyramidLevel, TermCriteria(static_cast<int>(TermCriteria::COUNT)+static_cast<int>(TermCriteria::EPS) /* = 3 */, params.maxNumberTrackingIterations, params.minTrackingError), /* int flags = */ 0, params.minFeatureEigThreshold);
 /// \todo try calcOpticalFlowFarneback
 
+if (params.undistort) {
+undistortPoints(currPts, undistortedPts, intrinsicCameraMatrix, params.distortionCoefficients, noArray(), newIntrinsicCameraMatrix);
+//currPts = undistortedPts;
+}
+
 std::vector<Point2f> trackedPts;
 std::vector<FeaturePointMatch>::iterator iter = featurePointMatches.begin();
 while (iter != featurePointMatches.end()) {
 bool deleteFeature = false;
 
 int currPtX = static_cast<int>(floor(currPts[iter->pointNum].x));
 int currPtY = static_cast<int>(floor(currPts[iter->pointNum].y));
-if ((status[iter->pointNum] =!0) &&
+if ((status[iter->pointNum] != 0) &&
 (currPtX >= 0) && (currPtX < videoSize.width) &&
 (currPtY >= 0) && (currPtY < videoSize.height) &&
-(mask.at<uchar>(currPtY, currPtX) != 0)) {
-iter->feature->addPoint(frameNum, currPts[iter->pointNum], homography);
+(mask.at<uchar>(currPtY, currPtX) != 0)) { // todo check point in mask in image space
+if (params.undistort)
+iter->feature->addPoint(frameNum, undistortedPts[iter->pointNum], homography);
+else
+iter->feature->addPoint(frameNum, currPts[iter->pointNum], homography);
 
 deleteFeature = iter->feature->isDisplacementSmall(params.nDisplacements, minTotalFeatureDisplacement)
 || !iter->feature->isMotionSmooth(params.accelerationBound, params.deviationBound);
 if (deleteFeature)
 iter->feature->shorten();
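
The lines added in this hunk are the core of the changeset: calcOpticalFlowPyrLK keeps running on the raw, distorted frames, and only the tracked coordinates go through undistortPoints, with newIntrinsicCameraMatrix passed as the P argument so the output is in pixel coordinates of the enlarged undistorted image rather than normalized coordinates. The per-frame undistortion cost then scales with the number of features instead of the number of pixels, which appears to be the acceleration the commit message refers to. A self-contained sketch of that step (undistortPixels is an illustrative helper, not in the code):

    // Sketch: undistort tracked point coordinates instead of the whole frame.
    #include <vector>
    #include "opencv2/core/core.hpp"
    #include "opencv2/imgproc/imgproc.hpp"
    #include "opencv2/calib3d/calib3d.hpp"

    static std::vector<cv::Point2f> undistortPixels(const std::vector<cv::Point2f>& distortedPts,
                                                    const cv::Mat& intrinsicCameraMatrix,
                                                    const cv::Mat& distortionCoefficients,
                                                    const cv::Mat& newIntrinsicCameraMatrix) {
        std::vector<cv::Point2f> undistortedPts;
        if (!distortedPts.empty())
            // passing newIntrinsicCameraMatrix as P yields pixel coordinates in the
            // enlarged undistorted image instead of normalized coordinates
            cv::undistortPoints(distortedPts, undistortedPts, intrinsicCameraMatrix,
                                distortionCoefficients, cv::noArray(), newIntrinsicCameraMatrix);
        return undistortedPts;
    }

Note that the bound and mask tests above still use currPts, i.e. distorted image coordinates, consistent with the new inline comment about checking the point in the mask in image space.
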
@@ -215,31 +227,45 @@
 currPts = trackedPts;
 assert(currPts.size() == featurePointMatches.size());
 saveFeatures(lostFeatures, *trajectoryDB, "positions", "velocities");
 
 if (params.display) {
+if (params.undistort)
+remap(frame, displayFrame, map1, map2, interpolationMethod, BORDER_CONSTANT, 0.);
+else
+displayFrame = frame.clone();
+
 BOOST_FOREACH(FeaturePointMatch fp, featurePointMatches)
-fp.feature->draw(frame, invHomography, Colors::red());
+fp.feature->draw(displayFrame, invHomography, Colors::red());
 }
 }
 
 // adding new features, using mask around existing feature positions
 Mat featureMask = mask.clone();
 for (unsigned int n=0;n<currPts.size(); n++)
 for (int j=MAX(0, currPts[n].x-params.minFeatureDistanceKLT); j<MIN(videoSize.width, currPts[n].x+params.minFeatureDistanceKLT+1); j++)
 for (int i=MAX(0, currPts[n].y-params.minFeatureDistanceKLT); i<MIN(videoSize.height, currPts[n].y+params.minFeatureDistanceKLT+1); i++)
 featureMask.at<uchar>(i,j)=0;
 goodFeaturesToTrack(currentFrameBW, newPts, params.maxNFeatures, params.featureQuality, params.minFeatureDistanceKLT, featureMask, params.blockSize, params.useHarrisDetector, params.k);
-BOOST_FOREACH(Point2f p, newPts) {
-FeatureTrajectoryPtr f = FeatureTrajectoryPtr(new FeatureTrajectory(frameNum, p, homography));
+if (params.undistort) {
+undistortPoints(newPts, undistortedPts, intrinsicCameraMatrix, params.distortionCoefficients, noArray(), newIntrinsicCameraMatrix);
+//newPts = undistortedPts;
+}
+//BOOST_FOREACH(Point2f p, newPts) {
+for (unsigned int i=0; i<newPts.size(); i++) {
+FeatureTrajectoryPtr f;
+if (params.undistort) // write function
+f = FeatureTrajectoryPtr(new FeatureTrajectory(frameNum, undistortedPts[i], homography));
+else
+f = FeatureTrajectoryPtr(new FeatureTrajectory(frameNum, newPts[i], homography));
 featurePointMatches.push_back(FeaturePointMatch(f, currPts.size()));
-currPts.push_back(p);
+currPts.push_back(newPts[i]);
 }
 
-if (params.display) {
+if (params.display && !displayFrame.empty()) {
 imshow("mask", featureMask*256);
-imshow("frame", frame);
+imshow("frame", displayFrame);
 key = waitKey(2);
 }
 previousFrameBW = currentFrameBW.clone();
 prevPts = currPts;
 }
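
With tracking kept in distorted image space, the full-frame remap survives only in the display branch of this last hunk, and the later guard checks !displayFrame.empty() because displayFrame is only filled once features are being tracked and params.display is set. A sketch of that display-only path (showTrackingFrame is an illustrative name; INTER_LINEAR stands in for the interpolationMethod variable used in the diff):

    // Sketch: pay for the full-frame remap only when a window is actually shown.
    #include "opencv2/core/core.hpp"
    #include "opencv2/imgproc/imgproc.hpp"
    #include "opencv2/highgui/highgui.hpp"

    static void showTrackingFrame(const cv::Mat& frame, bool undistort,
                                  const cv::Mat& map1, const cv::Mat& map2) {
        cv::Mat displayFrame;
        if (undistort)
            // same map1/map2 as produced by initUndistortRectifyMap above
            cv::remap(frame, displayFrame, map1, map2, cv::INTER_LINEAR,
                      cv::BORDER_CONSTANT, cv::Scalar::all(0.));
        else
            displayFrame = frame.clone();
        // feature trajectories would be drawn on displayFrame here, as in the hunk above
        cv::imshow("frame", displayFrame);
        cv::waitKey(2);
    }
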