changeset 227:b7612c6d5702

cleaned the code: removed commented-out capture setup in feature-based-tracking.cpp, seek to the first frame with CV_CAP_PROP_POS_FRAMES instead of skipping empty frames, and added a firstFrameNum parameter to playVideo and displayTrajectories in cvutils.py
author Nicolas Saunier <nicolas.saunier@polymtl.ca>
date Wed, 27 Jun 2012 09:52:06 -0400
parents 91197f6a03fe
children 23da16442433
files c/feature-based-tracking.cpp python/cvutils.py
diffstat 2 files changed, 37 insertions(+), 46 deletions(-)
--- a/c/feature-based-tracking.cpp	Tue Jun 26 10:15:11 2012 -0400
+++ b/c/feature-based-tracking.cpp	Wed Jun 27 09:52:06 2012 -0400
@@ -77,37 +77,18 @@
 
   // BruteForceMatcher<Hamming> descMatcher;
   // vector<DMatch> matches;
-  Size videoSize;
-
-  // if( argc == 1 || (argc == 2 && strlen(argv[1]) == 1 && isdigit(argv[1][0]))) // if no parameter or number parameter
-  //   capture.open(argc == 2 ? argv[1][0] - '0' : 0);
-  // else if( argc >= 2 )
-  //   {
-  //     capture.open(argv[1]);
-  //     if( capture.isOpened() )
-  // 	videoSize = Size(capture.get(CV_CAP_PROP_FRAME_WIDTH), capture.get(CV_CAP_PROP_FRAME_HEIGHT));
-  // 	cout << "Video " << argv[1] <<
-  // 	  ": width=" << videoSize.width <<
-  // 	  ", height=" << videoSize.height <<
-  // 	  ", nframes=" << capture.get(CV_CAP_PROP_FRAME_COUNT) << endl;
-  //     if( argc > 2 && isdigit(argv[2][0]) ) // could be used to reach first frame, dumping library messages to log file (2> /tmp/log.txt)
-  //       {
-  // 	  sscanf(argv[2], "%d", &params.frame1);
-  //     	  cout << "seeking to frame #" << params.frame1 << endl;
-  //     	  //cap.set(CV_CAP_PROP_POS_FRAMES, pos);
-  // 	  for (int i=0; i<params.frame1; i++)
-  // 	    capture >> frame;
-  //       }
-  //   }
 
   VideoCapture capture;
+  Size videoSize;
+  int nFrames = -1;
   capture.open(params.videoFilename);
   if(capture.isOpened()) {
     videoSize = Size(capture.get(CV_CAP_PROP_FRAME_WIDTH), capture.get(CV_CAP_PROP_FRAME_HEIGHT));
+    nFrames = capture.get(CV_CAP_PROP_FRAME_COUNT);
     cout << "Video " << params.videoFilename <<
       ": width=" << videoSize.width <<
       ", height=" << videoSize.height <<
-      ", nframes=" << capture.get(CV_CAP_PROP_FRAME_COUNT) << endl;
+      ", nframes=" << nFrames << endl;
   } else {
     cout << "Video filename " << params.videoFilename << " could not be opened. Exiting." << endl;
     exit(0);
@@ -145,17 +126,26 @@
   int key = '?';
   unsigned int savedFeatureId=0;
   Mat frame, currentFrameBW, previousFrameBW;
-  for (int frameNum = params.frame1; ((frameNum-params.frame1 < params.nFrames) || (params.nFrames < 0)) && !::interruptionKey(key); frameNum++) {
+
+  unsigned int lastFrameNum = nFrames;
+  if (params.nFrames >= 0)
+    lastFrameNum = min(params.frame1+params.nFrames, nFrames);
+  
+  capture.set(CV_CAP_PROP_POS_FRAMES, params.frame1);
+  for (unsigned int frameNum = params.frame1; (frameNum < lastFrameNum) && !::interruptionKey(key); frameNum++) {
       capture >> frame;
-      cout << frameNum << " " << capture.get(CV_CAP_PROP_POS_FRAMES) << " " << prevPts.size() << endl;
-      int emptyFrameNum = 0;
-      while (frame.empty()) {
-	cerr << "empty frame " << emptyFrameNum  << " " << capture.get(CV_CAP_PROP_POS_FRAMES)<< endl;
-	capture >> frame;//break;
-	emptyFrameNum++;
-	if (emptyFrameNum>=3000)
-	  exit(0);
-      }
+      if (frameNum%50 ==0)
+	cout << "frame " << frameNum << endl;
+      //capture.get(CV_CAP_PROP_POS_FRAMES) << " " << prevPts.size() << endl;
+
+      // int emptyFrameNum = 0;
+      // while (frame.empty()) {
+      // 	cerr << "empty frame " << emptyFrameNum  << " " << capture.get(CV_CAP_PROP_POS_FRAMES)<< endl;
+      // 	capture >> frame;//break;
+      // 	emptyFrameNum++;
+      // 	if (emptyFrameNum>=3000)
+      // 	  exit(0);
+      // }
       
       cvtColor(frame, currentFrameBW, CV_RGB2GRAY);
 
@@ -281,14 +271,11 @@
   }
   cout << "Longest trajectory: " << maxTrajectoryLength << endl;
 
-  // alternative: read and load features in batches directly select * from positions where trajectory_id in select trajectory_id from positions where frame_number <100 and frame_number > 50 group by trajectory_id
-  int queryIntervalLength = 10;
-
   FeatureGraph featureGraph(params.mmConnectionDistance, params.mmSegmentationDistance, params.minFeatureTime, params.minNFeaturesPerGroup);
 
   // main loop
-  int frameNum;
-  unsigned int firstFrameNum, lastFrameNum;
+  unsigned int frameNum;
+  unsigned int firstFrameNum = -1, lastFrameNum = -1;
   trajectoryDB->firstLastInstants(firstFrameNum, lastFrameNum);
   firstFrameNum = MAX(firstFrameNum, params.frame1);
   if (params.nFrames>0)
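
Note: the hunks above replace the old empty-frame skip loop with a single seek via CV_CAP_PROP_POS_FRAMES and a precomputed lastFrameNum. Below is a minimal sketch of the same OpenCV pattern, written in Python for brevity; the filename and the frame1/nFrames values are placeholders, not part of this changeset, and the cv2.cv constants assume an OpenCV 2.x build like the one used here.

import cv2

# placeholders mirroring the params used in feature-based-tracking.cpp
videoFilename = 'video.avi'
frame1 = 100      # first frame to process
nFrames = 200     # number of frames to process (< 0 means until the end)

capture = cv2.VideoCapture(videoFilename)
if capture.isOpened():
    totalNFrames = int(capture.get(cv2.cv.CV_CAP_PROP_FRAME_COUNT))
    lastFrameNum = totalNFrames
    if nFrames >= 0:
        lastFrameNum = min(frame1 + nFrames, totalNFrames)
    # seek once instead of reading and discarding frames one by one
    capture.set(cv2.cv.CV_CAP_PROP_POS_FRAMES, frame1)
    frameNum = frame1
    while frameNum < lastFrameNum:
        ret, frame = capture.read()
        if not ret:  # stop on a failed read instead of spinning on empty frames
            break
        if frameNum % 50 == 0:
            print('frame {0}'.format(frameNum))
        frameNum += 1
capture.release()

Seeking once up front avoids decoding and discarding every frame before frame1, and bounding the loop by lastFrameNum removes the need for the empty-frame counter that is commented out above.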
--- a/python/cvutils.py	Tue Jun 26 10:15:11 2012 -0400
+++ b/python/cvutils.py	Wed Jun 27 09:52:06 2012 -0400
@@ -92,14 +92,15 @@
         for i in range(0, last-1):
             cv2.line(img, positions[i].asint().astuple(), positions[i+1].asint().astuple(), color)
 
-    def playVideo(filename, firstFrame = 0):
+    def playVideo(filename, firstFrameNum = 0):
         '''Plays the video'''
         capture = cv2.VideoCapture(filename)
         if capture.isOpened():
             key = -1
-            frameNum = 1
-            capture.set(cv2.cv.CV_CAP_PROP_POS_FRAMES, firstFrame)
-            while key!= 113: # 'q'
+            ret = True
+            frameNum = firstFrameNum
+            capture.set(cv2.cv.CV_CAP_PROP_POS_FRAMES, firstFrameNum)
+            while ret and key!= 113: # 'q'
                 ret, img = capture.read()
                 if ret:
                     print('frame {0}'.format(frameNum))
@@ -128,16 +129,19 @@
                         images.append(img)
         return images
 
-    def displayTrajectories(videoFilename, objects, homography = None):
+    def displayTrajectories(videoFilename, objects, homography = None, firstFrameNum = 0):
         '''Displays the objects overlaid frame by frame over the video '''
         capture = cv2.VideoCapture(videoFilename)
         if capture.isOpened():
             key = -1
-            frameNum = 0
-            while key!= 113: # 'q'
+            ret = True
+            frameNum = firstFrameNum
+            capture.set(cv2.cv.CV_CAP_PROP_POS_FRAMES, firstFrameNum)
+            while ret and key!= 113: # 'q'
+                print('capture')
                 ret, img = capture.read()
                 if ret:
-                    print(frameNum)
+                    print('frame {0}'.format(frameNum))
                     for obj in objects:
                         if obj.existsAtInstant(frameNum):
                             if obj.getFirstInstant() == frameNum:
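
Note: the cvutils.py hunks add a firstFrameNum argument to playVideo and displayTrajectories and stop both loops as soon as capture.read() fails. A small usage sketch follows; the video filename, the starting frame and the empty objects list are placeholders, and the call form assumes the functions are exposed at module level, as the rest of cvutils.py suggests.

# hypothetical usage of the updated cvutils functions
import cvutils

# play the video starting at frame 500 instead of frame 0
cvutils.playVideo('video.avi', firstFrameNum = 500)

# overlay already-loaded objects, seeking to the same starting frame;
# 'objects' would come from the project's trajectory loading code (not shown in this changeset)
objects = []
cvutils.displayTrajectories('video.avi', objects, homography = None, firstFrameNum = 500)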