view c/feature-based-tracking.cpp @ 124:1e68e18b1aa5

renaming and working on klt
author Nicolas Saunier <nicolas.saunier@polymtl.ca>
date Tue, 16 Aug 2011 12:31:22 -0400
parents df3bdd8e50ba
children 28907fde9855

//#include "Feature.hpp"
#include "Parameters.hpp"
#include "utils.hpp"

#include "src/Trajectory.h"

#include "opencv2/highgui/highgui.hpp"
//#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/features2d/features2d.hpp"

#include <iostream>
//#include <list>
#include <vector>
#include <cstring> // strlen
#include <cctype>  // isdigit
#include <cstdio>  // sscanf

using namespace std;
using namespace cv;

// draw the matched keypoints and their displacement from the previous frame,
// ignoring matches with an implausibly large displacement
void drawMatchesRelative(const vector<KeyPoint>& train, const vector<KeyPoint>& query, const vector<DMatch>& matches, Mat& img) {
  for (size_t i = 0; i < matches.size(); i++) {
    Point2f pt_new = query[matches[i].queryIdx].pt;
    Point2f pt_old = train[matches[i].trainIdx].pt;
    Point2f dist = pt_new - pt_old;
    if (norm(dist) < 20) {
      cv::line(img, pt_new, pt_old, Scalar(125, 255, 125), 1);
      cv::circle(img, pt_new, 2, Scalar(255, 0, 125), 1);
    }
  }
}
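
// The TODO in main() below mentions adding the KLT parameters and reusing
// GoodFeaturesToTrackDetector-based code. The helper below is only a minimal,
// illustrative sketch of such a KLT step with the OpenCV C++ API
// (goodFeaturesToTrack + calcOpticalFlowPyrLK); the function name and all numeric
// values are placeholders, not the project's tuned KLT parameters.
#include "opencv2/video/tracking.hpp" // calcOpticalFlowPyrLK

void sketchKLTStep(const Mat& prevGray, const Mat& currGray,
                   vector<Point2f>& prevPts, vector<Point2f>& currPts) {
  // (re)detect good features to track when too few points remain
  if (prevPts.size() < 100)
    goodFeaturesToTrack(prevGray, prevPts, 500 /*max corners*/, 0.01 /*quality level*/, 5. /*min distance*/);
  if (prevPts.empty())
    return;

  // track the points from the previous gray frame into the current one
  vector<unsigned char> status;
  vector<float> err;
  calcOpticalFlowPyrLK(prevGray, currGray, prevPts, currPts, status, err, Size(7, 7), 3);

  // keep only the points that were successfully tracked
  vector<Point2f> keptPrev, keptCurr;
  for (size_t i = 0; i < status.size(); i++)
    if (status[i]) {
      keptPrev.push_back(prevPts[i]);
      keptCurr.push_back(currPts[i]);
    }
  prevPts = keptPrev;
  currPts = keptCurr;
}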

int main(int argc, char *argv[]) {
  vector<TrajectoryPoint2f> features;
  BriefDescriptorExtractor brief(32);
  
  VideoCapture capture;

  Mat frame, display;
  KLTFeatureTrackingParameters params;
  params.frame1 = 0;
  params.nFrames = -1;
  // TODO add the KLT parameters, reuse the code from (a sketch of the KLT step is given above main)
  // GoodFeaturesToTrackDetector feature_detector(Params.max_nfeatures, Params.feature_quality, Params.min_feature_distance_klt, Params.window_size, Params.useHarrisDetector_GoodFeaturesToTrackDetector, Params.k_GoodFeaturesToTrackDetector);
  // search descriptor_match.h

  if( argc == 1 || (argc == 2 && strlen(argv[1]) == 1 && isdigit(argv[1][0]))) // no argument, or a single digit: open a camera device
    capture.open(argc == 2 ? argv[1][0] - '0' : 0);
  else if( argc >= 2 )
    {
      capture.open(argv[1]);
      if( capture.isOpened() )
	cout << "Video " << argv[1] <<
	  ": width=" << capture.get(CV_CAP_PROP_FRAME_WIDTH) <<
	  ", height=" << capture.get(CV_CAP_PROP_FRAME_HEIGHT) <<
	  ", nframes=" << capture.get(CV_CAP_PROP_FRAME_COUNT) << endl;
      if( argc > 2 && isdigit(argv[2][0]) ) // optional second argument: first frame to process (library messages can be dumped to a log file with 2> /tmp/log.txt)
        {
	  sscanf(argv[2], "%d", &params.frame1);
      	  cout << "seeking to frame #" << params.frame1 << endl;
      	  //cap.set(CV_CAP_PROP_POS_FRAMES, pos);
	  for (int i=0; i<params.frame1; i++)
	    capture >> frame;
        }
    }

    //  capture.open(atoi(argv[1]));
  if (!capture.isOpened())
    {
      //help(argv);
      cout << "capture device " << (argc > 1 ? argv[1] : "0") << " failed to open!" << endl;
      return 1;
    }
  
  vector<DMatch> matches;
  
  BruteForceMatcher<Hamming> desc_matcher;
  
  vector<KeyPoint> prevKpts, currKpts;
  vector<unsigned char> match_mask;
  
  Mat gray;
  
  Mat prevDesc, currDesc;
  // FAST detector (threshold 10, with non-maximum suppression), wrapped in a grid
  // adaptor that spreads up to DESIRED_FTRS keypoints over a 4x4 grid of image cells
  const int DESIRED_FTRS = 500;
  GridAdaptedFeatureDetector detector(new FastFeatureDetector(10, true), DESIRED_FTRS, 4, 4);

  // TODO data structure: pairs of (trajectory pointer, keypoint index)
  
  for (int frameNum = 0; (params.frame1+frameNum < params.nFrames) || (params.nFrames < 0); frameNum++) {
      //capture.set(CV_CAP_PROP_POS_FRAMES, frameNum);
      //capture.grab();capture.grab();capture.retrieve(frame);
      capture >> frame;
      //cout << capture.get(CV_CAP_PROP_POS_FRAMES) << endl;
      while (frame.empty())
	capture >> frame;//break;
      
      cvtColor(frame, gray, CV_BGR2GRAY); // VideoCapture frames are BGR
      
      detector.detect(gray, currKpts);
      //cout << currKpts.size() << " kpts" << endl;
      
      brief.compute(gray, currKpts, currDesc); //Compute brief descriptors at each keypoint location
      
      //display = frame.clone();
      if (!prevKpts.empty())
        {
	  desc_matcher.match(currDesc, prevDesc, matches);
	  cout << "matches:" << matches.size() << endl;
	  drawMatchesRelative(prevKpts, currKpts, matches, frame);
	  //drawMatches(frame, prevKpts, frame, currKpts, matches, display);//, Scalar::all(-1), Scalar::all(-1), vector<vector<char> >(), DrawMatchesFlags::DRAW_OVER_OUTIMG);
	}

      imshow("frame", frame);
      prevKpts = currKpts;
      currDesc.copyTo(prevDesc);
      int key = waitKey(0);
      if (::interruptionKey(key))
	break;
    }  
  
  return 0;
}


/* ------------------ DOCUMENTATION ------------------ */


/*! \mainpage 

This project, called Traffic Intelligence, is a collection of software tools for transportation. Other documents are:

- \ref feature_based_tracking

The code is partially self-documented using Doxygen comment formatting. The documentation can be generated by typing \c doxygen in the main directory (or <tt>make doc</tt> on a system where make is installed). 

*/

/*! \page feature_based_tracking Feature-based Tracking: User Manual

This document describes a software tool for object tracking in video data, developed for road traffic monitoring and safety diagnosis. It is part of a larger collection of software tools for transportation called Traffic Intelligence. 

The tool relies on feature-based tracking, a robust object tracking method particularly suited to the extraction of traffic data such as trajectories and speeds. The best description of this method is given in <a href="http://nicolas.saunier.confins.net/data/saunier06crv.html">this paper</a>. The program has a command-line interface, and this document briefly explains how to use it. Keep in mind that this is a work in progress and that major changes are continuously being made. 
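
As an illustration based only on the argument parsing in the current source file, a typical invocation passes either a camera number or a video file name, optionally followed by the number of the first frame to process; the standard error stream can be redirected to keep library messages out of the console. The executable and file names below are placeholders:

\verbatim
./feature-based-tracking video-sequence.avi 100 2> /tmp/log.txt
\endverbatim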

\section license License

The code is licensed under the MIT open source license (http://www.opensource.org/licenses/mit-license).

If you make use of this piece of software, please cite one of my papers, e.g. N. Saunier, T. Sayed and K. Ismail. Large Scale Automated Analysis of Vehicle Interactions and Collisions. Transportation Research Record: Journal of the Transportation Research Board, 2147:42-50, 2010. In any case, I would be very happy to know about any use of the code, and to discuss any opportunities for collaboration. 

Contact me at nicolas.saunier@polymtl.ca and learn more about my work at http://nicolas.saunier.confins.net.

*/