changeset 463:cb9683f9efe7

Merged in szangenehpour/trafficintelligence (pull request #5) TTC Sample
author Nicolas Saunier <nicolas.saunier@polymtl.ca>
date Sun, 23 Feb 2014 22:56:54 -0500
parents 55b424d98b68 (diff) af2222c0c9c0 (current diff)
children dcc821b98efc
diffstat 11 files changed, 336 insertions(+), 126 deletions(-)
--- a/CMakeLists.txt	Mon Feb 03 15:41:57 2014 -0500
+++ b/CMakeLists.txt	Sun Feb 23 22:56:54 2014 -0500
@@ -14,15 +14,19 @@
 	CMAKE_CXX_FLAGS "-g -Wall"
 )
 
+set(CMAKE_RUNTIME_OUTPUT_DIRECTORY bin)
+
 add_executable(feature-based-tracking
 	c/cvutils.cpp
 	c/feature-based-tracking.cpp
 	c/Motion.cpp
 	c/Parameters.cpp
 	c/utils.cpp
+	c/InputFrameListModule.cpp
+	c/InputVideoFileModule.cpp
 	)
 
-find_package(Boost REQUIRED program_options)
+find_package(Boost REQUIRED program_options filesystem system)
 find_library(TrajectoryManagement_LIBRARY TrajectoryManagementAndAnalysis)
 find_path(TrajectoryManagement_INCLUDE_DIR src/Trajectory.h)
 
--- a/python/events.py	Mon Feb 03 15:41:57 2014 -0500
+++ b/python/events.py	Sun Feb 23 22:56:54 2014 -0500
@@ -67,6 +67,7 @@
         self.roadUser2 = roadUser2
         self.categoryNum = categoryNum
         self.indicators = {}
+        self.interactionInterval = None
 
     def getRoadUserNumbers(self):
         return self.roadUserNumbers
@@ -85,6 +86,7 @@
         velocityAngles = {}
         distances = {}#[0]*int(self.timeInterval.length())
         speedDifferentials = {}
+        interactionInstants = []
         for instant in self.timeInterval:
             deltap = self.roadUser1.getPositionAtInstant(instant)-self.roadUser2.getPositionAtInstant(instant)
             v1 = self.roadUser1.getVelocityAtInstant(instant)
@@ -94,18 +96,20 @@
             collisionCourseDotProducts[instant] = moving.Point.dot(deltap, deltav)
             distances[instant] = deltap.norm2()
             speedDifferentials[instant] = deltav.norm2()
-            #if collisionCourseDotProducts[instant] > 0:
+            if collisionCourseDotProducts[instant] > 0:
+                interactionInstants.append(instant)
             collisionCourseAngles[instant] = arccos(collisionCourseDotProducts[instant]/(distances[instant]*speedDifferentials[instant]))
 
-        # todo shorten the time intervals based on the interaction definition
+        if len(interactionInstants) >= 2:
+            self.interactionInterval = moving.TimeInterval(interactionInstants[0], interactionInstants[-1])
+        else:
+            self.interactionInterval = moving.TimeInterval()
         self.addIndicator(indicators.SeverityIndicator(Interaction.indicatorNames[0], collisionCourseDotProducts))
         self.addIndicator(indicators.SeverityIndicator(Interaction.indicatorNames[1], collisionCourseAngles))
         self.addIndicator(indicators.SeverityIndicator(Interaction.indicatorNames[2], distances))
         self.addIndicator(indicators.SeverityIndicator(Interaction.indicatorNames[4], velocityAngles))
         self.addIndicator(indicators.SeverityIndicator(Interaction.indicatorNames[5], speedDifferentials))
 
-        # todo test for interaction instants and interval, compute indicators
-
         # if we have features, compute other indicators
         if self.roadUser1.features != None and self.roadUser2.features != None:
             minDistance={}
@@ -141,8 +145,8 @@
         self.videoFilename= videoFilename	
 
     def addInteractionType(self,interactionType):
-	''' interaction types: conflict or collision if they are known'''
-        self.interactionType= interactionType			
+        ''' interaction types: conflict or collision if they are known'''
+        self.interactionType= interactionType
 
 def createInteractions(objects):
     '''Create all interactions of two co-existing road users
@@ -163,7 +167,8 @@
     Returns the prototype indices (in the interaction list) and the label of each indicator (interaction)
 
     if an indicator profile (time series) is different enough (<minSimilarity), 
-    it will become a new prototype. Otherwise, it will be assigned to an existing prototypes'''
+    it will become a new prototype. 
+    Non-prototype interactions will be assigned to an existing prototype'''
 
     # sort indicators based on length
     indices = range(similarityMatrix.shape[0])
@@ -192,6 +197,14 @@
 
     return prototypeIndices, labels
 
+def prototypeMultivariateCluster(interactions, similarityMatrices, indicatorNames, minSimilarities, minClusterSize):
+    '''Finds example indicator time series (several indicators) for all interactions
+
+    if any interaction indicator time series is different enough (<minSimilarity),
+    it will become a new prototype. 
+    Non-prototype interactions will be assigned to an existing prototype if all indicators are similar enough'''
+    pass
+
 # TODO:
 #http://stackoverflow.com/questions/3288595/multiprocessing-using-pool-map-on-a-function-defined-in-a-class
 #http://www.rueckstiess.net/research/snippets/show/ca1d7d90
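Note (not part of the changeset): the interactionInterval computation added above treats an instant as part of the interaction when the dot product of the relative position and relative velocity is positive, and takes the interval from the first to the last such instant. A minimal standalone sketch of that test, using plain numpy arrays instead of moving.Point and moving.TimeInterval (array shapes are an assumption for illustration):

    import numpy as np

    def interactionInstants(positions1, velocities1, positions2, velocities2):
        '''Sketch: bounds of the instants where the collision course dot product is positive.
        Arguments are assumed to be (nInstants, 2) arrays of positions and velocities.'''
        deltap = positions1 - positions2
        deltav = velocities2 - velocities1           # with this sign, positive dot product means closing in
        dots = np.sum(deltap*deltav, axis=1)         # collision course dot product per instant
        instants = np.flatnonzero(dots > 0)
        if instants.size >= 2:
            return instants[0], instants[-1]         # bounds of the interaction interval
        return None                                  # corresponds to the empty TimeInterval case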
--- a/python/indicators.py	Mon Feb 03 15:41:57 2014 -0500
+++ b/python/indicators.py	Sun Feb 23 22:56:54 2014 -0500
@@ -97,7 +97,7 @@
         return values
 
 
-def distanceForLCSS(x, y): # lambda x,y:abs(x-y)
+def l1Distance(x, y): # lambda x,y:abs(x-y)
     if x == None or y == None:
         return float('inf')
     else:
@@ -107,8 +107,8 @@
 
 class LCSS(utilsLCSS):
     '''Adapted LCSS class for indicators, same pattern'''
-    def __init__(self, threshold, delta = float('inf'), minLength = 0, aligned = False, lengthFunc = min):
-        utilsLCSS.__init__(self, lambda x,y: (distanceForLCSS(x,y) <= threshold), delta, aligned, lengthFunc)
+    def __init__(self, similarityFunc, delta = float('inf'), minLength = 0, aligned = False, lengthFunc = min):
+        utilsLCSS.__init__(self, similarityFunc, delta, aligned, lengthFunc)
         self.minLength = minLength
 
     def checkIndicator(self, indicator):
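Note (not part of the changeset): with the constructor change above, callers of indicators.LCSS now pass the similarity test directly instead of a distance threshold. A hedged usage sketch reproducing the previous threshold behaviour with l1Distance (the 0.1 threshold, delta and minLength values are hypothetical):

    from indicators import LCSS, l1Distance

    # equivalent of the former LCSS(threshold = 0.1, ...): the similarity test is explicit
    lcss = LCSS(similarityFunc = lambda x, y: l1Distance(x, y) <= 0.1, delta = 10, minLength = 5)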
--- a/python/moving.py	Mon Feb 03 15:41:57 2014 -0500
+++ b/python/moving.py	Sun Feb 23 22:56:54 2014 -0500
@@ -173,6 +173,9 @@
     def __sub__(self, other):
         return Point(self.x-other.x, self.y-other.y)
 
+    def __neg__(self):
+        return Point(-self.x, -self.y)
+
     def multiply(self, alpha):
         return Point(self.x*alpha, self.y*alpha)
 
@@ -612,6 +615,19 @@
         if i < self.__len__():
             self.lanes[i] = lane
 
+    def getIntersections(self, S1, lane = None):
+        '''Returns a list of the indices at which the trajectory 
+        goes past the curvilinear coordinate S1
+        (in the provided lane if lane != None);
+        the list is empty if there is no crossing'''
+        indices = []
+        for i in xrange(self.length()-1):
+            q1=self.__getitem__(i)
+            q2=self.__getitem__(i+1)
+            if q1[0] <= S1 <= q2[0] and (lane == None or (self.lanes[i] == lane and self.lanes[i+1] == lane)):
+                indices.append(i+(S1-q1[0])/(q2[0]-q1[0]))
+        return indices
+
 ##################
 # Moving Objects
 ##################
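Note (not part of the changeset): getIntersections returns fractional indices obtained by linear interpolation between the two instants bracketing the crossing. A minimal standalone sketch of that interpolation on a plain list of curvilinear coordinates (ignoring lanes):

    def crossingIndices(curvilinearS, S1):
        '''Sketch of the interpolation in CurvilinearTrajectory.getIntersections:
        fractional indices where the curvilinear coordinate crosses S1.'''
        indices = []
        for i in range(len(curvilinearS)-1):
            s1, s2 = curvilinearS[i], curvilinearS[i+1]
            if s1 <= S1 <= s2:
                indices.append(i + (S1-s1)/float(s2-s1))  # linear interpolation between instants i and i+1
        return indices

    # crossingIndices([0., 2., 5., 9.], 4.) -> [1.666...]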
--- a/python/pavement.py	Mon Feb 03 15:41:57 2014 -0500
+++ b/python/pavement.py	Sun Feb 23 22:56:54 2014 -0500
@@ -1,59 +1,32 @@
 #! /usr/bin/env python
 '''Tools for processing and analyzing pavement marking data'''
 
+import utils
+
 import numpy as np
 
 __metaclass__ = type
 
-class RTSS:
-    'class for data related to a RTSS, including agregating pavement marking measurements'
-
-    def __init__(self, id):
-        self.id = id
-
-class MarkingTest:
-    '''class for a test site for a given product'''
-
-    def __init__(self, siteId, paintingDate, paintingType, color, data):
-        self.siteId = siteId
-        self.paintingDate = paintingDate
-        self.paintingType = paintingType
-        self.color = color
-        self.data = data
-        self.nMeasures = len(data)
-
-    def plot(self, measure, options = 'o', dayRatio = 1., **kwargs):
-        from matplotlib.pyplot import plot
-        plot(self.data['jours']/float(dayRatio), 
-             self.data[measure], options, **kwargs)
+durabilities = {1: 98, #96 to 100
+                2: 85, #75 to 96
+                3: 72, #50 to 75
+                4: 32, #15 to 50
+                5: 7 #0 to 15
+                }
 
-    def getMarkingMeasures(self, dataLabel):
-        from numpy import isnan
-        nonZeroIndices = ~isnan(self.data[dataLabel])
-        return self.data[nonZeroIndices]['jours'], self.data[nonZeroIndices][dataLabel]
-
-    def plotMarkingMeasures(self, measure, options = 'o', dayRatio = 1., **kwargs):
-        for i in range(1,7):
-            self.plot('{}_{}'.format(measure, i), options, dayRatio, **kwargs)
-
-
-def occ_max(a):
-    if a != []:
-        s = set(a)
-        l = list(s)
-        occ = []
-        for i in range(len(l)):
-            b = 0
-            for j in range(len(a)):
-                if a[j] == l[i]:
-                    b += 1
-            occ.append([l[i], b])
-        nbre_occs = []
-        for i in range(len(occ)):
-            nbre_occs.append(occ[i][1])
-        return occ[nbre_occs.index(max(nbre_occs))][0]
-    else:
-        return ""
+roadFunctionalClasses = {40: "Collectrice",
+                         20: "Nationale",
+                         30: "Regionale",
+                         10: "Autoroute",
+                         60: "Acces ressources",
+                         51: "Local 1",
+                         52: "Local 2",
+                         53: "Local 3",
+                         15: "Aut (PRN)",
+                         25: "Nat (PRN)",
+                         70: "Acces isolees",
+                         99: "Autres"
+                         }
 
 def caracteristiques(rtss, maintenanceLevel, rtssWeatherStation, fmr, paintType):
     '''Computes characteristic data for the RTSS (class rtss) 
@@ -80,7 +53,7 @@
         x_moy, y_moy = '',''	
 
     # determination info fmr
-    age_revtm, classe_fonct, type_revtm, milieu, djma, pourc_camions = [], [], [], [], [], []
+    age_revtm, classe_fonct, type_revtm, milieu, djma, pourc_camions, vit_max = [], [], [], [], [], [], []
     if rtss.id in fmr['rtss_debut']:
         for i in range(len(fmr)):
             if fmr['rtss_debut'][i] == rtss.id:
@@ -90,18 +63,22 @@
                 milieu.append(fmr['des_cod_mil'][i])
                 djma.append(fmr['val_djma'][i])
                 pourc_camions.append(fmr['val_pourc_camns'][i])
-        age_revtm = occ_max(age_revtm)
-        classe_fonct = occ_max(classe_fonct)
-        type_revtm = occ_max(type_revtm)
-        milieu = occ_max(milieu)
-        djma = occ_max(djma)
-        pourc_camions = occ_max(pourc_camions)
+                vit_max.append(fmr['val_limt_vitss'][i])
+        age_revtm = utils.mostCommon(age_revtm)
+        classe_fonct = utils.mostCommon(classe_fonct)
+        type_revtm = utils.mostCommon(type_revtm)
+        milieu = utils.mostCommon(milieu)
+        djma = utils.mostCommon(djma)
+        vit_max = utils.mostCommon(vit_max)
+        if vit_max < 0:
+            vit_max = ''
+        pourc_camions = utils.mostCommon(pourc_camions)
         if pourc_camions == "" or pourc_camions < 0:
             djma_camions = ""
         else:
             djma_camions = pourc_camions*djma/100
     else:
-        age_revtm, classe_fonct, type_revtm, milieu, djma, djma_camions  = '','','','','',''
+        age_revtm, classe_fonct, type_revtm, milieu, djma, djma_camions, vit_max  = '','','','','','',''
 
     # determination type peinture
     peinture_rd, peinture_rg, peinture_cl = [], [], []
@@ -112,13 +89,13 @@
                 peinture_rd.append((paintType['peinture_rd'][i]))
                 peinture_rg.append((paintType['peinture_rg'][i]))
                 peinture_cl.append((paintType['peinture_cl'][i]))
-        peinture_lrd = occ_max(peinture_rd)
-        peinture_lrg = occ_max(peinture_rg)
-        peinture_lc = occ_max(peinture_cl)
+        peinture_lrd = utils.mostCommon(peinture_rd)
+        peinture_lrg = utils.mostCommon(peinture_rg)
+        peinture_lc = utils.mostCommon(peinture_cl)
     else:
         peinture_lrd, peinture_lrg, peinture_lc = '','',''		
 
-    return (exigence, x_moy, y_moy, age_revtm, classe_fonct, type_revtm, milieu, djma, djma_camions, peinture_lrd, peinture_lrg, peinture_lc)
+    return (exigence, x_moy, y_moy, age_revtm, classe_fonct, type_revtm, milieu, djma, djma_camions, vit_max, peinture_lrd, peinture_lrg, peinture_lc)
 
 def winterMaintenanceIndicators(data, startDate, endDate, circuitReference, snowThreshold):
     '''Computes several winter maintenance indicators
@@ -151,50 +128,176 @@
 
     return (somme_eau, somme_neige, neigeMTQ_sup_seuil, somme_abrasif, somme_sel, somme_lc, somme_lrg, somme_lrd, premiere_neige, autres_abrasifs)
 
-def ecWeatherIndicators(data, startDate, endDate, snowThreshold):
+def ecWeatherIndicators(data, startDate, endDate, snowThreshold, datatype, minProportionMeasures = 0.):
     '''Computes the indicators from Environment Canada files
     (loaded as a recarray using csv2rec in data),
-    between start and end dates (datetime.datetime objects)'''
+    between start and end dates (datetime.datetime objects)
+
+    datatype indicates Environment Canada data ('ec') or else MTQ data
+    minProportionMeasures is the minimum proportion of measurements required to compute the indicators'''
+    from matplotlib.mlab import find
     nbre_jours_T_negatif,nbre_jours_gel_degel,pluie_tot,neige_tot,ecart_type_T = 0,0,0,0,0
     compteur,nbre_jours_gel_consecutifs=0,0
     tmoys = []
     seuils_T = [20,15,10,5]
     deltas_T = [0,0,0,0]
-    for i in range(int((endDate - startDate).days)+1):
-        if data['tmax'][i] != '' and data['tmax'][i] != None:
-            tmax = float(data['tmax'][i].replace(',','.'))
+    startIndex = find(data['date'] == startDate)
+    nDays = (endDate - startDate).days+1
+    if startIndex.size != 0:
+        for i in range(startIndex, startIndex+int(nDays)):
+            if not np.isnan(data['tmax'][i]):
+                tmax = data['tmax'][i]            
+            else:
+                tmax = None
+            if not np.isnan(data['tmin'][i]):
+                tmin = data['tmin'][i]
+            else:
+                tmin = None
+            if datatype == 'ec':
+                if data['pluie_tot'][i] != None and not np.isnan(data['pluie_tot'][i]):
+                    pluie_tot  += data['pluie_tot'][i]
+                if data['neige_tot'][i] != None and not np.isnan(data['neige_tot'][i]):
+                    neige_tot  += data['neige_tot'][i]
+            if tmax != None:
+                if tmax < 0:
+                    nbre_jours_T_negatif += 1
+            if tmax != None and tmin != None:
+                if tmax > 0 and tmin < 0:
+                    nbre_jours_gel_degel += 1
+                for l in range(len(seuils_T)):
+                    if tmax - tmin >=seuils_T[l]:
+                        deltas_T[l] += 1
+            if not np.isnan(data['tmoy'][i]):
+                tmoys.append(data['tmoy'][i])
+            if tmax != None:
+                if tmax < 0:
+                    compteur += 1
+                elif tmax >= 0 and compteur >= nbre_jours_gel_consecutifs:
+                    nbre_jours_gel_consecutifs = compteur
+                    compteur = 0
+                else:
+                    compteur = 0
+            nbre_jours_gel_consecutifs = max(nbre_jours_gel_consecutifs,compteur)
+    if len(tmoys) > 0 and float(len(tmoys))/nDays >= minProportionMeasures:
+        if tmoys != []:
+            ecart_type_T = np.std(tmoys)
+        else:
+            ecart_type_T = None
+        if neige_tot < snowThreshold:
+            neigeEC_sup_seuil = 0
+        else:
+            neigeEC_sup_seuil = 1
+        return (nbre_jours_T_negatif,nbre_jours_gel_degel, deltas_T, nbre_jours_gel_consecutifs, pluie_tot, neige_tot, neigeEC_sup_seuil, ecart_type_T)
+    else:
+        return [None]*2+[[None]*len(seuils_T)]+[None]*5
+
+def mtqWeatherIndicators(data, startDate, endDate, tmax, tmin, tmoy):
+    from matplotlib.mlab import find
+    nbre_jours_T_negatif,nbre_jours_gel_degel,ecart_type_T = 0,0,0
+    compteur,nbre_jours_gel_consecutifs=0,0
+    tmoys = []
+    seuils_T = [20,15,10,5]
+    deltas_T = [0,0,0,0]
+    startIndex = find(data['date'] == startDate)
+    nDays = (endDate - startDate).days+1
+    for i in range(startIndex, startIndex+nDays):
+        if tmax[i] < 0:
+            nbre_jours_T_negatif += 1
+        if tmax[i] > 0 and tmin[i] < 0:
+            nbre_jours_gel_degel += 1
+        for l in range(len(seuils_T)):
+            if tmax[i] - tmin[i] >=seuils_T[l]:
+                deltas_T[l] += 1
+        tmoys.append(tmoy[i])
+        if tmax[i] < 0:
+            compteur += 1
+        elif tmax[i] >= 0 and compteur >= nbre_jours_gel_consecutifs:
+            nbre_jours_gel_consecutifs = compteur
+            compteur = 0
+        else:
+            compteur = 0
+        nbre_jours_gel_consecutifs = max(nbre_jours_gel_consecutifs,compteur)
+        if tmoys != []:
+            ecart_type_T = np.std(tmoys)
         else:
-            tmax = None
-        if data['tmin'][i] != '' and data['tmin'][i] != None:
-            tmin = float(data['tmin'][i].replace(',','.'))
-        if data['pluie_tot'][i] != '' and data['pluie_tot'][i] != None:
-            pluie_tot  += float(data['pluie_tot'][i].replace(',','.'))
-        if data['neige_tot'][i] != '' and data['neige_tot'][i] != None:
-            neige_tot  += float(data['neige_tot'][i].replace(',','.'))
-        if tmax != None:
-            if tmax < 0:
-                nbre_jours_T_negatif += 1
-        if tmax != None and data['tmin'][i] != '' and data['tmin'][i] != None:
-            if tmax > 0 and tmin < 0:
-                nbre_jours_gel_degel += 1
-            for l in range(len(seuils_T)):
-                if tmax - tmin >=seuils_T[l]:
-                    deltas_T[l] += 1
-        if data['tmoy'][i] != '' and data['tmoy'][i] != None:
-            tmoys.append(float(data['tmoy'][i].replace(',','.')))
-        if tmax != None:
-            if tmax < 0:
-                compteur += 1
-            elif tmax >= 0 and compteur >= nbre_jours_gel_consecutifs:
-                nbre_jours_gel_consecutifs = compteur
-                compteur = 0
-            else:
-                compteur = 0
-        nbre_jours_gel_consecutifs = max(nbre_jours_gel_consecutifs,compteur)
-    ecart_type_T = np.std(tmoys)
-    if neige_tot < snowThreshold:
-        neigeEC_sup_seuil = 0
-    else:
-        neigeEC_sup_seuil = 1
+            ecart_type_T = None
+
+    return (nbre_jours_T_negatif,nbre_jours_gel_degel, deltas_T, nbre_jours_gel_consecutifs, ecart_type_T)
+
+class RTSS:
+    '''class for data related to a RTSS:
+    - aggregating pavement marking measurements
+    - RTSS characteristics from FMR: pavement type, age, AADT, truck AADT
+    - winter maintenance level from V155
+
+    If the highway is divided, the RTSS ends with G or D and the two directions are distinct: there is no ambiguity
+    - retroreflectivity types: CB, RJ and RB
+    If undivided, the RTSS ends with C
+    - durability is fine: ETAT_MARQG_RG ETAT_MARQG_CL ETAT_MARQG_RD (+SG/SD, but recent)
+    - retroreflectivity: CJ is the center line, RB and SB are left/right depending on whether DEBUT-FIN>0 or <0
+    '''
+
+    def __init__(self, _id, name, data):
+        self.id = _id
+        self.name = name
+        self.data = data
+
+class MarkingTest:
+    '''class for a test site for a given product
+
+    including the series of measurements over the years'''
+
+    def __init__(self, _id, paintingDate, paintingType, color, data):
+        self.id = _id
+        self.paintingDate = paintingDate
+        self.paintingType = paintingType
+        self.color = color
+        self.data = data
+        self.nMeasures = len(data)
+
+    def getSite(self):
+        return int(self.id[:2])
+
+    def getTestAttributes(self):
+        return [self.paintingType, self.color, self.paintingDate.year]
 
-    return (nbre_jours_T_negatif,nbre_jours_gel_degel, deltas_T, nbre_jours_gel_consecutifs, pluie_tot, neige_tot, neigeEC_sup_seuil, ecart_type_T)
+    def plot(self, measure, options = 'o', dayRatio = 1., **kwargs):
+        from matplotlib.pyplot import plot
+        plot(self.data['jours']/float(dayRatio), 
+             self.data[measure], options, **kwargs)
+
+    def getMarkingMeasures(self, dataLabel):
+        nonZeroIndices = ~np.isnan(self.data[dataLabel])
+        return self.data[nonZeroIndices]['jours'], self.data[nonZeroIndices][dataLabel]
+
+    def plotMarkingMeasures(self, measure, options = 'o', dayRatio = 1., **kwargs):
+        for i in range(1,7):
+            self.plot('{}_{}'.format(measure, i), options, dayRatio, **kwargs)
+
+    def computeMarkingMeasureVariations(self, dataLabel, lanePositions, weatherData, snowThreshold, minProportionMeasures = 0.):
+        '''Computes, for each pair of successive measurements:
+        if lanePositions == None:
+        measure variation, initial measure, time duration, weather indicators
+        
+        TODO if there are measurements per lane, add a variable for lane position (position1 to 6)
+        if lanePositions == list of integers (range(1,7)):
+        measure variation, initial measure, time duration, lane position1, weather indicators
+        measure variation, initial measure, time duration, lane position2, weather indicators
+        ...'''
+        variationData = []
+        if lanePositions == None:
+            nonZeroIndices = ~np.isnan(self.data[dataLabel])
+            days = self.data[nonZeroIndices]['jours']
+            dates = self.data[nonZeroIndices]['date_mesure']
+            measures = self.data[nonZeroIndices][dataLabel]
+            for i in range(1, len(dates)):
+                nDaysTNegative, nDaysThawFreeze, deltaTemp, nConsecutiveFrozenDays, totalRain, totalSnow, snowAboveThreshold, stdevTemp = ecWeatherIndicators(weatherData, dates[i-1], dates[i], snowThreshold, 'ec', minProportionMeasures)
+                if dates[i-1].year+1 == dates[i].year:
+                    winter = 1
+                    # avoid carrying over a stale value when the first year has already passed
+                    firstWinter = 1 if days[i-1] < 365 else 0
+                else:
+                    winter = 0
+                    firstWinter = 0
+                variationData.append([measures[i-1]-measures[i], measures[i-1], days[i]-days[i-1], days[i-1], winter, firstWinter, nDaysTNegative, nDaysThawFreeze] + deltaTemp + [nConsecutiveFrozenDays, totalRain, totalSnow, snowAboveThreshold, stdevTemp])
+        return variationData
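Note (not part of the changeset): ecWeatherIndicators and mtqWeatherIndicators above share the same frozen-day counters (days with tmax < 0 and longest run of consecutive such days). A simplified standalone sketch of that logic, not the exact original code (which also tracks thaw-freeze days and temperature spreads):

    def frozenDayCounts(tmax):
        '''Sketch: number of days with tmax < 0 and longest run of consecutive
        such days; missing values (None) are simply skipped.'''
        nNegativeDays = 0
        longestRun = 0
        currentRun = 0
        for t in tmax:
            if t is None:
                continue
            if t < 0:
                nNegativeDays += 1
                currentRun += 1
            else:
                longestRun = max(longestRun, currentRun)
                currentRun = 0
        return nNegativeDays, max(longestRun, currentRun)

    # frozenDayCounts([-2., -1., 3., -5., None, -1.]) -> (4, 2)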
--- a/python/prediction.py	Mon Feb 03 15:41:57 2014 -0500
+++ b/python/prediction.py	Sun Feb 23 22:56:54 2014 -0500
@@ -215,20 +215,19 @@
                                                maxSpeed = self.maxSpeed)]
 
 class NormalAdaptationPredictionParameters(PredictionParameters):
-    def __init__(self, maxSpeed, nPredictedTrajectories, maxAcceleration, maxSteering, useFeatures = False):
+    def __init__(self, maxSpeed, nPredictedTrajectories, accelerationDistribution, steeringDistribution, useFeatures = False):
+        '''An example of acceleration and steering distributions is
+        lambda: random.triangular(-maxAcceleration, maxAcceleration, 0.)
+        '''
         if useFeatures:
             name = 'point set normal adaptation'
         else:
             name = 'normal adaptation'
         PredictionParameters.__init__(self, name, maxSpeed)
         self.nPredictedTrajectories = nPredictedTrajectories
-        self.maxAcceleration = maxAcceleration
-        self.maxSteering = maxSteering
         self.useFeatures = useFeatures
-        self.accelerationDistribution = lambda: random.triangular(-self.maxAcceleration, 
-                                                                   self.maxAcceleration, 0.)
-        self.steeringDistribution = lambda: random.triangular(-self.maxSteering, 
-                                                               self.maxSteering, 0.)
+        self.accelerationDistribution = accelerationDistribution
+        self.steeringDistribution = steeringDistribution
         
     def __str__(self):
         return PredictionParameters.__str__(self)+' {0} {1} {2}'.format(self.nPredictedTrajectories, 
@@ -271,21 +270,19 @@
         return predictedTrajectories
 
 class EvasiveActionPredictionParameters(PredictionParameters):
-    def __init__(self, maxSpeed, nPredictedTrajectories, minAcceleration, maxAcceleration, maxSteering, useFeatures = False):
+    def __init__(self, maxSpeed, nPredictedTrajectories, accelerationDistribution, steeringDistribution, useFeatures = False):
+        '''The suggested acceleration distribution may not be symmetric, e.g.
+        lambda: random.triangular(minAcceleration, maxAcceleration, 0.)'''
+
         if useFeatures:
             name = 'point set evasive action'
         else:
             name = 'evasive action'
         PredictionParameters.__init__(self, name, maxSpeed)
         self.nPredictedTrajectories = nPredictedTrajectories
-        self.minAcceleration = minAcceleration
-        self.maxAcceleration = maxAcceleration
-        self.maxSteering = maxSteering
         self.useFeatures = useFeatures
-        self.accelerationDistribution = lambda: random.triangular(self.minAcceleration, 
-                                                                  self.maxAcceleration, 0.)
-        self.steeringDistribution = lambda: random.triangular(-self.maxSteering, 
-                                                               self.maxSteering, 0.)
+        self.accelerationDistribution = accelerationDistribution
+        self.steeringDistribution = steeringDistribution
 
     def __str__(self):
         return PredictionParameters.__str__(self)+' {0} {1} {2} {3}'.format(self.nPredictedTrajectories, self.minAcceleration, self.maxAcceleration, self.maxSteering)
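Note (not part of the changeset): with the constructor changes above, callers now supply the random distributions directly. A hedged usage sketch for the normal adaptation case (the maxSpeed, maxAcceleration and maxSteering values are hypothetical, and the prediction module is assumed to be on the path):

    import random
    import prediction

    maxAcceleration, maxSteering = 2., 0.35    # hypothetical bounds
    predParams = prediction.NormalAdaptationPredictionParameters(
        maxSpeed = 25.,
        nPredictedTrajectories = 10,
        accelerationDistribution = lambda: random.triangular(-maxAcceleration, maxAcceleration, 0.),
        steeringDistribution = lambda: random.triangular(-maxSteering, maxSteering, 0.))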
--- a/python/tests/moving.txt	Mon Feb 03 15:41:57 2014 -0500
+++ b/python/tests/moving.txt	Sun Feb 23 22:56:54 2014 -0500
@@ -42,6 +42,8 @@
 
 >>> Point(3,4)-Point(1,7)
 (2.000000,-3.000000)
+>>> -Point(1,2)
+(-1.000000,-2.000000)
 
 >>> Point(3,2).norm2Squared()
 13
--- a/python/tests/utils.txt	Mon Feb 03 15:41:57 2014 -0500
+++ b/python/tests/utils.txt	Sun Feb 23 22:56:54 2014 -0500
@@ -44,6 +44,15 @@
 >>> stepPlot([3, 5, 7, 8], 1, 10, 0)
 ([1, 3, 3, 5, 5, 7, 7, 8, 8, 10], [0, 0, 1, 1, 2, 2, 3, 3, 4, 4])
 
+>>> mostCommon(['a','b','c','b'])
+'b'
+>>> mostCommon(['a','b','c','b', 'c'])
+'b'
+>>> mostCommon(range(10)+[1])
+1
+>>> mostCommon([range(2), range(4), range(2)])
+[0, 1]
+
 >>> lcss = LCSS(lambda x,y: abs(x-y) <= 0.1)
 >>> lcss.compute(range(5), range(5))
 5
--- a/python/utils.py	Mon Feb 03 15:41:57 2014 -0500
+++ b/python/utils.py	Sun Feb 23 22:56:54 2014 -0500
@@ -269,6 +269,33 @@
     return coef
 
 #########################
+# iterable section
+#########################
+
+def mostCommon(L):
+    '''Returns the most frequent element in an iterable
+
+    taken from http://stackoverflow.com/questions/1518522/python-most-common-element-in-a-list'''
+    from itertools import groupby
+    from operator import itemgetter
+    # get an iterable of (item, iterable) pairs
+    SL = sorted((x, i) for i, x in enumerate(L))
+    # print 'SL:', SL
+    groups = groupby(SL, key=itemgetter(0))
+    # auxiliary function to get "quality" for an item
+    def _auxfun(g):
+        item, iterable = g
+        count = 0
+        min_index = len(L)
+        for _, where in iterable:
+            count += 1
+            min_index = min(min_index, where)
+            # print 'item %r, count %r, minind %r' % (item, count, min_index)
+        return count, -min_index
+    # pick the highest-count/earliest item
+    return max(groups, key=_auxfun)[0]
+
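Note (not part of the changeset): as the doctests in tests/utils.txt show, ties in mostCommon are broken by the earliest first occurrence, since the max key is (count, -min_index):

    from utils import mostCommon

    # 'b' and 'c' both appear twice; 'b' occurs first, so it is returned
    assert mostCommon(['a', 'b', 'c', 'b', 'c']) == 'b'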
+#########################
 # sequence section
 #########################
 
--- a/scripts/compute-homography.py	Mon Feb 03 15:41:57 2014 -0500
+++ b/scripts/compute-homography.py	Sun Feb 23 22:56:54 2014 -0500
@@ -85,11 +85,11 @@
     print('Click on {0} points in the video frame'.format(nPoints))
     plt.figure()
     plt.imshow(videoImg)
-    videoPts = np.array(plt.ginput(nPoints))
+    videoPts = np.array(plt.ginput(nPoints, timeout=3000))
     print('Click on {0} points in the world image'.format(nPoints))
     plt.figure()
     plt.imshow(worldImg)
-    worldPts = unitsPerPixel*np.array(plt.ginput(nPoints))
+    worldPts = unitsPerPixel*np.array(plt.ginput(nPoints, timeout=3000))
     plt.close('all')
     homography, mask = cv2.findHomography(videoPts, worldPts)
     # save the points in file
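Note (not part of the changeset): the saved homography maps video pixel coordinates to world coordinates (already scaled by unitsPerPixel, since the world points are scaled before cv2.findHomography). A minimal sketch, assuming the standard 3x3 matrix returned by cv2.findHomography, of projecting a single image point:

    import numpy as np

    def projectPoint(homography, x, y):
        '''Applies a 3x3 homography to the image point (x, y)
        and returns the corresponding world coordinates.'''
        p = np.dot(homography, np.array([x, y, 1.]))
        return p[0]/p[2], p[1]/p[2]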
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/setup-tracking.sh	Sun Feb 23 22:56:54 2014 -0500
@@ -0,0 +1,39 @@
+version="$(wget -q -O - http://sourceforge.net/projects/opencvlibrary/files/opencv-unix | egrep -m1 -o '\"[0-9](\.[0-9])+' | cut -c2-)"
+echo "Removing any pre-installed ffmpeg and x264"
+sudo apt-get -qq remove ffmpeg x264 libx264-dev
+echo "Installing Dependencies"
+sudo apt-get -qq install libopencv-dev build-essential checkinstall cmake pkg-config yasm libtiff4-dev libjpeg-dev libjasper-dev libavcodec-dev libavformat-dev libswscale-dev libdc1394-22-dev libxine-dev libgstreamer0.10-dev libgstreamer-plugins-base0.10-dev libv4l-dev python-dev python-numpy libtbb-dev libqt4-dev libgtk2.0-dev libfaac-dev libmp3lame-dev libopencore-amrnb-dev libopencore-amrwb-dev libtheora-dev libvorbis-dev libxvidcore-dev x264 v4l-utils ffmpeg
+sudo apt-get -qq install libavfilter-dev libboost-dev libboost-program-options-dev libboost-graph-dev python-setuptools python-dev libcppunit-dev sqlite3 libsqlite3-dev cmake-qt-gui libboost-all-dev
+sudo easy_install -U mercurial
+echo "Installing OpenCV" $version
+cd
+mkdir OpenCV
+cd OpenCV
+echo "Downloading OpenCV" $version
+wget -O OpenCV-$version.tar.gz http://sourceforge.net/projects/opencvlibrary/files/opencv-unix/$version/opencv-"$version".tar.gz/download
+echo "Installing OpenCV" $version
+tar -xvf OpenCV-$version.tar.gz
+cd opencv-$version
+mkdir release
+cd release
+cmake -D CMAKE_BUILD_TYPE=RELEASE -D CMAKE_INSTALL_PREFIX=/usr/local ..
+make
+sudo make install
+echo "OpenCV" $version "ready to be used"
+
+echo "Installing Traffic Intelligence..."
+cd
+mkdir Research
+cd Research
+mkdir Code
+cd Code
+hg clone https://Nicolas@bitbucket.org/trajectories/trajectorymanagementandanalysis
+hg clone https://Nicolas@bitbucket.org/Nicolas/trafficintelligence
+cd trajectorymanagementandanalysis/trunk/src/TrajectoryManagementAndAnalysis/
+cmake .
+make TrajectoryManagementAndAnalysis
+cd
+cd trafficintelligence/c/
+make feature-based-tracking
+cd
+