changeset 795:a34ec862371f

merged with dev branch
author Nicolas Saunier <nicolas.saunier@polymtl.ca>
date Mon, 09 May 2016 15:33:11 -0400
parents 0a05883216cf 8eb8a6bd70e8
children c5f98916297e
files python/laurier.sqlite scripts/learn-motion-patterns.py
diffstat 20 files changed, 905 insertions(+), 272 deletions(-)
--- a/.hgtags	Tue Nov 03 13:48:56 2015 -0500
+++ b/.hgtags	Mon May 09 15:33:11 2016 -0400
@@ -1,1 +1,2 @@
 ea2a8e8e4e77dbf0374e3ddc935d3a28672e8456 v0.1
+6022350f81736f3881726076987300fb3381cd25 OpenCV 3.1
--- a/c/Makefile	Tue Nov 03 13:48:56 2015 -0500
+++ b/c/Makefile	Mon May 09 15:33:11 2016 -0400
@@ -19,17 +19,15 @@
 
 ifneq ($(OPENCV), 0)
 	CFLAGS += -DUSE_OPENCV
-	LDFLAGS += -lopencv_highgui -lopencv_core -lopencv_video -lopencv_ml -lopencv_features2d -lopencv_imgproc -lopencv_objdetect
+	LDFLAGS += -lopencv_highgui -lopencv_core -lopencv_video -lopencv_features2d -lopencv_imgproc -lopencv_imgcodecs -lopencv_videoio
 endif
 
 #LDFLAGS += -Wl,--as-needed -Wl,-Bdynamic,-lgcc_s,-Bstatic
 
 ifeq ($(UNAME), Linux)
-	#OPENCV_HOME=$(HOME)/Research/Code/opencv-2.4.10/release
 	OPENCV_HOME=/usr/local
 	INCLUDE+= -I$(OPENCV_HOME)/include -I$(OPENCV_HOME)/include/opencv
 	LIBS += -L$(OPENCV_HOME)/lib
-	#LIBS += -L/usr/lib/gcc/x86_64-linux-gnu/4.8/
 	LINUX_BOOST_PREFIX = /usr/local
 	CFLAGS += -DLINUX
 	EXE_EXTENSION=''
--- a/c/Motion.cpp	Tue Nov 03 13:48:56 2015 -0500
+++ b/c/Motion.cpp	Mon May 09 15:33:11 2016 -0400
@@ -3,7 +3,8 @@
 
 #include "src/TrajectoryDBAccessList.h"
 
-#include "opencv2/core/core.hpp"
+//#include "opencv2/core/core.hpp"
+#include "opencv2/imgproc/imgproc.hpp"
 #include "opencv2/highgui/highgui.hpp"
 
 #include <boost/graph/connected_components.hpp>
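
This merge largely tracks the OpenCV 2.4 to 3.1 API migration: highgui was split (imgcodecs and videoio are now linked separately in the Makefile above) and the cv2.cv.CV_* constants moved to the top-level cv2 namespace, as the cvutils.py hunks below show. A minimal sketch, assuming only that cv2 is importable, of guarding code that must run against both major versions (the video filename is a placeholder):

    import cv2

    # OpenCV 3 renamed cv2.cv.CV_CAP_PROP_* to cv2.CAP_PROP_*;
    # resolve the constant once so the rest of the code is version-agnostic
    if hasattr(cv2, 'CAP_PROP_FPS'): # OpenCV >= 3
        capPropFPS = cv2.CAP_PROP_FPS
    else:                            # OpenCV 2.x
        capPropFPS = cv2.cv.CV_CAP_PROP_FPS

    capture = cv2.VideoCapture('video.avi') # placeholder filename
    if capture.isOpened():
        print(capture.get(capPropFPS))
        capture.release()
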
--- a/python/cvutils.py	Tue Nov 03 13:48:56 2015 -0500
+++ b/python/cvutils.py	Mon May 09 15:33:11 2016 -0400
@@ -1,7 +1,7 @@
 #! /usr/bin/env python
 '''Image/Video utilities'''
 
-import utils
+import utils, moving
 
 try:
     import cv2
@@ -16,8 +16,16 @@
     print('Scikit-image library could not be loaded (HoG-based classification methods will not be available)')
     skimageAvailable = False
     
-from sys import stdout
-from numpy import dot, array, append, float32
+from sys import stdout, maxint
+from os import listdir
+from copy import deepcopy
+from math import floor, log10, ceil
+
+from numpy import dot, array, append, float32, loadtxt, savetxt, zeros, ones, uint8, identity, abs as npabs, logical_and, unravel_index, sum as npsum, isnan, mgrid, median, floor as npfloor, ceil as npceil
+from matplotlib.mlab import find
+from matplotlib.pyplot import imread, imsave
+
 
 #import aggdraw # agg on top of PIL (antialiased drawing)
 
@@ -77,27 +85,27 @@
 def matlab2PointCorrespondences(filename):
     '''Loads and converts the point correspondences saved 
     by the matlab camera calibration tool'''
-    from numpy.lib.io import loadtxt, savetxt
-    from numpy.lib.function_base import append
     points = loadtxt(filename, delimiter=',')
     savetxt(utils.removeExtension(filename)+'-point-correspondences.txt',append(points[:,:2].T, points[:,3:].T, axis=0))
 
 def loadPointCorrespondences(filename):
     '''Loads and returns the corresponding points in world (first 2 lines) and image spaces (last 2 lines)'''
-    from numpy import loadtxt, float32
     points = loadtxt(filename, dtype=float32)
     return  (points[:2,:].T, points[2:,:].T) # (world points, image points)
 
 def cvMatToArray(cvmat):
     '''Converts an OpenCV CvMat to numpy array.'''
     print('Deprecated, use new interface')
-    from numpy import zeros
     a = zeros((cvmat.rows, cvmat.cols))#array([[0.0]*cvmat.width]*cvmat.height)
     for i in xrange(cvmat.rows):
         for j in xrange(cvmat.cols):
             a[i,j] = cvmat[i,j]
     return a
 
+def createWhiteImage(height, width, filename):
+    'Creates and saves a plain white image of the given dimensions'
+    img = ones((height, width, 3), uint8)*255
+    imsave(filename, img)
+
 if opencvAvailable:
     def computeHomography(srcPoints, dstPoints, method=0, ransacReprojThreshold=3.0):
         '''Returns the homography matrix mapping from srcPoints to dstPoints (dimension Nx2)'''
@@ -132,8 +140,6 @@
             cv2.imshow(windowName, img)
 
     def computeUndistortMaps(width, height, undistortedImageMultiplication, intrinsicCameraMatrix, distortionCoefficients):
-        from copy import deepcopy
-        from numpy import identity
         newImgSize = (int(round(width*undistortedImageMultiplication)), int(round(height*undistortedImageMultiplication)))
         newCameraMatrix = deepcopy(intrinsicCameraMatrix)
         newCameraMatrix[0,2] = newImgSize[0]/2.
@@ -155,7 +161,7 @@
             key = -1
             ret = True
             frameNum = firstFrameNum
-            capture.set(cv2.cv.CV_CAP_PROP_POS_FRAMES, firstFrameNum)
+            capture.set(cv2.CAP_PROP_POS_FRAMES, firstFrameNum)
             while ret and not quitKey(key):
                 #ret, img = capture.read()
                 for i in xrange(step):
@@ -165,7 +171,7 @@
                         print('frame {0}'.format(frameNum))
                     frameNum+=step
                     if text is not None:
-                       cv2.putText(img, text, (10,50), cv2.cv.CV_FONT_HERSHEY_PLAIN, 1, cvRed) 
+                       cv2.putText(img, text, (10,50), cv2.FONT_HERSHEY_PLAIN, 1, cvRed) 
                     cvImshow(windowName, img, rescale)
                     key = cv2.waitKey(wait)
                     if saveKey(key):
@@ -176,37 +182,37 @@
 
     def infoVideo(filename):
         '''Provides all available info on video '''
-        cvPropertyNames = {cv2.cv.CV_CAP_PROP_FORMAT: "format",
-                           cv2.cv.CV_CAP_PROP_FOURCC: "codec (fourcc)",
-                           cv2.cv.CV_CAP_PROP_FPS: "fps",
-                           cv2.cv.CV_CAP_PROP_FRAME_COUNT: "number of frames",
-                           cv2.cv.CV_CAP_PROP_FRAME_HEIGHT: "heigh",
-                           cv2.cv.CV_CAP_PROP_FRAME_WIDTH: "width",
-                           cv2.cv.CV_CAP_PROP_RECTIFICATION: "rectification",
-                           cv2.cv.CV_CAP_PROP_SATURATION: "saturation"}
+        cvPropertyNames = {cv2.CAP_PROP_FORMAT: "format",
+                           cv2.CAP_PROP_FOURCC: "codec (fourcc)",
+                           cv2.CAP_PROP_FPS: "fps",
+                           cv2.CAP_PROP_FRAME_COUNT: "number of frames",
+                           cv2.CAP_PROP_FRAME_HEIGHT: "height",
+                           cv2.CAP_PROP_FRAME_WIDTH: "width",
+                           cv2.CAP_PROP_RECTIFICATION: "rectification",
+                           cv2.CAP_PROP_SATURATION: "saturation"}
         capture = cv2.VideoCapture(filename)
         if capture.isOpened():
-            for cvprop in [#cv2.cv.CV_CAP_PROP_BRIGHTNESS
-                    #cv2.cv.CV_CAP_PROP_CONTRAST
-                    #cv2.cv.CV_CAP_PROP_CONVERT_RGB
-                    #cv2.cv.CV_CAP_PROP_EXPOSURE
-                    cv2.cv.CV_CAP_PROP_FORMAT,
-                    cv2.cv.CV_CAP_PROP_FOURCC,
-                    cv2.cv.CV_CAP_PROP_FPS,
-                    cv2.cv.CV_CAP_PROP_FRAME_COUNT,
-                    cv2.cv.CV_CAP_PROP_FRAME_HEIGHT,
-                    cv2.cv.CV_CAP_PROP_FRAME_WIDTH,
-                    #cv2.cv.CV_CAP_PROP_GAIN,
-                    #cv2.cv.CV_CAP_PROP_HUE
-                    #cv2.cv.CV_CAP_PROP_MODE
-                    #cv2.cv.CV_CAP_PROP_POS_AVI_RATIO
-                    #cv2.cv.CV_CAP_PROP_POS_FRAMES
-                    #cv2.cv.CV_CAP_PROP_POS_MSEC
-                    #cv2.cv.CV_CAP_PROP_RECTIFICATION,
-                    #cv2.cv.CV_CAP_PROP_SATURATION
+            for cvprop in [#cv2.CAP_PROP_BRIGHTNESS
+                    #cv2.CAP_PROP_CONTRAST
+                    #cv2.CAP_PROP_CONVERT_RGB
+                    #cv2.CAP_PROP_EXPOSURE
+                    cv2.CAP_PROP_FORMAT,
+                    cv2.CAP_PROP_FOURCC,
+                    cv2.CAP_PROP_FPS,
+                    cv2.CAP_PROP_FRAME_COUNT,
+                    cv2.CAP_PROP_FRAME_HEIGHT,
+                    cv2.CAP_PROP_FRAME_WIDTH,
+                    #cv2.CAP_PROP_GAIN,
+                    #cv2.CAP_PROP_HUE
+                    #cv2.CAP_PROP_MODE
+                    #cv2.CAP_PROP_POS_AVI_RATIO
+                    #cv2.CAP_PROP_POS_FRAMES
+                    #cv2.CAP_PROP_POS_MSEC
+                    #cv2.CAP_PROP_RECTIFICATION,
+                    #cv2.CAP_PROP_SATURATION
             ]:
                 prop = capture.get(cvprop)
-                if cvprop == cv2.cv.CV_CAP_PROP_FOURCC and prop > 0:
+                if cvprop == cv2.CAP_PROP_FOURCC and prop > 0:
                     prop = int2FOURCC(int(prop))
                 print('Video {}: {}'.format(cvPropertyNames[cvprop], prop))
         else:
@@ -214,16 +220,15 @@
 
     def getImagesFromVideo(videoFilename, firstFrameNum = 0, nFrames = 1, saveImage = False, outputPrefix = 'image'):
         '''Returns nFrames images from the video sequence'''
-        from math import floor, log10
         images = []
         capture = cv2.VideoCapture(videoFilename)
         if capture.isOpened():
-            rawCount = capture.get(cv2.cv.CV_CAP_PROP_FRAME_COUNT)
+            rawCount = capture.get(cv2.CAP_PROP_FRAME_COUNT)
             if rawCount < 0:
                 rawCount = firstFrameNum+nFrames+1
             nDigits = int(floor(log10(rawCount)))+1
             ret = False
-            capture.set(cv2.cv.CV_CAP_PROP_POS_FRAMES, firstFrameNum)
+            capture.set(cv2.CAP_PROP_POS_FRAMES, firstFrameNum)
             imgNum = 0
             while imgNum<nFrames:
                 ret, img = capture.read()
@@ -246,7 +251,7 @@
     def getFPS(videoFilename):
         capture = cv2.VideoCapture(videoFilename)
         if capture.isOpened():
-            fps = capture.get(cv2.cv.CV_CAP_PROP_FPS)
+            fps = capture.get(cv2.CAP_PROP_FPS)
             capture.release()
             return fps
         else:
@@ -283,12 +288,9 @@
 
     def displayTrajectories(videoFilename, objects, boundingBoxes = {}, homography = None, firstFrameNum = 0, lastFrameNumArg = None, printFrames = True, rescale = 1., nFramesStep = 1, saveAllImages = False, undistort = False, intrinsicCameraMatrix = None, distortionCoefficients = None, undistortedImageMultiplication = 1., annotations = [], gtMatches = {}, toMatches = {}):
         '''Displays the objects overlaid frame by frame over the video '''
-        from moving import userTypeNames
-        from math import ceil, log10
-
         capture = cv2.VideoCapture(videoFilename)
-        width = int(capture.get(cv2.cv.CV_CAP_PROP_FRAME_WIDTH))
-        height = int(capture.get(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT))
+        width = int(capture.get(cv2.CAP_PROP_FRAME_WIDTH))
+        height = int(capture.get(cv2.CAP_PROP_FRAME_HEIGHT))
 
         windowName = 'frame'
         if rescale == 1.:
@@ -300,9 +302,8 @@
             key = -1
             ret = True
             frameNum = firstFrameNum
-            capture.set(cv2.cv.CV_CAP_PROP_POS_FRAMES, firstFrameNum)
+            capture.set(cv2.CAP_PROP_POS_FRAMES, firstFrameNum)
             if lastFrameNumArg is None:
-                from sys import maxint
                 lastFrameNum = maxint
             else:
                 lastFrameNum = lastFrameNumArg
@@ -333,12 +334,12 @@
                                 imgcrop, yCropMin, yCropMax, xCropMin, xCropMax = imageBox(img, obj, frameNum, homography, width, height)
                                 cv2.rectangle(img, (xCropMin, yCropMin), (xCropMax, yCropMax), cvBlue, 1)
                             objDescription = '{} '.format(obj.num)
-                            if userTypeNames[obj.userType] != 'unknown':
-                                objDescription += userTypeNames[obj.userType][0].upper()
+                            if moving.userTypeNames[obj.userType] != 'unknown':
+                                objDescription += moving.userTypeNames[obj.userType][0].upper()
                             if len(annotations) > 0: # if we loaded annotations, but there is no match
                                 if frameNum not in toMatches[obj.getNum()]:
                                     objDescription += " FA"
-                            cv2.putText(img, objDescription, obj.projectedPositions[frameNum-obj.getFirstInstant()].asint().astuple(), cv2.cv.CV_FONT_HERSHEY_PLAIN, 1, cvColors[obj.getNum()])
+                            cv2.putText(img, objDescription, obj.projectedPositions[frameNum-obj.getFirstInstant()].asint().astuple(), cv2.FONT_HERSHEY_PLAIN, 1, cvColors[obj.getNum()])
                     # plot object bounding boxes
                     if frameNum in boundingBoxes.keys():
                         for rect in boundingBoxes[frameNum]:
@@ -351,7 +352,7 @@
                                     color = cvColors[gtMatches[gt.getNum()][frameNum]] # same color as object
                                 else:
                                     color = cvRed
-                                    cv2.putText(img, 'Miss', gt.topLeftPositions[frameNum-gt.getFirstInstant()].asint().astuple(), cv2.cv.CV_FONT_HERSHEY_PLAIN, 1, cvRed)
+                                    cv2.putText(img, 'Miss', gt.topLeftPositions[frameNum-gt.getFirstInstant()].asint().astuple(), cv2.FONT_HERSHEY_PLAIN, 1, cvRed)
                                 cv2.rectangle(img, gt.topLeftPositions[frameNum-gt.getFirstInstant()].asint().astuple(), gt.bottomRightPositions[frameNum-gt.getFirstInstant()].asint().astuple(), color)
                     # saving images and going to next
                     if not saveAllImages:
@@ -361,10 +362,10 @@
                         cv2.imwrite('image-{{:0{}}}.png'.format(nZerosFilename).format(frameNum), img)
                     frameNum += nFramesStep
                     if nFramesStep > 1:
-                        capture.set(cv2.cv.CV_CAP_PROP_POS_FRAMES, frameNum)
+                        capture.set(cv2.CAP_PROP_POS_FRAMES, frameNum)
             cv2.destroyAllWindows()
         else:
-            print 'Cannot load file ' + videoFilename
+            print('Cannot load file ' + videoFilename)
 
     def computeHomographyFromPDTV(camera):
         '''Returns the homography matrix at ground level from PDTV camera
@@ -383,32 +384,27 @@
         map1 and map2 are the mapping functions from undistorted image
         to distorted (original image)
         map1(x,y) = originalx, originaly'''
-        from numpy import abs, logical_and, unravel_index, sum
-        from matplotlib.mlab import find
-        distx = abs(map1-x)
-        disty = abs(map2-y)
+        distx = npabs(map1-x)
+        disty = npabs(map2-y)
         indices = logical_and(distx<maxDistance, disty<maxDistance)
         closeCoordinates = unravel_index(find(indices), distx.shape) # returns i,j, ie y,x
         xWeights = 1-distx[indices]
         yWeights = 1-disty[indices]
-        return dot(xWeights, closeCoordinates[1])/sum(xWeights), dot(yWeights, closeCoordinates[0])/sum(yWeights)
+        return dot(xWeights, closeCoordinates[1])/npsum(xWeights), dot(yWeights, closeCoordinates[0])/npsum(yWeights)
 
     def undistortTrajectoryFromCVMapping(map1, map2, t):
         '''test 'perfect' inversion'''
-        from moving import Trajectory
-        from numpy import isnan
-        undistortedTrajectory = Trajectory()
+        undistortedTrajectory = moving.Trajectory()
         for i,p in enumerate(t):
             res = undistortedCoordinates(map1, map2, p.x,p.y)
             if not isnan(res).any():
                 undistortedTrajectory.addPositionXY(res[0], res[1])
             else:
-                print i,p,res
+                print('{} {} {}'.format(i,p,res))
         return undistortedTrajectory
 
     def computeInverseMapping(originalImageSize, map1, map2):
         'Computes inverse mapping from maps provided by cv2.initUndistortRectifyMap'
-        from numpy import ones, isnan
         invMap1 = -ones(originalImageSize)
         invMap2 = -ones(originalImageSize)
         for x in range(0,originalImageSize[1]):
@@ -432,7 +428,6 @@
             https://opencv-python-tutroals.readthedocs.org/en/latest/py_tutorials/py_calib3d/py_calibration/py_calibration.html
             Modified by Paul St-Aubin
             '''
-        from numpy import zeros, mgrid, float32, savetxt
         import glob, os
 
         # termination criteria
@@ -457,21 +452,22 @@
 
             # If found, add object points, image points (after refining them)
             if ret:
-                print 'Found pattern in '+fname
+                print('Found pattern in '+fname)
                 
-                if(secondPassSearch): 
+                if secondPassSearch:
                     corners = cv2.cornerSubPix(gray, corners, (11,11), (-1,-1), criteria)
 
                 objpoints.append(objp)
                 imgpoints.append(corners)
 
                 # Draw and display the corners
-                if(display):
+                if display:
                     img = cv2.drawChessboardCorners(img, (checkerBoardSize[1],checkerBoardSize[0]), corners, ret)
-                    if(img):
+                    if img is not None:
                         cv2.imshow('img',img)
                         cv2.waitKey(0)
-
+            else:
+                print('Pattern not found in '+fname)
         ## Close up image loading and calibrate
         cv2.destroyAllWindows()
         if len(objpoints) == 0 or len(imgpoints) == 0: 
@@ -533,9 +529,8 @@
     return invH
 
 def undistortTrajectory(invMap1, invMap2, positions):
-    from numpy import floor, ceil
-    floorPositions = floor(positions)
-    #ceilPositions = ceil(positions)
+    floorPositions = npfloor(positions)
+    #ceilPositions = npceil(positions)
     undistortedTrajectory = [[],[]]
     for i in xrange(len(positions[0])):
         x,y = None, None
@@ -563,7 +558,6 @@
         img1Points are used to compute the translation
 
         TODO add diagnostic if data is all over the place, and it most likely is not a translation (eg zoom, other non linear distortion)'''
-        from numpy import median, sum
 
         nextPoints = array([])
         (img2Points, status, track_error) = cv2.calcOpticalFlowPyrLK(img1, img2, img1Points, nextPoints, winSize=windowSize, maxLevel=level, criteria=criteria)
@@ -572,7 +566,7 @@
         for (k, (p1,p2)) in enumerate(zip(img1Points, img2Points)):
             if status[k] == 1:
                 dp = p2-p1
-                d = sum(dp**2)
+                d = npsum(dp**2)
                 if d < maxTranslation2:
                     delta.append(dp)
         if len(delta) >= minNMatches:
@@ -601,9 +595,6 @@
         return float32(features)
 
     def createHOGTrainingSet(imageDirectory, classLabel, rescaleSize = (64, 64), orientations=9, pixelsPerCell=(8, 8), cellsPerBlock=(2, 2), visualize=False, normalize=False):
-        from os import listdir
-        from matplotlib.pyplot import imread
-
         inputData = []
         for filename in listdir(imageDirectory):
             img = imread(imageDirectory+filename)
@@ -611,6 +602,6 @@
             inputData.append(features)
 
         nImages = len(inputData)
-        return array(inputData, dtype = float32), array([classLabel]*nImages, dtype = float32)
+        return array(inputData, dtype = float32), array([classLabel]*nImages)
 
         
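
The seeking logic above now relies on cv2.CAP_PROP_POS_FRAMES and cv2.CAP_PROP_FRAME_COUNT. A short usage sketch of the refactored helper, assuming this cvutils module is importable and 'video.avi' is a placeholder for an existing video file:

    import cvutils

    # read 5 frames starting at frame 100; with saveImage=True the helper
    # would also write zero-padded 'image-*.png' files
    images = cvutils.getImagesFromVideo('video.avi', firstFrameNum = 100, nFrames = 5)
    print('{} frames read, fps = {}'.format(len(images), cvutils.getFPS('video.avi')))
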
--- a/python/ml.py	Tue Nov 03 13:48:56 2015 -0500
+++ b/python/ml.py	Mon May 09 15:33:11 2016 -0400
@@ -1,13 +1,29 @@
 #! /usr/bin/env python
 '''Libraries for machine learning algorithms'''
 
+from os import path
+from random import shuffle
+from copy import copy, deepcopy
+
 import numpy as np
-
+from matplotlib.pylab import text
+import matplotlib as mpl
+import matplotlib.pyplot as plt
+from scipy.cluster.vq import kmeans, whiten, vq
+from sklearn import mixture
+import cv2
 
-class Model(object):
-    '''Abstract class for loading/saving model'''    
+import utils
+
+#####################
+# OpenCV ML models
+#####################
+
+class StatModel(object):
+    '''Abstract class for loading/saving model
+
+    Issues with OpenCV, does not seem to work'''    
     def load(self, filename):
-        from os import path
         if path.exists(filename):
             self.model.load(filename)
         else:
@@ -16,21 +32,36 @@
     def save(self, filename):
         self.model.save(filename)
 
-class SVM(Model):
+class SVM(StatModel):
     '''wrapper for OpenCV SimpleVectorMachine algorithm'''
+    def __init__(self, svmType = cv2.ml.SVM_C_SVC, kernelType = cv2.ml.SVM_RBF, degree = 0, gamma = 1, coef0 = 0, Cvalue = 1, nu = 0, p = 0):
+        self.model = cv2.ml.SVM_create()
+        self.model.setType(svmType)
+        self.model.setKernel(kernelType)
+        self.model.setDegree(degree)
+        self.model.setGamma(gamma)
+        self.model.setCoef0(coef0)
+        self.model.setC(Cvalue)
+        self.model.setNu(nu)
+        self.model.setP(p)
 
-    def __init__(self):
-        import cv2
-        self.model = cv2.SVM()
+    def load(self, filename):
+        if path.exists(filename):
+            self.model = cv2.ml.SVM_load(filename)
+        else:
+            print('Provided filename {} does not exist: model not loaded!'.format(filename))
 
-    def train(self, samples, responses, svm_type, kernel_type, degree = 0, gamma = 1, coef0 = 0, Cvalue = 1, nu = 0, p = 0):
-        self.params = dict(svm_type = svm_type, kernel_type = kernel_type, degree = degree, gamma = gamma, coef0 = coef0, Cvalue = Cvalue, nu = nu, p = p)
-        self.model.train(samples, responses, params = self.params)
+    def train(self, samples, layout, responses):
+        self.model.train(samples, layout, responses)
 
     def predict(self, hog):
         return self.model.predict(hog)
 
 
+#####################
+# Clustering
+#####################
+
 class Centroid(object):
     'Wrapper around instances to add a counter'
 
@@ -52,7 +83,6 @@
         return Centroid(inst, self.nInstances+instance.nInstances)
 
     def plot(self, options = ''):
-        from matplotlib.pylab import text
         self.instance.plot(options)
         text(self.instance.position.x+1, self.instance.position.y+1, str(self.nInstances))
 
@@ -68,9 +98,6 @@
 
     data: list of instances
     averageCentroid: '''
-
-    from random import shuffle
-    from copy import copy, deepcopy
     localdata = copy(data) # shallow copy to avoid modifying data
     if shuffleData:
         shuffle(localdata)
@@ -105,7 +132,6 @@
 	# by stacking eigenvectors as columns
 	features = np.array(V[:k]).T
 	# k-means
-	from scipy.cluster.vq import kmeans, whiten, vq
 	features = whiten(features)
 	centroids,distortion = kmeans(features,k, iter)
 	code,distance = vq(features,centroids) # code starting from 0 (represent first cluster) to k-1 (last cluster)
@@ -179,5 +205,30 @@
 
 def computeClusterSizes(labels, prototypeIndices, outlierIndex = -1):
     clusterSizes = {i: sum(np.array(labels) == i) for i in prototypeIndices}
-    clusterSizes['outlier'] = sum(np.array(labels) == -1)
+    clusterSizes['outlier'] = sum(np.array(labels) == outlierIndex)
     return clusterSizes
+
+# Gaussian Mixture Models
+def plotGMMClusters(model, dataset = None, fig = None, colors = utils.colors, nPixelsPerUnit = 1., alpha = 0.3):
+    '''plot the ellipse corresponding to the Gaussians
+    and the predicted classes of the instances in the dataset'''
+    if fig is None:
+        fig = plt.figure()
+    if dataset is not None:
+        labels = model.predict(dataset)
+        tmpDataset = nPixelsPerUnit*dataset
+    for i in xrange(model.n_components):
+        mean = nPixelsPerUnit*model.means_[i]
+        covariance = nPixelsPerUnit*model.covars_[i]
+        if dataset is not None:
+            plt.scatter(tmpDataset[labels == i, 0], tmpDataset[labels == i, 1], .8, color=colors[i])
+        plt.annotate(str(i), xy=(mean[0]+1, mean[1]+1))
+
+        # Plot an ellipse to show the Gaussian component
+        v, w = np.linalg.eigh(covariance)
+        angle = np.arctan2(w[0][1], w[0][0])
+        angle = 180*angle/np.pi  # convert to degrees
+        v *= 4
+        ell = mpl.patches.Ellipse(mean, v[0], v[1], 180+angle, color=colors[i])
+        ell.set_clip_box(fig.bbox)
+        ell.set_alpha(alpha)
+        fig.axes[0].add_artist(ell)
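
The rewritten SVM wrapper targets the cv2.ml module introduced in OpenCV 3, where the sample layout is passed explicitly at training time instead of through a params dict. A hedged usage sketch with synthetic data (cv2.ml.ROW_SAMPLE states that each row of the samples array is one observation; cv2.ml expects float32 samples and int32 class responses):

    import numpy as np
    import cv2
    from ml import SVM # the wrapper defined above

    samples = np.random.rand(20, 4).astype(np.float32) # 20 samples, 4 features
    responses = np.random.randint(0, 2, 20).astype(np.int32) # binary labels
    classifier = SVM(svmType = cv2.ml.SVM_C_SVC, kernelType = cv2.ml.SVM_RBF)
    classifier.train(samples, cv2.ml.ROW_SAMPLE, responses)
    print(classifier.predict(samples[:1])) # returns a (retval, results) pair
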
--- a/python/moving.py	Tue Nov 03 13:48:56 2015 -0500
+++ b/python/moving.py	Mon May 09 15:33:11 2016 -0400
@@ -12,7 +12,7 @@
 
 try:
     from shapely.geometry import Polygon, Point as shapelyPoint
-    from shapely.prepared import prep
+    from shapely.prepared import prep, PreparedGeometry
     shapelyAvailable = True
 except ImportError:
     print('Shapely library could not be loaded')
@@ -35,6 +35,9 @@
     def __repr__(self):
         return self.__str__()
 
+    def __eq__(self, other):
+        return ((self.first == other.first) and (self.last == other.last)) or ((self.first == other.last) and (self.last == other.first))
+
     def empty(self):
         return self.first > self.last
 
@@ -58,6 +61,10 @@
         '''Indicates if the temporal interval of self is comprised in interval2'''
         return (self.first >= interval2.first) and (self.last <= interval2.last)
 
+    def shift(self, offset):
+        self.first += offset
+        self.last += offset
+
     @classmethod
     def union(cls, interval1, interval2):
         '''Smallest interval comprising self and interval2'''
@@ -171,6 +178,9 @@
     def commonTimeInterval(self, obj2):
         return TimeInterval.intersection(self.getTimeInterval(), obj2.getTimeInterval())
 
+    def shiftTimeInterval(self, offset):
+        self.timeInterval.shift(offset)
+
 class Point(object):
     def __init__(self, x, y):
         self.x = x
@@ -182,6 +192,9 @@
     def __repr__(self):
         return self.__str__()
 
+    def __eq__(self, other):
+        return (self.x == other.x) and (self.y == other.y)
+
     def __add__(self, other):
         return Point(self.x+other.x, self.y+other.y)
 
@@ -217,6 +230,10 @@
     def plot(self, options = 'o', **kwargs):
         plot([self.x], [self.y], options, **kwargs)
 
+    @staticmethod
+    def plotSegment(p1, p2, options = 'o', **kwargs):
+        plot([p1.x, p2.x], [p1.y, p2.y], options, **kwargs)
+
     def norm2Squared(self):
         '''2-norm distance (Euclidean distance)'''
         return self.x**2+self.y**2
@@ -339,8 +356,11 @@
 
 if shapelyAvailable:
     def pointsInPolygon(points, polygon):
-        '''Optimized tests of a series of points within (Shapely) polygon '''
-        prepared_polygon = prep(polygon)
+        '''Optimized tests of a series of points within a (Shapely) polygon (prepared or not)'''
+        if type(polygon) == PreparedGeometry:
+            prepared_polygon = polygon
+        else:
+            prepared_polygon = prep(polygon)
         return filter(prepared_polygon.contains, points)
 
 # Functions for coordinate transformation
@@ -360,10 +380,10 @@
     ss_spline_d = []
     #Prepare subsegment distances
     for spline in range(len(splines)):
-        ss_spline_d.append([[],[],[]])
-        ss_spline_d[spline][0] = zeros(len(splines[spline])-1)  #Incremental distance
-        ss_spline_d[spline][1] = zeros(len(splines[spline])-1)  #Cumulative distance
-        ss_spline_d[spline][2] = zeros(len(splines[spline]))  #Cumulative distance with trailing distance
+        ss_spline_d.append([])
+        ss_spline_d[spline].append(zeros(len(splines[spline])-1))  #Incremental distance
+        ss_spline_d[spline].append(zeros(len(splines[spline])-1))  #Cumulative distance
+        ss_spline_d[spline].append(zeros(len(splines[spline])))  #Cumulative distance with trailing distance
         for spline_p in range(len(splines[spline])):
             if spline_p > (len(splines[spline]) - 2):
                 break
@@ -375,6 +395,18 @@
 
     return ss_spline_d
 
+def prepareSplines(splines):
+    'Approximates slope singularities by adding a small roundoff to near-identical coordinates; accounts for roundoff error'
+    for spline in splines:
+        p1 = spline[0]
+        for i in xrange(len(spline)-1):
+            p2 = spline[i+1]
+            if(round(p1.x, 10) == round(p2.x, 10)):
+                p2.x += 0.0000000001
+            if(round(p1.y, 10) == round(p2.y, 10)):
+                p2.y += 0.0000000001            
+            p1 = p2
+
 def ppldb2p(qx,qy, p0x,p0y, p1x,p1y):
     ''' Point-projection (Q) on line defined by 2 points (P0,P1). 
         http://cs.nyu.edu/~yap/classes/visual/03s/hw/h2/math.pdf
@@ -383,10 +415,10 @@
         return None
     try:
         #Approximate slope singularity by giving some slope roundoff; account for roundoff error
-        if(round(p0x, 10) == round(p1x, 10)):
-            p1x += 0.0000000001
-        if(round(p0y, 10) == round(p1y, 10)):
-            p1y += 0.0000000001            
+        # if(round(p0x, 10) == round(p1x, 10)):
+        #     p1x += 0.0000000001
+        # if(round(p0y, 10) == round(p1y, 10)):
+        #     p1y += 0.0000000001            
         #make the calculation
         Y = (-(qx)*(p0y-p1y)-(qy*(p0y-p1y)**2)/(p0x-p1x)+p0x**2*(p0y-p1y)/(p0x-p1x)-p0x*p1x*(p0y-p1y)/(p0x-p1x)-p0y*(p0x-p1x))/(p1x-p0x-(p0y-p1y)**2/(p0x-p1x))
         X = (-Y*(p1y-p0y)+qx*(p1x-p0x)+qy*(p1y-p0y))/(p1x-p0x)
@@ -397,8 +429,9 @@
     return Point(X,Y)
 
 def getSYfromXY(p, splines, goodEnoughSplineDistance = 0.5):
-    ''' Snap a point p to it's nearest subsegment of it's nearest spline (from the list splines). A spline is a list of points (class Point), most likely a trajectory. 
-        
+    ''' Snap a point p to its nearest subsegment of its nearest spline (from the list splines). 
+    A spline is a list of points (class Point), most likely a trajectory. 
+    
     Output:
     =======
     [spline index, 
@@ -412,36 +445,38 @@
     '''
     minOffsetY = float('inf')
     #For each spline
-    for spline in range(len(splines)):
+    for splineIdx in range(len(splines)):
         #For each spline point index
-        for spline_p in range(len(splines[spline])-1):
+        for spline_p in range(len(splines[splineIdx])-1):
             #Get closest point on spline
-            closestPoint = ppldb2p(p.x,p.y,splines[spline][spline_p][0],splines[spline][spline_p][1],splines[spline][spline_p+1][0],splines[spline][spline_p+1][1])
+            closestPoint = ppldb2p(p.x,p.y,splines[splineIdx][spline_p][0],splines[splineIdx][spline_p][1],splines[splineIdx][spline_p+1][0],splines[splineIdx][spline_p+1][1])
             if closestPoint is None:
-                print('Error: Spline {0}, segment {1} has identical bounds and therefore is not a vector. Projection cannot continue.'.format(spline, spline_p))
+                print('Error: Spline {0}, segment {1} has identical bounds and therefore is not a vector. Projection cannot continue.'.format(splineIdx, spline_p))
                 return None
-            # check if the 
-            if utils.inBetween(splines[spline][spline_p][0], splines[spline][spline_p+1][0], closestPoint.x) and utils.inBetween(splines[spline][spline_p][1], splines[spline][spline_p+1][1], closestPoint.y): 
+            # check if the projected point is between the bounds of the current segment of the alignment
+            if utils.inBetween(splines[splineIdx][spline_p][0], splines[splineIdx][spline_p+1][0], closestPoint.x) and utils.inBetween(splines[splineIdx][spline_p][1], splines[splineIdx][spline_p+1][1], closestPoint.y): 
                 offsetY = Point.distanceNorm2(closestPoint, p)
                 if offsetY < minOffsetY:
                     minOffsetY = offsetY
-                    snappedSpline = spline
+                    snappedSplineIdx = splineIdx
                     snappedSplineLeadingPoint = spline_p
                     snappedPoint = Point(closestPoint.x, closestPoint.y)
                 #Jump loop if significantly close
                 if offsetY < goodEnoughSplineDistance: 
                     break
+
     #Get sub-segment distance
     if minOffsetY != float('inf'):
-        subsegmentDistance = Point.distanceNorm2(snappedPoint, splines[snappedSpline][snappedSplineLeadingPoint])
+        subsegmentDistance = Point.distanceNorm2(snappedPoint, splines[snappedSplineIdx][snappedSplineLeadingPoint])
         #Get cumulative alignment distance (total segment distance)
-        splineDistanceS = splines[snappedSpline].getCumulativeDistance(snappedSplineLeadingPoint) + subsegmentDistance
-        orthogonalSplineVector = (splines[snappedSpline][snappedSplineLeadingPoint+1]-splines[snappedSpline][snappedSplineLeadingPoint]).orthogonal()
+        splineDistanceS = splines[snappedSplineIdx].getCumulativeDistance(snappedSplineLeadingPoint) + subsegmentDistance
+        orthogonalSplineVector = (splines[snappedSplineIdx][snappedSplineLeadingPoint+1]-splines[snappedSplineIdx][snappedSplineLeadingPoint]).orthogonal()
         offsetVector = p-snappedPoint
         if Point.dot(orthogonalSplineVector, offsetVector) < 0:
             minOffsetY = -minOffsetY
-        return [snappedSpline, snappedSplineLeadingPoint, snappedPoint, subsegmentDistance, splineDistanceS, minOffsetY]
+        return [snappedSplineIdx, snappedSplineLeadingPoint, snappedPoint, subsegmentDistance, splineDistanceS, minOffsetY]
     else:
+        print('Offset for point {} is infinite (check with prepareSplines if some spline segments are aligned with axes)'.format(p))
         return None
 
 def getXYfromSY(s, y, splineNum, splines, mode = 0):
@@ -656,7 +691,6 @@
     def __repr__(self):
         return self.__str__()
 
-
     def __iter__(self):
         self.iterInstantNum = 0
         return self
@@ -668,6 +702,15 @@
             self.iterInstantNum += 1
             return self[self.iterInstantNum-1]
 
+    def __eq__(self, other):
+        if self.length() == other.length():
+            result = True
+            for p, po in zip(self, other):
+                result = result and (p == po)
+            return result
+        else:
+            return False
+
     def setPositionXY(self, i, x, y):
         if i < self.__len__():
             self.positions[0][i] = x
@@ -758,6 +801,26 @@
             diff.addPosition(diff[-1])
         return diff
 
+    def differentiateSG(self, window_length, polyorder, deriv=0, delta=1.0, axis=-1, mode='interp', cval=0.0, removeBothEnds = 2):
+        '''Differentiates the trajectory using the Savitzky-Golay filter
+
+        window_length : The length of the filter window (i.e. the number of coefficients). window_length must be a positive odd integer.
+        polyorder : The order of the polynomial used to fit the samples. polyorder must be less than window_length.
+        deriv : The order of the derivative to compute. This must be a nonnegative integer. The default is 0, which means to filter the data without differentiating.
+        delta : The spacing of the samples to which the filter will be applied. This is only used if deriv > 0. Default is 1.0.
+        axis : The axis of the array x along which the filter is to be applied. Default is -1.
+        mode : Must be mirror, constant, nearest, wrap or interp. This determines the type of extension to use for the padded signal to which the filter is applied. When mode is constant, the padding value is given by cval. See the Notes for more details on mirror, constant, wrap, and nearest. When the interp mode is selected (the default), no extension is used. Instead, a degree polyorder polynomial is fit to the last window_length values of the edges, and this polynomial is used to evaluate the last window_length // 2 output values.
+        cval : Value to fill past the edges of the input if mode is constant. Default is 0.0.'''
+        from scipy.signal import savgol_filter
+
+        if removeBothEnds >=1:
+            pos = [self.positions[0][removeBothEnds:-removeBothEnds],
+                   self.positions[1][removeBothEnds:-removeBothEnds]]
+        else:
+            pos = self.positions
+        filtered = savgol_filter(pos, window_length, polyorder, deriv, delta, axis, mode, cval)
+        return Trajectory(filtered)
+
     def norm(self):
         '''Returns the list of the norms at each instant'''
 #        def add(x, y): return x+y
@@ -800,6 +863,11 @@
         else:
             print('Index {} beyond trajectory length {}'.format(i, self.length()))
 
+    def getMaxDistance(self, metric):
+        'Returns the maximum distance between points in the trajectory' 
+        positions = self.getPositions().asArray().T
+        return cdist(positions, positions, metric = metric).max()
+
     def similarOrientation(self, refDirection, cosineThreshold, minProportion = 0.5):
         '''Indicates whether the minProportion (<=1.) (eg half) of the trajectory elements (vectors for velocity) 
         have a cosine with refDirection is smaller than cosineThreshold'''
@@ -811,19 +879,23 @@
         return count >= lengthThreshold
 
     def wiggliness(self):
-        return self.getCumulativeDistance(self.length()-1)/float(Point.distanceNorm2(self.__getitem__(0),self.__getitem__(self.length()-1)))
+        straightDistance = Point.distanceNorm2(self.__getitem__(0),self.__getitem__(self.length()-1))
+        if straightDistance > 0:
+            return self.getCumulativeDistance(self.length()-1)/float(straightDistance)
+        else:
+            return None
 
     def getIntersections(self, p1, p2):
         '''Returns a list of the indices at which the trajectory 
         intersects with the segment of extremities p1 and p2 
-        the list is empty if there is no crossing'''
+        Returns an empty list if there is no crossing'''
         indices = []
         intersections = []
 
         for i in xrange(self.length()-1):
             q1=self.__getitem__(i)
             q2=self.__getitem__(i+1)
-            p = utils.segmentIntersection(q1, q2, p1, p2)
+            p = segmentIntersection(q1, q2, p1, p2)
             if p is not None:
                 if q1.x != q2.x:
                     ratio = (p.x-q1.x)/(q2.x-q1.x)
@@ -833,19 +905,19 @@
                     ratio = 0
                 indices.append(i+ratio)
                 intersections.append(p)
-        return indices
+        return indices, intersections
 
     def getLineIntersections(self, p1, p2):
         '''Returns a list of the indices at which the trajectory 
-        intersects with the segment of extremities p1 and p2 
-        the list is empty if there is no crossing'''
+        intersects with the line going through p1 and p2 
+        Returns an empty list if there is no crossing'''
         indices = []
         intersections = []
         
         for i in xrange(self.length()-1):
             q1=self.__getitem__(i)
             q2=self.__getitem__(i+1)
-            p = utils.segmentLineIntersection(p1, p2, q1, q2)
+            p = segmentLineIntersection(p1, p2, q1, q2)
             if p is not None:
                 if q1.x != q2.x:
                     ratio = (p.x-q1.x)/(q2.x-q1.x)
@@ -864,34 +936,57 @@
                                self.positions[1][inter.first:inter.last+1]])
         else:
             return None
-    
+
+    def subSample(self, step):
+        'Returns every step-th position'
+        return Trajectory([self.positions[0][::step],
+                           self.positions[1][::step]])
+
     if shapelyAvailable:
-        def getTrajectoryInPolygon(self, polygon):
-            '''Returns the trajectory built with the set of points inside the (shapely) polygon'''
+        def getTrajectoryInPolygon(self, polygon, t2 = None):
+            '''Returns the trajectory built with the set of points inside the (shapely) polygon
+            The polygon could be a prepared polygon (faster) from prepared.prep
+
+            t2 is another trajectory (could be velocities) 
+            which is filtered based on the first (self) trajectory'''
             traj = Trajectory()
-            points = [p.asShapely() for p in self]
-            for p in pointsInPolygon(points, polygon):
-                    traj.addPositionXY(p.x, p.y)
-            return traj
+            inPolygon = []
+            for x, y in zip(self.positions[0], self.positions[1]):
+                inPolygon.append(polygon.contains(shapelyPoint(x, y)))
+                if inPolygon[-1]:
+                    traj.addPositionXY(x, y)
+            traj2 = Trajectory()
+            if t2 is not None:
+                for inp, x, y in zip(inPolygon, t2.positions[0], t2.positions[1]):
+                    if inp:
+                        traj2.addPositionXY(x, y)
+            return traj, traj2
 
         def proportionInPolygon(self, polygon, minProportion = 0.5):
-            pointsIn = pointsInPolygon([p.asShapely() for p in self], polygon)
+            inPolygon = [polygon.contains(shapelyPoint(x, y)) for x, y in zip(self.positions[0], self.positions[1])]
             lengthThreshold = float(self.length())*minProportion
-            return len(pointsIn) >= lengthThreshold
+            return sum(inPolygon) >= lengthThreshold
     else:
-        def getTrajectoryInPolygon(self, polygon):
+        def getTrajectoryInPolygon(self, polygon, t2 = None):
             '''Returns the trajectory built with the set of points inside the polygon
             (array of Nx2 coordinates of the polygon vertices)'''
             traj = Trajectory()
+            inPolygon = []
             for p in self:
-                if p.inPolygon(polygon):
+                inPolygon.append(p.inPolygon(polygon))
+                if inPolygon[-1]:
                     traj.addPosition(p)
-            return traj
+            traj2 = Trajectory()
+            if t2 is not None:
+                for inp, x, y in zip(inPolygon, t2.positions[0], t2.positions[1]):
+                    if inp:
+                        traj2.addPositionXY(x, y)
+            return traj, traj2
 
         def proportionInPolygon(self, polygon, minProportion = 0.5):
-            pointsInPolygon = [p.inPolygon(polygon) for p in self]
+            inPolygon = [p.inPolygon(polygon) for p in self]
             lengthThreshold = float(self.length())*minProportion
-            return len(pointsInPolygon) >= lengthThreshold
+            return sum(inPolygon) >= lengthThreshold
 
     @staticmethod
     def lcss(t1, t2, lcss):
@@ -955,7 +1050,7 @@
         '''Returns a list of the indices at which the trajectory 
         goes past the curvilinear coordinate S1
         (in provided lane if lane is not None)
-        the list is empty if there is no crossing'''
+        Returns an empty list if there is no crossing'''
         indices = []
         for i in xrange(self.length()-1):
             q1=self.__getitem__(i)
@@ -1526,15 +1621,19 @@
 # Annotations
 ##################
 
-class BBAnnotation(MovingObject):
-    '''Class for a series of ground truth annotations using bounding boxes
-    Its center is the center of the containing shape
+class BBMovingObject(MovingObject):
+    '''Class for a moving object represented as a bounding box
+    used for series of ground truth annotations using bounding boxes
+     and for the output of Urban Tracker http://www.jpjodoin.com/urbantracker/
 
     By default in image space
+
+    Its center is the center of the box (generalize to other shapes?) 
+    (computed after projecting if homography available)
     '''
 
     def __init__(self, num = None, timeInterval = None, topLeftPositions = None, bottomRightPositions = None, userType = userType2Num['unknown']):
-        super(BBAnnotation, self).__init__(num, timeInterval, userType = userType)
+        super(BBMovingObject, self).__init__(num, timeInterval, userType = userType)
         self.topLeftPositions = topLeftPositions.getPositions()
         self.bottomRightPositions = bottomRightPositions.getPositions()
 
@@ -1561,7 +1660,7 @@
     Keni, Bernardin, and Stiefelhagen Rainer. "Evaluating multiple object tracking performance: the CLEAR MOT metrics." EURASIP Journal on Image and Video Processing 2008 (2008)
 
     objects and annotations are supposed to be in the same space
-    current implementation is BBAnnotations (bounding boxes)
+    current implementation is BBMovingObject (bounding boxes)
     matchingDistance is the threshold on matching between annotation and object
 
     TO: tracker output (objects)
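
The new differentiateSG method wraps scipy.signal.savgol_filter, which fits a local polynomial to each window of the trajectory and can return its derivative directly. A minimal sketch of the underlying call on a 2xN positions array (synthetic data; window_length must be odd and strictly greater than polyorder):

    import numpy as np
    from scipy.signal import savgol_filter

    # noisy 2xN trajectory: x advances linearly, y stays near zero
    t = np.arange(30)
    positions = np.vstack([t + 0.1*np.random.randn(30),
                           0.1*np.random.randn(30)])
    # deriv=1 with delta equal to the frame interval approximates the velocity
    velocities = savgol_filter(positions, window_length = 7, polyorder = 2,
                               deriv = 1, delta = 1., axis = -1, mode = 'interp')
    print(velocities[:,:5]) # x-velocities should be close to 1
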
--- a/python/storage.py	Tue Nov 03 13:48:56 2015 -0500
+++ b/python/storage.py	Mon May 09 15:33:11 2016 -0400
@@ -89,38 +89,6 @@
     except sqlite3.OperationalError as error:
         printDBError(error)
 
-# TODO: add test if database connection is open
-# IO to sqlite
-def writeTrajectoriesToSqlite(objects, outputFilename, trajectoryType, objectNumbers = -1):
-    """
-    This function writers trajectories to a specified sqlite file
-    @param[in] objects -> a list of trajectories
-    @param[in] trajectoryType -
-    @param[out] outputFilename -> the .sqlite file containting the written objects
-    @param[in] objectNumber : number of objects loaded
-    """
-    connection = sqlite3.connect(outputFilename)
-    cursor = connection.cursor()
-
-    schema = "CREATE TABLE IF NOT EXISTS \"positions\"(trajectory_id INTEGER,frame_number INTEGER, x_coordinate REAL, y_coordinate REAL, PRIMARY KEY(trajectory_id, frame_number))"
-    cursor.execute(schema)
-
-    trajectory_id = 0
-    frame_number = 0
-    if trajectoryType == 'feature':
-        if type(objectNumbers) == int and objectNumbers == -1:
-            for trajectory in objects:
-                trajectory_id += 1
-                frame_number = 0
-                for position in trajectory.getPositions():
-                    frame_number += 1
-                    query = "insert into positions (trajectory_id, frame_number, x_coordinate, y_coordinate) values (?,?,?,?)"
-                    cursor.execute(query,(trajectory_id,frame_number,position.x,position.y))
-                    
-    connection.commit()
-    connection.close()
-    
-
 def loadPrototypeMatchIndexesFromSqlite(filename):
     """
     This function loads the prototypes table in the database of name <filename>.
@@ -185,7 +153,7 @@
                 queryStatement += ' WHERE object_id '+objectCriteria
             queryStatement += ' ORDER BY object_id, frame_number'
         else:
-            print('no trajectory type was chosen')
+            print('Unknown trajectory type {}'.format(trajectoryType))
         if queryStatement is not None:
             cursor.execute(queryStatement)
             logging.debug(queryStatement)
@@ -227,7 +195,10 @@
     return userTypes
 
 def loadTrajectoriesFromSqlite(filename, trajectoryType, objectNumbers = None, withFeatures = False):
-    '''Loads the first objectNumbers objects or the indices in objectNumbers from the database'''
+    '''Loads trajectories (in the general sense: 
+    features, objects (feature groups) or bounding box series) 
+    from the database: either the first objectNumbers objects,
+    or the objects whose indices are listed in objectNumbers'''
     connection = sqlite3.connect(filename)
 
     objects = loadTrajectoriesFromTable(connection, 'positions', trajectoryType, objectNumbers)
@@ -284,6 +255,83 @@
     connection.close()
     return objects
 
+def addCurvilinearTrajectoriesFromSqlite(filename, objects):
+    '''Adds curvilinear positions (s_coordinate, y_coordinate, lane)
+    from a database to an existing MovingObject dict (indexed by each object's num)'''
+    connection = sqlite3.connect(filename)
+    cursor = connection.cursor()
+
+    try:
+        cursor.execute('SELECT * from curvilinear_positions order by trajectory_id, frame_number')
+    except sqlite3.OperationalError as error:
+        printDBError(error)
+        return []
+    
+    missingObjectNumbers = []
+    objNum = None
+    for row in cursor:
+        if objNum != row[0]:
+            objNum = row[0]
+            if objNum in objects:
+                objects[objNum].curvilinearPositions = moving.CurvilinearTrajectory()
+            else:
+                missingObjectNumbers.append(objNum)
+        if objNum in objects:
+            objects[objNum].curvilinearPositions.addPositionSYL(row[2],row[3],row[4])
+    if len(missingObjectNumbers) > 0:
+        print('List of missing objects to attach corresponding curvilinear trajectories: {}'.format(missingObjectNumbers))
+
+def saveTrajectoriesToSqlite(outputFilename, objects, trajectoryType, withFeatures = False):
+    '''Writes trajectories with their instants to a specified sqlite file
+    Either feature positions (and velocities if they exist)
+    or curvilinear positions are saved at a time
+
+    TODO: not implemented for trajectoryType MovingObject with features
+    For objects, withFeatures will control whether the features
+    corresponding to the object are also saved'''
+
+    connection = sqlite3.connect(outputFilename)
+    try:
+        cursor = connection.cursor()
+
+        if trajectoryType == 'feature':
+            cursor.execute("CREATE TABLE IF NOT EXISTS positions (trajectory_id INTEGER, frame_number INTEGER, x_coordinate REAL, y_coordinate REAL, PRIMARY KEY(trajectory_id, frame_number))")
+            cursor.execute("CREATE TABLE IF NOT EXISTS velocities (trajectory_id INTEGER, frame_number INTEGER, x_coordinate REAL, y_coordinate REAL, PRIMARY KEY(trajectory_id, frame_number))")
+
+            positionQuery = "insert into positions (trajectory_id, frame_number, x_coordinate, y_coordinate) values (?,?,?,?)"
+            velocityQuery = "insert into velocities (trajectory_id, frame_number, x_coordinate, y_coordinate) values (?,?,?,?)"
+            for obj in objects:
+                num = obj.getNum()
+                frame_number = obj.getFirstInstant()
+                for position in obj.getPositions():
+                    cursor.execute(positionQuery, (num, frame_number, position.x, position.y))
+                    frame_number += 1
+                # velocities
+                velocities = obj.getVelocities()
+                if velocities is not None:
+                    frame_number = obj.getFirstInstant()
+                    for i in xrange(velocities.length()-1):
+                        v = velocities[i]
+                        cursor.execute(velocityQuery, (num, frame_number, v.x, v.y))
+                        frame_number += 1
+        elif trajectoryType == 'curvilinear':
+            cursor.execute("CREATE TABLE IF NOT EXISTS curvilinear_positions (trajectory_id INTEGER, frame_number INTEGER, s_coordinate REAL, y_coordinate REAL, lane TEXT, PRIMARY KEY(trajectory_id, frame_number))")
+            curvilinearQuery = "insert into curvilinear_positions (trajectory_id, frame_number, s_coordinate, y_coordinate, lane) values (?,?,?,?,?)"
+            for obj in objects:
+                num = obj.getNum()
+                frame_number = obj.getFirstInstant()
+                for position in obj.getCurvilinearPositions():
+                    cursor.execute(curvilinearQuery, (num, frame_number, position[0], position[1], position[2]))
+                    frame_number += 1
+        #elif trajectoryType == 'object':
+        else:
+            print('Unknown trajectory type {}'.format(trajectoryType))
+        connection.commit()
+    except sqlite3.OperationalError as error:
+        printDBError(error)
+    connection.close()
+
 def savePrototypesToSqlite(filename, prototypes, trajectoryType = 'feature'):
     'Work in progress, do not use'
     connection = sqlite3.connect(filename)
@@ -300,26 +348,30 @@
 def loadPrototypesFromSqlite(filename):
     pass
 
-def loadGroundTruthFromSqlite(filename, gtType = 'bb', gtNumbers = None):
-    'Loads bounding box annotations (ground truth) from an SQLite '
-    connection = sqlite3.connect(filename)
-    gt = []
+def loadBBMovingObjectsFromSqlite(filename, objectType = 'bb', objectNumbers = None):
+    '''Loads bounding box moving objects from an SQLite database
+    (format of the SQLite output by the ground truth annotation tool
+    or Urban Tracker)
 
-    if gtType == 'bb':
-        topCorners = loadTrajectoriesFromTable(connection, 'bounding_boxes', 'bbtop', gtNumbers)
-        bottomCorners = loadTrajectoriesFromTable(connection, 'bounding_boxes', 'bbbottom', gtNumbers)
-        userTypes = loadUserTypesFromTable(connection.cursor(), 'object', gtNumbers) # string format is same as object
+    Load descriptions?'''
+    connection = sqlite3.connect(filename)
+    objects = []
+
+    if objectType == 'bb':
+        topCorners = loadTrajectoriesFromTable(connection, 'bounding_boxes', 'bbtop', objectNumbers)
+        bottomCorners = loadTrajectoriesFromTable(connection, 'bounding_boxes', 'bbbottom', objectNumbers)
+        userTypes = loadUserTypesFromTable(connection.cursor(), 'object', objectNumbers) # string format is same as object
         
         for t, b in zip(topCorners, bottomCorners):
             num = t.getNum()
             if t.getNum() == b.getNum():
-                annotation = moving.BBAnnotation(num, t.getTimeInterval(), t, b, userTypes[num])
-                gt.append(annotation)
+                annotation = moving.BBMovingObject(num, t.getTimeInterval(), t, b, userTypes[num])
+                objects.append(annotation)
     else:
-        print ('Unknown type of annotation {}'.format(gtType))
+        print('Unknown type of bounding box {}'.format(objectType))
 
     connection.close()
-    return gt
+    return objects
 
 def deleteFromSqlite(filename, dataType):
     'Deletes (drops) some tables in the filename depending on type of data'
@@ -442,7 +494,7 @@
     boundingBoxes = {} # list of bounding boxes for each instant
     try:
         cursor.execute('SELECT name FROM sqlite_master WHERE type=\'table\' AND name=\'bounding_boxes\'')
-        result = [row for row in cursor]
+        result = cursor.fetchall()
         if len(result) > 0:
             cursor.execute('SELECT * FROM bounding_boxes')
             for row in cursor:
@@ -457,33 +509,12 @@
 # saving and loading for scene interpretation (Mohamed Gomaa Mohamed's PhD)
 #########################
 
-def writeFeaturesToSqlite(objects, outputFilename, trajectoryType, objectNumbers = -1):
-    '''write features trajectories maintain trajectory ID,velocities dataset  '''
-    connection = sqlite3.connect(outputFilename)
-    cursor = connection.cursor()
-
-    cursor.execute("CREATE TABLE IF NOT EXISTS \"positions\"(trajectory_id INTEGER,frame_number INTEGER, x_coordinate REAL, y_coordinate REAL, PRIMARY KEY(trajectory_id, frame_number))")
-    cursor.execute("CREATE TABLE IF NOT EXISTS \"velocities\"(trajectory_id INTEGER,frame_number INTEGER, x_coordinate REAL, y_coordinate REAL, PRIMARY KEY(trajectory_id, frame_number))")
-    
-    if trajectoryType == 'feature':
-        if type(objectNumbers) == int and objectNumbers == -1:
-            for trajectory in objects:
-                trajectory_id = trajectory.num
-                frame_number = trajectory.timeInterval.first
-                for position,velocity in zip(trajectory.getPositions(),trajectory.getVelocities()):
-                    cursor.execute("insert into positions (trajectory_id, frame_number, x_coordinate, y_coordinate) values (?,?,?,?)",(trajectory_id,frame_number,position.x,position.y))
-                    cursor.execute("insert into velocities (trajectory_id, frame_number, x_coordinate, y_coordinate) values (?,?,?,?)",(trajectory_id,frame_number,velocity.x,velocity.y))
-                    frame_number += 1
-                    
-    connection.commit()
-    connection.close()
-    
 def writePrototypesToSqlite(prototypes,nMatching, outputFilename):
     """ prototype dataset is a dictionary with  keys== routes, values== prototypes Ids """
     connection = sqlite3.connect(outputFilename)
     cursor = connection.cursor()
 
-    cursor.execute("CREATE TABLE IF NOT EXISTS \"prototypes\"(prototype_id INTEGER,routeIDstart INTEGER,routeIDend INTEGER, nMatching INTEGER, PRIMARY KEY(prototype_id))")
+    cursor.execute("CREATE TABLE IF NOT EXISTS prototypes (prototype_id INTEGER,routeIDstart INTEGER,routeIDend INTEGER, nMatching INTEGER, PRIMARY KEY(prototype_id))")
     
     for route in prototypes.keys():
         if prototypes[route]!=[]:
@@ -507,7 +538,7 @@
     try:
         cursor.execute('SELECT * from prototypes order by prototype_id, routeIDstart,routeIDend, nMatching')
     except sqlite3.OperationalError as error:
-        utils.printDBError(error)
+        printDBError(error)
         return []
 
     for row in cursor:
@@ -526,7 +557,7 @@
     connection = sqlite3.connect(outputFilename)
     cursor = connection.cursor()
 
-    cursor.execute("CREATE TABLE IF NOT EXISTS \"labels\"(object_id INTEGER,routeIDstart INTEGER,routeIDend INTEGER, prototype_id INTEGER, PRIMARY KEY(object_id))")
+    cursor.execute("CREATE TABLE IF NOT EXISTS labels (object_id INTEGER,routeIDstart INTEGER,routeIDend INTEGER, prototype_id INTEGER, PRIMARY KEY(object_id))")
     
     for route in labels.keys():
         if labels[route]!=[]:
@@ -546,7 +577,7 @@
     try:
         cursor.execute('SELECT * from labels order by object_id, routeIDstart,routeIDend, prototype_id')
     except sqlite3.OperationalError as error:
-        utils.printDBError(error)
+        printDBError(error)
         return []
 
     for row in cursor:
@@ -566,7 +597,7 @@
     connection = sqlite3.connect(outFilename)
     cursor = connection.cursor()
 
-    cursor.execute("CREATE TABLE IF NOT EXISTS \"speedprototypes\"(spdprototype_id INTEGER,prototype_id INTEGER,routeID_start INTEGER, routeID_end INTEGER, nMatching INTEGER, PRIMARY KEY(spdprototype_id))")
+    cursor.execute("CREATE TABLE IF NOT EXISTS speedprototypes (spdprototype_id INTEGER,prototype_id INTEGER,routeID_start INTEGER, routeID_end INTEGER, nMatching INTEGER, PRIMARY KEY(spdprototype_id))")
     
     for route in prototypes.keys():
         if prototypes[route]!={}:
@@ -590,7 +621,7 @@
     try:
         cursor.execute('SELECT * from speedprototypes order by spdprototype_id,prototype_id, routeID_start, routeID_end, nMatching')
     except sqlite3.OperationalError as error:
-        utils.printDBError(error)
+        printDBError(error)
         return []
 
     for row in cursor:
@@ -611,7 +642,7 @@
     connection = sqlite3.connect(outputFilename)
     cursor = connection.cursor()
 
-    cursor.execute("CREATE TABLE IF NOT EXISTS \"routes\"(object_id INTEGER,routeIDstart INTEGER,routeIDend INTEGER, PRIMARY KEY(object_id))")
+    cursor.execute("CREATE TABLE IF NOT EXISTS routes (object_id INTEGER,routeIDstart INTEGER,routeIDend INTEGER, PRIMARY KEY(object_id))")
     
     for route in Routes.keys():
         if Routes[route]!=[]:
@@ -630,7 +661,7 @@
     try:
         cursor.execute('SELECT * from routes order by object_id, routeIDstart,routeIDend')
     except sqlite3.OperationalError as error:
-        utils.printDBError(error)
+        printDBError(error)
         return []
 
     for row in cursor:
@@ -761,6 +792,16 @@
     except sqlite3.OperationalError as error:
         printDBError(error)
 
+def getNObjectsInLinkFromVissimFile(filename, linkIds):
+    '''Returns the number of objects that traveled through the link ids'''
+    connection = sqlite3.connect(filename)
+    cursor = connection.cursor()
+    queryStatement = 'SELECT link_id, COUNT(DISTINCT trajectory_id) FROM curvilinear_positions where link_id IN ('+','.join([str(id) for id in linkIds])+') GROUP BY link_id'
+    try:
+        cursor.execute(queryStatement)
+        return {row[0]:row[1] for row in cursor}
+    except sqlite3.OperationalError as error:
+        printDBError(error)
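For reference, a minimal usage sketch of the new VISSIM helper; the database filename and link ids below are hypothetical:

    from storage import getNObjectsInLinkFromVissimFile

    # counts distinct trajectory ids per link, e.g. {1: 42, 5: 37}
    counts = getNObjectsInLinkFromVissimFile('vissim-trajectories.sqlite', [1, 5])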
 
 def loadTrajectoriesFromVissimFile(filename, simulationStepsPerTimeUnit, objectNumbers = None, warmUpLastInstant = None, usePandas = False, nDecimals = 2, lowMemory = True):
     '''Reads data from VISSIM .fzp trajectory file
--- a/python/tests/moving.txt	Tue Nov 03 13:48:56 2015 -0500
+++ b/python/tests/moving.txt	Mon May 09 15:33:11 2016 -0400
@@ -19,6 +19,12 @@
 2.0
 >>> TimeInterval(10,8).length()
 0.0
+>>> TimeInterval(10,8) == TimeInterval(10,8)
+True
+>>> TimeInterval(10,8) == TimeInterval(8,10)
+True
+>>> TimeInterval(11,8) == TimeInterval(10,8)
+False
 
 >>> [i for i in TimeInterval(9,13)]
 [9, 10, 11, 12, 13]
@@ -41,6 +47,10 @@
 >>> TimeInterval.unionIntervals([TimeInterval(3,6), TimeInterval(8,10),TimeInterval(11,15)])
 [3, 15]
 
+>>> Point(0,3) == Point(0,3)
+True
+>>> Point(0,3) == Point(0,3.2)
+False
 >>> Point(3,4)-Point(1,7)
 (2.000000,-3.000000)
 >>> -Point(1,2)
@@ -70,6 +80,17 @@
 (2.000000,2.000000)
 >>> segmentIntersection(Point(0,1), Point(1,2), Point(2,0), Point(3,2))
 
+>>> t1 = Trajectory.fromPointList([(92.2, 102.9), (56.7, 69.6)])
+>>> t2 = Trajectory.fromPointList([(92.2, 102.9), (56.7, 69.6)])
+>>> t1 == t2
+True
+>>> t3 = Trajectory.fromPointList([(92.24, 102.9), (56.7, 69.6)])
+>>> t1 == t3
+False
+>>> t3 = Trajectory.fromPointList([(92.2, 102.9), (56.7, 69.6), (56.7, 69.6)])
+>>> t1 == t3
+False
+
 >>> left = Trajectory.fromPointList([(92.291666666666686, 102.99239033124439), (56.774193548387103, 69.688898836168306)])
 >>> middle = Trajectory.fromPointList([(87.211021505376351, 93.390778871978512), (59.032258064516128, 67.540286481647257)])
 >>> right = Trajectory.fromPointList([(118.82392473118281, 115.68263205013426), (63.172043010752688, 66.600268576544309)])
@@ -176,7 +197,7 @@
 'pedestrian'
 
 >>> o1 = MovingObject.generate(Point(0.,0.), Point(1.,0.), TimeInterval(0,10))
->>> gt1 = BBAnnotation(1, TimeInterval(0,10), MovingObject.generate(Point(0.2,0.6), Point(1.,0.), TimeInterval(0,10)), MovingObject.generate(Point(-0.2,-0.4), Point(1.,0.), TimeInterval(0,10)))
+>>> gt1 = BBMovingObject(1, TimeInterval(0,10), MovingObject.generate(Point(0.2,0.6), Point(1.,0.), TimeInterval(0,10)), MovingObject.generate(Point(-0.2,-0.4), Point(1.,0.), TimeInterval(0,10)))
 >>> gt1.computeCentroidTrajectory()
 >>> computeClearMOT([gt1], [], 0.2, 0, 10)
 (None, 0.0, 11, 0, 0, 11)
@@ -189,9 +210,9 @@
 
 >>> o1 = MovingObject(1, TimeInterval(0,3), positions = Trajectory([range(4), [0.1, 0.1, 1.1, 1.1]]))
 >>> o2 = MovingObject(2, TimeInterval(0,3), positions = Trajectory([range(4), [0.9, 0.9, -0.1, -0.1]]))
->>> gt1 = BBAnnotation(1, TimeInterval(0,3), MovingObject(positions = Trajectory([range(4), [0.]*4])), MovingObject(positions = Trajectory([range(4), [0.]*4])))
+>>> gt1 = BBMovingObject(1, TimeInterval(0,3), MovingObject(positions = Trajectory([range(4), [0.]*4])), MovingObject(positions = Trajectory([range(4), [0.]*4])))
 >>> gt1.computeCentroidTrajectory()
->>> gt2 = BBAnnotation(2, TimeInterval(0,3), MovingObject(positions = Trajectory([range(4), [1.]*4])), MovingObject(positions = Trajectory([range(4), [1.]*4])))
+>>> gt2 = BBMovingObject(2, TimeInterval(0,3), MovingObject(positions = Trajectory([range(4), [1.]*4])), MovingObject(positions = Trajectory([range(4), [1.]*4])))
 >>> gt2.computeCentroidTrajectory()
 >>> computeClearMOT([gt1, gt2], [o1, o2], 0.2, 0, 3) # doctest:+ELLIPSIS
 (0.1..., 0.75, 0, 2, 0, 8)
--- a/python/tests/moving_shapely.txt	Tue Nov 03 13:48:56 2015 -0500
+++ b/python/tests/moving_shapely.txt	Mon May 09 15:33:11 2016 -0400
@@ -1,9 +1,28 @@
 >>> from moving import *
 >>> from shapely.geometry import Polygon
+>>> from shapely.prepared import prep
 
 >>> t1 = Trajectory([[0.5,1.5,2.5],[0.5,3.5,6.5]])
->>> t1.getTrajectoryInPolygon(Polygon([[0,0],[4,0],[4,3],[0,3]]))
+>>> poly = Polygon([[0,0],[4,0],[4,3],[0,3]])
+>>> sub1, sub2 = t1.getTrajectoryInPolygon(poly)
+>>> sub1
+(0.500000,0.500000)
+>>> sub1, sub2 = t1.getTrajectoryInPolygon(Polygon([[10,10],[14,10],[14,13],[10,13]]))
+>>> sub1.length()
+0
+>>> sub1, sub2 = t1.getTrajectoryInPolygon(prep(poly))
+>>> sub1
 (0.500000,0.500000)
->>> t1.getTrajectoryInPolygon(Polygon([[10,10],[14,10],[14,13],[10,13]])).length()
-0
+>>> t2 = t1.differentiate(True)
+>>> sub1, sub2 = t1.getTrajectoryInPolygon(prep(poly), t2)
+>>> sub1.length() == sub2.length()
+True
+>>> sub1
+(0.500000,0.500000)
+>>> sub2
+(1.000000,3.000000)
 
+>>> t1.proportionInPolygon(poly, 0.5)
+False
+>>> t1.proportionInPolygon(poly, 0.3)
+True
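The new doctests exercise shapely prepared geometries: prep(poly) builds an indexed copy of the polygon that makes the repeated point-in-polygon tests done by getTrajectoryInPolygon and proportionInPolygon much cheaper. An illustrative sketch:

    from shapely.geometry import Point, Polygon
    from shapely.prepared import prep

    prepared = prep(Polygon([[0, 0], [4, 0], [4, 3], [0, 3]]))  # prepare once
    inside = [prepared.contains(Point(x, 0.5)) for x in range(6)]  # cheap repeated tests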
--- a/python/tests/storage.txt	Tue Nov 03 13:48:56 2015 -0500
+++ b/python/tests/storage.txt	Mon May 09 15:33:11 2016 -0400
@@ -1,5 +1,6 @@
 >>> from storage import *
 >>> from StringIO import StringIO
+>>> from moving import MovingObject, Point, TimeInterval, Trajectory, prepareSplines
 
 >>> f = openCheck('non_existant_file.txt')
 File non_existant_file.txt could not be opened.
@@ -15,6 +16,51 @@
 >>> from os import remove
 >>> remove(nonexistentFilename)
 
+>>> o1 = MovingObject.generate(Point(0.,0.), Point(1.,0.), TimeInterval(0,10))
+>>> o1.num = 2
+>>> o2 = MovingObject.generate(Point(1.,1.), Point(-0.5,-0.2), TimeInterval(0,9))
+>>> o2.num = 3
+>>> saveTrajectoriesToSqlite('test.sqlite', [o1, o2], 'feature')
+>>> objects = loadTrajectoriesFromSqlite('test.sqlite', 'feature')
+>>> objects[0].getNum() == o1.num
+True
+>>> objects[1].getNum() == o2.num
+True
+>>> o1.getTimeInterval() == objects[0].getTimeInterval()
+True
+>>> o2.getTimeInterval() == objects[1].getTimeInterval()
+True
+>>> o1.getVelocities() == objects[0].getVelocities()
+True
+>>> o2.getVelocities() == objects[1].getVelocities()
+True
+>>> o1.getPositions() == objects[0].getPositions()
+True
+>>> o2.getPositions() == objects[1].getPositions()
+True
+>>> align1 = Trajectory.fromPointList([Point(-1, 0), Point(20, 0)])
+>>> align2 = Trajectory.fromPointList([Point(-9, -3), Point(6, 3)])
+>>> align1.computeCumulativeDistances()
+>>> align2.computeCumulativeDistances()
+>>> prepareSplines([align1, align2])
+>>> o1.projectCurvilinear([align1, align2])
+>>> o2.projectCurvilinear([align1, align2])
+>>> saveTrajectoriesToSqlite('test.sqlite', [o1, o2], 'curvilinear')
+>>> addCurvilinearTrajectoriesFromSqlite('test.sqlite', {o.num: o for o in objects})
+>>> o1.curvilinearPositions[3][:2] == objects[0].curvilinearPositions[3][:2]
+True
+>>> o1.curvilinearPositions[7][:2] == objects[0].curvilinearPositions[7][:2]
+True
+>>> [str(l) for l in o1.curvilinearPositions.getLanes()] == objects[0].curvilinearPositions.getLanes()
+True
+>>> o2.curvilinearPositions[2][:2] == objects[1].curvilinearPositions[2][:2]
+True
+>>> o2.curvilinearPositions[6][:2] == objects[1].curvilinearPositions[6][:2]
+True
+>>> [str(l) for l in o2.curvilinearPositions.getLanes()] == objects[1].curvilinearPositions.getLanes()
+True
+>>> remove('test.sqlite')
+
 >>> strio = StringIO('# asdlfjasdlkj0\nsadlkfjsdlakjf')
 >>> readline(strio)
 'sadlkfjsdlakjf'
--- a/python/traffic_engineering.py	Tue Nov 03 13:48:56 2015 -0500
+++ b/python/traffic_engineering.py	Mon May 09 15:33:11 2016 -0400
@@ -54,6 +54,71 @@
     def getDescriptor(self):
         return 'og'
 
+#########################
+# queueing models
+#########################
+
+class CapacityReduction(object):
+    def __init__(self, beta, reductionDuration, demandCapacityRatio = None, demand = None, capacity = None):
+        '''reduction duration should be positive
+        demandCapacityRatio is demand/capacity (q/s)'''
+        if demandCapacityRatio is None and demand is None and capacity is None:
+            print('Missing information: at least one of demand, capacity or their ratio must be provided')
+            import sys
+            sys.exit()
+        if 0 <= beta < 1:
+            self.beta = beta
+            self.reductionDuration = reductionDuration
+
+            # store all inputs (None when absent) so the None checks in nArrived
+            # and nServed cannot raise AttributeError
+            self.demandCapacityRatio = demandCapacityRatio
+            self.demand = demand
+            self.capacity = capacity
+            if capacity is not None and demand is not None:
+                self.demandCapacityRatio = float(demand)/capacity
+                if demand <= beta*capacity:
+                    print('There is no queueing as the demand {} does not exceed the reduced capacity {}'.format(demand, beta*capacity))
+        else:
+            print('reduction coefficient (beta={}) is not in [0, 1)'.format(beta))
+
+    def queueingDuration(self):
+        return self.reductionDuration*(1-self.beta)/(1-self.demandCapacityRatio)
+
+    def nArrived(self, t):
+        if self.demand is None:
+            print('Missing demand field')
+            return None
+        return self.demand*t
+
+    def nServed(self, t):
+        if self.capacity is None:
+            print('Missing capacity field')
+            return None
+        if 0<=t<=self.reductionDuration:
+            return self.beta*self.capacity*t
+        elif self.reductionDuration < t <= self.queueingDuration():
+            return self.beta*self.capacity*self.reductionDuration+self.capacity*(t-self.reductionDuration)
+
+    def nQueued(self, t):
+        return self.nArrived(t)-self.nServed(t)
+
+    def maxNQueued(self):
+        return self.nQueued(self.reductionDuration)
+
+    def totalDelay(self):
+        if self.capacity is None:
+            print('Missing capacity field')
+            return None
+        return self.capacity*self.reductionDuration**2*(1-self.beta)*(self.demandCapacityRatio-self.beta)/(2*(1-self.demandCapacityRatio))
+    
+    def averageDelay(self):
+        return self.reductionDuration*(self.demandCapacityRatio-self.beta)/(2*self.demandCapacityRatio)
+
+    def averageNQueued(self):
+        return self.totalDelay()/self.queueingDuration()
+
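A quick numeric check of the new queueing model (illustrative values): with demand 1000 veh/h, capacity 1500 veh/h and the capacity halved (beta = 0.5) for half an hour, the demand/capacity ratio is 2/3 and the formulas give:

    from traffic_engineering import CapacityReduction

    cr = CapacityReduction(beta = 0.5, reductionDuration = 0.5, demand = 1000., capacity = 1500.)
    print(cr.queueingDuration()) # 0.75 h: the queue clears 0.25 h after capacity is restored
    print(cr.maxNQueued())       # 125.0 vehicles queued when the reduction ends
    print(cr.totalDelay())       # 46.875 veh.h, i.e. 62.5 vehicles queued on average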
 
 #########################
 # fundamental diagram
@@ -246,8 +311,8 @@
     '''Computes the uniform delay'''
     return 0.5*cycleLength*(1-float(effectiveGreen)/cycleLength)/(1-float(effectiveGreen*saturationDegree)/cycleLength)
 
-def overflowDelay(T, X, c, k=0.5, I=1):
-    '''Computes the overflow delay (HCM)
+def incrementalDelay(T, X, c, k=0.5, I=1):
+    '''Computes the incremental delay (HCM)
     T in hours
     c capacity of the lane group
     k default for fixed time signal
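For context, since the function body falls outside this hunk: the quantity computed is the HCM incremental delay, d2 = 900 T [ (X - 1) + sqrt( (X - 1)^2 + 8 k I X / (c T) ) ], with X the degree of saturation; 'incremental delay' is the HCM term, which motivates the rename from 'overflow delay'.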
@@ -260,7 +325,9 @@
 #########################
 
 def timeChangingSpeed(v0, vf, a, TPR):
-    return TPR+(vf-v0)/a
+    'for decelerations (vf < v0), a is the deceleration magnitude, a > 0'
+    return TPR-(vf-v0)/a
 
 def distanceChangingSpeed(v0, vf, a, TPR):
-    return TPR*v0+(vf*vf-v0*v0)/(2*a)
+    'for decelerations (vf < v0), a is the deceleration magnitude, a > 0'
+    return TPR*v0-(vf**2-v0**2)/(2*a)
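A sanity check of the corrected sign convention (illustrative values): braking from 20 m/s to a stop at a deceleration magnitude of 4 m/s^2 after a 1.5 s perception-reaction time:

    from traffic_engineering import timeChangingSpeed, distanceChangingSpeed

    print(timeChangingSpeed(20., 0., 4., 1.5))     # 6.5 s  (1.5 s reaction + 5 s braking)
    print(distanceChangingSpeed(20., 0., 4., 1.5)) # 80.0 m (30 m reaction + 50 m braking)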
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/samples/visualize-monresovelo.ipynb	Mon May 09 15:33:11 2016 -0400
@@ -0,0 +1,165 @@
+{
+ "metadata": {
+  "name": "",
+  "signature": "sha256:41993ae5abde8891ff6d0f0e629cee32073532fd0e17b4f7181a43a70dbecd73"
+ },
+ "nbformat": 3,
+ "nbformat_minor": 0,
+ "worksheets": [
+  {
+   "cells": [
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "%matplotlib inline\n",
+      "# -*- coding: utf-8 -*-\n",
+      "import simplejson, sys, datetime\n",
+      "import moving, utils # from Traffic Intelligence https://bitbucket.org/Nicolas/trafficintelligence\n",
+      "from pyproj import Proj\n",
+      "\n",
+      "import matplotlib.pyplot as plt\n",
+      "import matplotlib.mlab as pylab\n",
+      "import numpy as np\n",
+      "import pandas as pd\n",
+      "\n",
+      "english2French = {'Commute': 'Domicile-travail',\n",
+      "                  'Errand': 'Courses',\n",
+      "                  'Exercise': 'Sport',\n",
+      "                  'Leisure': 'Loisirs',\n",
+      "                  'Other': 'Autre',\n",
+      "                  'Autres': 'Autre',\n",
+      "                  'Autres motifs': 'Autre',\n",
+      "                  'School': u'\u00c9cole',\n",
+      "                  'Shopping': 'Magasinage',\n",
+      "                  'Work-Related': 'Travail',\n",
+      "                  'Work-related': 'Travail',\n",
+      "                  'Other': 'Autre',\n",
+      "                  'other': 'Autre'}\n",
+      "\n",
+      "odMotifs = ['Magasinage', 'Retour au domicile', \"Chercher quelqu'un\",\n",
+      "            'Travail ', '\\xc9tude / \\xc9cole', 'Autre', 'Loisir',\n",
+      "            \"Visites d'ami(e)s et/ou de la parent\\xe9\", \"Reconduire quelqu'un\",\n",
+      "            \"Rendez-vous d'affaires\", 'Sant\\xe9',\n",
+      "            'Ind\\xe9termin\\xe9 / refus / NSP']\n",
+      "\n",
+      "def convertJsonToMTM(data = None, filename = None):\n",
+      "    'Converts the in put json data to MTM and optionally saves to file'\n",
+      "    proj = Proj(init=\"epsg:2950\")\n",
+      "    if data is None:\n",
+      "        data = simplejson.load(open(filename))\n",
+      "    for i in xrange(len(data['features'])):\n",
+      "        latlon = data['features'][i]['geometry']['coordinates']\n",
+      "        mtm = [proj(p[0], p[1]) for p in latlon]\n",
+      "        data['features'][i]['geometry']['coordinates'] = mtm\n",
+      "    if filename is not None:\n",
+      "        simplejson.dump(data, open(utils.removeExtension(filename)+'-mtm.json', 'w'))\n",
+      "    return data\n",
+      "\n",
+      "def convertToObjects(data, timeStep = 1, project = True, minLength = 10):\n",
+      "    'Converts the trips to the moving.MovingObject class from Traffic Intelligence'\n",
+      "    if project:\n",
+      "        proj = Proj(init=\"epsg:2950\")\n",
+      "    objects = []\n",
+      "    nTrips = len(data['features'])\n",
+      "    for i in xrange(nTrips):\n",
+      "        latlon = data['features'][i]['geometry']['coordinates']\n",
+      "        if project:\n",
+      "            projectedX = [proj(p[0], p[1]) for p in latlon[::timeStep]]\n",
+      "        else:\n",
+      "            projectedX = latlon[::timeStep]\n",
+      "        o = moving.MovingObject(num = i, positions = moving.Trajectory(np.array(projectedX).T.tolist()))\n",
+      "        o.properties = data['features'][i]['properties']\n",
+      "        o.motif = english2French.get(o.properties['purpose'], o.properties['purpose'])\n",
+      "        try:\n",
+      "            o.start = datetime.datetime.strptime(o.properties['start'], '%Y-%m-%d %H:%M:%S')\n",
+      "            o.stop = datetime.datetime.strptime(o.properties['stop'], '%Y-%m-%d %H:%M:%S')\n",
+      "            if o.positions.length() > minLength:\n",
+      "                objects.append(o)\n",
+      "        except TypeError:\n",
+      "            print('{} {}'.format(o.properties['start'], o.properties['stop']))\n",
+      "            print('issue with {}'.format(o.properties))\n",
+      "            #o.start = datetime.datetime(1979, 7, 21)\n",
+      "            #o.stop = o.start\n",
+      "            #print(e)\n",
+      "    return objects\n",
+      "\n",
+      "def printMRV(objects, motifs, color, linewidth, alpha, blackBG = False):\n",
+      "    fig = plt.figure()\n",
+      "    for o in objects:\n",
+      "        if o.motif in motifs:\n",
+      "            o.plot(color, linewidth = linewidth, alpha = alpha)\n",
+      "    plt.axis('equal')\n",
+      "    plt.axis('off')\n",
+      "    plt.tight_layout()\n",
+      "    if blackBG:\n",
+      "        fig.patch.set_facecolor('black')\n",
+      "        plt.savefig(u'mrv-'+u'-'.join(motifs).replace(' ', '-')+'-'+color+'-blackbg.png', dpi = 300, facecolor=fig.get_facecolor(), edgecolor='none')\n",
+      "    else:\n",
+      "        plt.savefig(u'mrv-'+u'-'.join(motifs).replace(' ', '-')+'-'+color+u'.png', dpi = 300)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 6
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "# load the data\n",
+      "filename = './trip5000.json'\n",
+      "data = simplejson.load(open(filename))\n",
+      "\n",
+      "#convertJsonToMTM(data)\n",
+      "#objects = convertToObjects(data, project = False, minLength = 10)\n",
+      "objects = convertToObjects(data, minLength = 10)\n",
+      "colors = utils.PlottingPropertyValues('rk')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "None None\n",
+        "issue with {'stop': None, 'id_origine': 9795, 'start': None, 'length': 1181, 'purpose': 'Commute', 'liste_segments_jsonb': [{'source': 'RES_CYCL_2015', 'id': 4824}, {'source': 'RES_CYCL_2015', 'id': 5513}, {'source': 'RES_CYCL_2015', 'id': 4177}, {'source': 'RES_CYCL_2015', 'id': 4094}], 'n_coord': 1216, 'id': 9795}\n"
+       ]
+      }
+     ],
+     "prompt_number": 7
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "allmotifs = set([o.motif for o in objects])\n",
+      "printMRV(objects, allmotifs, 'r', 0.5, 0.1, True)\n",
+      "# visualize all trip purposes\n",
+      "#for i, m in enumerate(allmotifs):\n",
+      "#    printMRV(objects, [m], colors[i], 0.5, 0.1)\n",
+      "\n",
+      "#printMRV(objects, ['Aller au travail', 'Domicile-travail'], 'k', 0.5, 0.1)\n",
+      "#printMRV(objects, ['Travail', u'D\\xe9placement professionnel'], 'k', 0.5, 0.1)\n",
+      "#printMRV(objects, ['Aller au travail', 'Domicile-travail', 'Travail', u'D\\xe9placement professionnel'], 'k', 0.5, 0.1)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "display_data",
+       "png": "iVBORw0KGgoAAAANSUhEUgAAAagAAAEbCAYAAACP7BAbAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnXeYZFd17X/VOU/OMxrNjEYaZQkxygkhiWDJIotsAfaz\nsQm2wQRjMH424GyMDdjYZIQlksAiKYAkQAJJSBrlOGKUJseeTjMd6v2xzn7nVvWt1F3dXT291/f1\n190V7j333Kq9zt57nb0zQBaHw+FwOGoMdVM9AIfD4XA40uAE5XA4HI6ahBOUw+FwOGoSTlAOh8Ph\nqEk4QTkcDoejJuEE5XA4HI6ahBOUw+FwOGoSTlAOh8PhqEk4QTkcDoejJuEE5XA4HI6ahBOUw+Fw\nOGoSTlAOh8PhqEk4QTkcDoejJuEE5XA4HI6ahBOUw+FwOGoSTlAOh8PhqEk4QTkcDoejJuEE5XA4\nHI6ahBOUw+FwOGoSTlAOh8PhqEk4QTkcDoejJuEE5XA4HI6ahBOUw+FwOGoSTlAOh8PhqEk4QTkc\nDoejJuEE5XA4HI6ahBOUw+FwOGoSTlAOh8PhqEk4QTkcDoejJuEE5XA4HI6ahBOUw+FwOGoSTlAO\nh8PhqEk4QTkcDoejJuEE5XA4HI6ahBOUw+FwOGoSTlAOh8PhqEk4QTkcDoejJuEE5XA4qopmoHWq\nB+E4JNAw1QNwOByHFuYCw0AG6JvisTimN5ygHA5HVZEFtgMLgA5gBIVqdgNDUzgux/SDE5TD4agq\nsuH3DuRFGZYBz07+cBzTGBni58nhcDjGhDpgFso9bUVeUz66gBbkXTkc5cAJyuFwjBkdQCcK3e0F\nBgu8biFwMLzG4SgXTlAOh2NMWAz0AvtLvG4psBMRlMNRCZygHA5HRWhGAoitlBY9tAH1lCYxhyMN\nLpJwOBxF0YRk4x1AO5KOlyt2qMOVe46xwwnK4XCkogXtaepDRNMH7KvwGCPkKvkcjkrgBOWoKTSi\nFbv99KA8h2Py0IBCeAPA5nEea4RoZFqB/nEezzGz4Dkox5RhMbmr6yxKpCd/HGNHAzAH5YAM9mW3\nL34vIqIMCt+1IiXeziqNoQmYD6wDHsAl5o7K4B6UY9LRgAzhEPKQhlA4yUlp/GhDsu8Mmsud5JJS\nM5r/ATTvreH1I4isypWBNwPHI/HDbyh834aRxHw7qiThcFQCJyjHpKIDGcb95Cq7ehLPpW3ynK5o\nBg5M0rmWoDndlvd4I1oADIexDCBiaiESU3Lem8Lz+xl9L5pRXupU4CbgNciIPIY8MBNE9ITfbWiD\n7oO4WMJROTzE55g0dKBEezEC6gK6J2c4ZcPyYVlkZIfDT6kvTjPyTg4QxQI9Rd8hMhlM+bsUFgN7\nyCVDk3gfoLhn2hB+LBRoeaPkAiIDnByeywIPJ475BkRyGeAOJKToIZLiacCjqPSRw1EJnKAck4IO\nZChLGdxMeO1U75tpQATRQK6Br0/81JGbQ0teX1N4b7KadweFCao1nG848VgWEcJAyutbEFnOC2PY\nEd7bEJ7LIkFCpd6ohV/z5/9c4G7kJeV7aADHhHGsAH4cHrPrvRj4FbW38HDUPjzE55hQ1BENVTnG\n0gzrVCq+uhApHEgZg3lPaWhC1woiqvxWE73Io7IVoVX5tmtOu96WcEwTkGTRl3ZhOJ6RRTPymA5S\n2ksrBFNQ5pPTvHAeyxfWMfpePgRcCvws/J8Jr2lEZO7qPcdY4B6UY8LQjAzUWAymGdvJzFsYme5n\n4r8UGTQ3lYhCmsLvNJIs5p2VgzYiUebjYuAWRNidFPZu3x5e80vgSURQpyH1ntfgc4wFTlCOCUEH\n41flFTOGE4HxGvmpxFjHnkHzXMjDPRxYBDyCvNo6Cu+NWoTyT38N3IA8vMcQ6U3XeXVMLTzE56g6\nOpFRGq/3s5/JI6mZulJrp3hu6AXA15H31lfkta3h+QEkJ7+NSEodBd7jcJSCE5SjquhEK+dqScX7\nUfipGq3D61HYMUN6+R1f5ediPfAUyoNZTq4Q2oFdKY83UL4S0eHIhxOUo2qwUFE1PZEhoqJuEK3U\n68Pjaeq2QrANql42KSJDYcHHscAq4BsljmF7rNqJ1SeWhMd6kOflAgnHWOEE5agKzHOaiDDZAFHJ\nZvtzyi1A2kqUijs55aKJdO+mHjgOuLrIezvD67qQd5u8H3cjktpJVCk6HGNB3VQPwDH90UnpDbjj\nRQ9arVs+qpTRM3n2wfAeL6GUizpEUGl5wlcBPyzyXqvbN4LyTT3kemIrgKerM0zHDId7UI5xwapD\nFAoVVRPlhIpso+8AlYUApzsqaWlhlTHSxCeLEHkVEqbUoQXJNuIiYRa5RWBta4H3gnKMF+5BOcaM\nNkQCk0FO5aAZrez3M/MS81bLsBRaUGguTRDSBrya4nmnJkaHclsZLaDIhte65+oYD9yDcowZGWpn\nhdyGQk4zVYmXRSRhirs0tFK4dNI64ALgixRfcDQVOb49P0ys5zeTvFhH9eEE5RgTmqkd41Nunb9D\nHYPE0kL5JGOVy/M9nQwipiOBL1Fczt+Y8v78MN5LgA2VDNrhKAInKMeYkFaPbSpg7SxmOjkZ+hhd\nEb6V6GEZLkBeUz8qS/RZSocI0/Jc7eSS2pxwPKvF53CMB05QjjGhntqQDxdK9s9kWAWOXqKIJenl\nrEfzdiXytCws2osIp5Ac/yBRHTkSjtuAFittqKhsM7AJzz85qgMXSTimLaZz7byJRBaF42YhTypJ\nTicBa1BLjG4Kt4MvhAFkNKwViYUT+9C+J2s62Ujt5Ccd0xdOUI5pC0vEOyLq0SbZLGpgmMSlwDLg\nqvB/B6Ol+33IGyqENqJScoTcjsHrkPc1b4xjdzjy4d9vx5hQC+G9PmQsrdPtTEUXmgcQaWwnVyTR\nArwOuAu4L/H4EKPnLRt+CuUY65HnNRLOOUjM/x0GfBt4PspDORzjhROUY1rD8iZ1zMycxxwUktuS\n8tyZKJxXh/JN+fPTT3rOqY/R4VPrldVLJD/739CC1ICzUs7lcIwFTlCOMaGSygUTjV7iPqiZ5EnN\nQ2SRH8oDeDPwLGqVUWhf0zwKNxK0br8jxM29+a028luUXI0EGPNxVaWjOvAclOOQwABKzM8ULEJe\nSj7BdADvROG8n1K6ykeh5/uQErAzvCZN2VeX8v5nEKH5ytdRDfjnyHFIwFb8MwFLEDHlCxxOA04E\nPsPoHFT+pmprbV8M+UVgDRnkfaW1PNmKPLqzgZtLHN/hKAUnKMeYUAsiiXzUUthxIpABFqPGgPk5\nnsOB1cDnCrzP+mlZJfLBlGMYWhi9sdfQiRYD9eR6VRYObAjjW1jqYhyOMuAE5agYGWaOt1IrqEdG\nP1+hByKn4ynssfQT+zc1UHjvmJ1jBFUrz4eVOjqIvDhbEGSBpeH3ZuRZ7QXmonYcDsdY4XbGUTGy\nxByFY+IxGxHHVkaT03FIqXctsA/tc
0rDfkQchcipHe1p2oKIaBWjPdJGotdlooke5EltQl7ZCYis\nbgHOKnVhDkcJOEE5xgSrHlBOi4fJQHLD6KGCVuSpDCHiyA+rnh1e85Pw/3DifWlIUzhaV9x+Yk29\nZ1HDwSPIDdWZsZiFyCpZg28QeAp4gFgGy8MzjvHCCcoxZgyjJHl7qRdOMBqJOZZDBXPQvG4h3es5\nM/y+O/y28N1zyONaUsY5WsKPbbxNYhh4HM3pkai2XvJ9e5GScBGRiOoRCXYDRyNFn8MxHuRvZXA4\nKkYDMlpTURevKZy/WJuI6YY56EuZtkcpA1yGclEbiXugmqhs/jtQuK7cDbWHIdI6iIQaj4a/O8Pv\nBrQX7QDw4jD+H3Bo3RfH5MO9cMe4YXLjqSjeWqlhrmXMQV/IQptvAV6OxBC7kdd4GArJVTIHXeH1\nlbTDeDr8Non5QmJR2L2IiDLhuPXEKuoOx3jgIT5HVTCE8hiTmZNq49BaYb0NhcwKKd/WAw8jL6UV\nCRmeofzcWwaRkxV6HQvMW30WiTZGEFkdjwQbJ6CQYH7VCYdjLDiUvt+OKcYwUdI80T2arA7cobJK\nPxIVcp1d4PkTUD7oSWA5muvHKjh+PSL08RJHE2qrsRKp9UZQqHF7eG4fcCESrTgc44V7UI6qwsri\nTKQE3cJgh0ry9Ojw+3oUMluc9/ylqL7ddkRke5Csu1wYOVVj0ZBBi4KngF8ij24QEdIwqmSRJbdq\nusMxVjhBOaqOEURSXVS/ukMGhbfyy/xMVxxJrjd0E1Lo2RfzDeia70ek/zCVbX5tRPM1UR6t7YVq\nDn+fhzysDRN0PsfMgqv4HBOKdkRY1SKUlciDOBRyHCsReTyS93gLcDHypO4HHkQeVKU9lhpR2K1Q\nC/exoJAQZjHwPORNzQG+UcVzOmYu3INyTCh6kdGqVshvK/LMpjvmAQsYTU6gjbBvROTyACKsSsmp\nCRFUNcmpGBoRcb0K+NYkndNx6MNFEo4JxxAx5Ddez+cAMtiGNrQPZ7r1gToa+EXK4wuBf0b9nMwD\n2l7hsZvRyrPaAhLbiJuPJnRvV6GNwmNVCDoc+XCCckwKRlAepBok1UyUs/cx/QziOpRLysdC4B+A\nK5DXmd/KohwYeU9Ejq6FdI+sGUnMFyOCcjiqBQ/xOSYNWSJJjQWzkLR5ERIWVLrZtFYwF7WkSKIT\neU6/z9hLNrWiOR4LsZWDQoKX44HXAD8GPj9B53bMTLgH5ZhUZJEHVa4n1YQMOqhiwT60ij8XuG4i\nBjjBSFPULQc+Avw5Y/9CtqHwW7mliypFofAewIeBl+JqK0f14QTlmBIYSe2nsGFrROS0Ne/xfSis\nVI1wYSGkdaGtBo4EHkr8fxbwLuB30VwU2qhbDBNNTqD5TstpfRz4L5ycHBMDD/E5pgzdFFf3LWA0\nORl+hFbt1UY9GtNEhck6iCG8NuCdwOUodJkhvUBsMUwGOUFhQ3Ec8J0JPrdj5sIJyjGl6CW9f1Gp\nwrODqKL26VUcSwvyFCZqU+tcYEfi/xuB94e/H0XelRFkOSjVur1aqGN0o0TQXq2HUh4fz3kM3gzT\nAU5QjinGMKPjzA2IoEqF7+4BViAvYjxoQgKMESa2tt9RxIoRpyKlnfVuqkMVGJZRHkFau4zJ6IFV\nKNx5BfD3VTzPvPC7icJNFx0zC05QjinHIPIcQB/IRRQO7eXju8DLxnFu2zO0j4n1RJaQK/3+G2Tg\nn0OhzA5ETHsp/aXsDMcaDzlVQgDWITcf1e7DZSQ4j1xP0zFz4QTlmHIMIIPZhHIxleylGQR+BVw0\nhvNaF9pq5Zvq0F6g5cQvVl34/wCxpxJI4PEM8pp6w1gyyKNMk85nkKdoVdzTQm6VoIm4KEgiuQk6\ng+Ynjbifh7r9VpPUF6L524OLLhyCq/gcNYE2ZDSfHcN7n0SVGZZX8P5qVvgG1cqrQ55fPQrdDaDQ\n4bOIWKxt+kWoGvgslGfbCJyMyh6lSblbwzF7qZ7h3oeuv47cXN/scA0tyDisRLmugfC6reHvN6KW\n8NXeh1au5+yYGfBisY4pRwv6EJbbeK8QXgtcVeZrZ1O5Yi4NneFnO7nkchSwLXGOtvCzE7gVNSfM\nr8P3PJSjMiJqRSTRz8SUcqpHnlwWEc0IIqSHiEahg1hBfh4iq0XhvW9EG3SrhTWIrB0Og3tQjilH\nI9XxZG4BfgcZ9GLVtM0jGC8WoRzM5pTnHs3736owfAYRQVqR2CeQSKIHeTj9TFxbkTUoTNiIQmo7\nw+NPolJMc9GCYQcizF2IJHejMNwZVF4j0OGoFJ6Dckw5qvUh3AJ8ucRrbI/TeAmqExnucom1HZHN\nsUgc0RAe6yR6KQuR52WVwcvJM9UT6xKWi3o050tQ2HFB4rl+VCfwVpRfOhp5dmvCWE3Rdzy5ObVq\nYDqWrXJMLNyDckwZLM9R7ZYQhWrGdSLDX42YdiOVCQRmERsNHofGuI3YhXYQhdaeQHu7tlGe99RO\nrhz/DNTpthjqkDc0kBhDPhrDc5ZnugCFKAeBG1C+79oyxlcuCikFHTMb7kE5pgR1yCj1UDq/srLC\nY9+M2qSnoRpGsAORjakPS6ENhQI/hQqq3kYUIByBwoFPoDnpQD2g1of3NYfXppFuG6M3M/cBbwEO\nLzCWJUjQsQe4FxFnmhfYjMioAV3jRiTp3w28Lhx/W+FLrhhdYUwORxJOUI5JRyta+ZcriqjUzTfv\nY03e4wPkyqjHAjPcIGJNk2on0YWa+G1FYbH9wPkox/QMIiMLbfUkfu5BBDaEvqQtiLzsp4uoDGxE\nXWwbEOlcBZyDKj3kX++W8LM3vLe5yNitz9ZFyMs6FeWdHiGKKqqF2WhOHI4knKAck4pOZPj2U74y\nbSewGliLDO7h5BpW8zySuBF5IUnPwzYElyKVQmgNx0sSazGSakKhsSuB5yNRxa9QKG8LIsxGRDZt\nSHzQHt67P4y3Fc1XP7kE1hveb+G5g6iqxsownmsQgbwceU02xuMT52hCc9eI5s+MQSvyrPrDmPaj\n+7YShfz2hWPPKjxVFcMNkSMN/rlwTBpMWFAq+Z//odyH1GXdyFPYioy9oY30fM2PgFfkPdZLNNCV\nYCEy4klxRQsy/IWu58XA98PzvxXO3YkIAUQqhyGPb0/4nczHPYFCgMlrNQyj+bCuu71og7PtV+oB\nform7hQ0byvC604N530RIlt7zwK0UXol8sYWI2l5AyKkg2EOtqF9XIsLXHelaGTiivM6pjfqgY9O\n9SAchz6MnEqFhTqQMR1mtIfVS8xZWR6lWMv3A0gu3UWUUWeQQWymfJHDvMSxbONgB9G4p+Ek5CXt\nQB7fJ4HziCWVFoUx70NfwkIlg7aG864kXdY9iOaqHXmZv0k8lw3/vwGF9HYTc1a2gfhZYp6rITGe\nvehezUFFbLOI8HqQhzUfhSqvKTDuSrAsXKer+Bz5cA/KMeFoQR5OOQaoHRmrxhKvG0Z5i0GK16
S7\nFYX65iBSMaLsozxPah4xJLkRfWEGkAEvdt6VxMKwvwbeFI6xBRn3XuSxmFCkGJ5BeZ/nF3h+BHkz\n29H1JXN2Q2j++xBpnopEGXeH3yZp7w/XtiJcWz8iq0ZEZuuJZDyA5PBbKKyYrATFmiE6ZjacoBwT\njgbKN0Bm8EqR2SYkgiinYOr/AL+NDHE38gaGiRtVC8E2qyZVbrsove/obOD28PctKAf1HCKJYbR/\nyMKF+RL7JaSLQgYR4R2V93gHyivNRt7a/nCMw9BcnoKEGKuJXlFyboeIreKHiJuDrRRUfTjevYi8\nbM9UMxJyjFcVWYdXLncUhhOUo6ZgQgaTOBfDw2hPUSkcQMKEc/IeN1VfmhfQhcgk37tppTgpdhC9\nQJBn83HiZlwr6bQVEUB+/mo78trSjHY3uQZ9LvJsBoG7Eq/bgwhxESLLLwPfRnuk7sw7Zi/yEDsR\n+XQjT20uIjsjuu+F42bC9X2O0fm9SlGPwof5VTccDoMTlKOmMII+lE1oRV+MpPqQ0VxaxnHvQJtL\n8xP7plBLog6RR77s2RSIxeTxFwDXh7/PCuPrQCE663e0L/ykeR9tSISwIJwv/wv6MKruAPKc7kKl\nluxY7UQhiuWLDAfQHByed8xhNA/mXVnb+QzaT/aviXEPIw/r0XCusSoiQeRkknWHIw1OUI4JRyVh\nIPNQBpBhryca9jQ8h1b75exv+h/gEkaH9fKVfQsZvQnV8iTFQpVrw3jsen8P2IBIrT3lvHOQSs++\nhFZGaAHy4PaHx/IbMg4Abw7HTlaSME/Qzr88PLcceGF4/f0oNJqUlYNI6WkkfLgNzcksRJKbEq+z\nkKfNw1jbfhyOF4Z1lIYTlGNCUUkCfA7K8YBIyurRlWqK9wDyJsrB1YyuwG2KwWaiZD2fVEco7i1k\ngGPIDbWdhEQaGVSA1a6tHXkPd4ZjNhJ7Lw2iXNLS8P4+RApd4TW2V+qn4fmksu89iNBMfLEsnP9y\nRAj3EnNVPeF6TVSxFBFpa3huBFUr35Q4fvJeNjF+z2eiW9U7pj+coBwTBstXlGuImonhs3rixtxy\natLdB5xQYAzzE//vR4TUlfe6A8h4F6poUBeOVajk0EXATYnHrFrGCJK4P4fagbQg0cGj4blHw/8r\nyc13XY9ECU3EcF0LyrndichuIyLmY1A/qRcAHwm/P4wI6k3Ap1Go0apY7CaGNfeHuWhE+6JuQwuF\nYxBJP5wYUxPx/nwZ+KOUuSgHC/GOuY7y4ATlmBC0EMNP5Yb4BpCxXoyM5BZkzA6ntNLLWkMsTzxW\nhwzxXnLDZNegjbNJdCHvpVB1cvNgkvmqeuBcFDYcQteaCa+9E/gS2oc0nyj3vgRtwE1iEwopLiE3\nDPhDRGqLEZlZ999ZwM/QHqYNKI9zD/AfSDRxDZK2L0dk+3+AX6Cw3hAitz7ixtuTEYEdFZ6fh7yz\nvcjrag/XZQVdL0VzPdZq5lnGl7tyzBw4QTmqjnbSFXClsBcZ6q3I6+hBFSQ2oVxNKWxBxtuqpLch\n0hhCBt462h5ABtf+t/zQbtJFE5Cb5zFxwFkohPZrFHIjnPMKRLB7kAcyF5HFGuC6AmPvT4zfcm5t\nwFeQZzOHSDhXI0/JMBKu+e5w7vsRsZwN/DVxX9O7kFS9H4X0VhNl7iNo3veifVc9aM5vRWRm7ebr\ngT8E/qDAdRSDlUbaQa5X63AUghOUo6poRORUzv6kNBTKazxHeUbtQRT2aiKXIAeItedAXsbrkWe2\nmpgfgtGbeBvJ3eDbiEQHtyJvIOkhLg3HakAS7WOJXsc3iF5doQ2uOxFJLkMeTgsq/vr2cA1zw/lv\nyXvfUjRHz6Kw4eeQh3M2ytENEknucuQtrQQ+GMZ6HArrNaEQ4U/Cce36bOPuJ9G+rkqxKIxxKdXJ\nXzlmBrwflKOqaKE63XGTqENhrofKfP1B0oUVPSj81k3ckHomubkjyBVNHEDGfYS4ufcAMtomSLBQ\n10Wot9Pbga8Bfwv8CyK2n4TzHSCWJjpIen7uICIbC0sehxSIK4G/R9Ug6hHRZYkt2UHNBf8KhQdf\ngTyq2xARfRHNYTMKnfYgwtob3nM5sSrFWUSvEEQqTYjM35ky5mJoQ+HLDWjOjgl/Oxyl4B6Uo2oo\nV9BQKUaorBTODkQUaeghekctKDeTtpo/gFZvlhNKhv76kVrvLGKx2IZw7GvD8a9Ehv6zyDifT8yt\nmUKxnuLllvrCawaIuSz7wtrepR4UEnwUheS6kYDhc2F8P0BqvEtQ3u3N4dqeQYT6OCLZAZSLOh2F\nAZ8P/DwxlgZEulcUGW8aMmjf1gZiF+H7KzyGY+bCCcpRFTQSO7VOBHpQ6K6c5PpmlLc6DpHDCuRh\ndBK9jgYUJjuzyHF6iQ37LMzViryO54VjmaptCOWc1qN5eA6pCl+HqjFfCaxC3tUyYgv4/nCcQn2Z\nziQWgL0O5YnuRoKIryOxxH8T+0YZCX4LeVovRCKTb6JWH0nP8nAUpnw18JfIk3phGNNzeeN4CZrX\nStV3JyJCsgK9uxj73inHzIOH+BzjQj3RuJbarzQebA4/65BqrRT2odW7hdCWo1CcbYJ9Kvx/Xonj\n9CNPqykcazaqb/cgUuMlq1g8gjbn9iJi7A3v3Yc8m93IOF8e/v858mDMO7O8WTKn1RIeWxvGfAkS\nOywJx/04UhI+hQhmOSJkK6b7T8hD/DDKA30nHPdS5AUuDvPRh0QQK4BXAn+TNw9/gMirEhyJ5ngw\nHLvaoV/HoQ8nKMeY0Eb0mNI2tk4UNqGV/6bw/xGMlm0b9hENoxVofRaR6jpEMn3k7r9KwwDyeIbC\n350oRDYLkUISr0KhsA4UUvt0ON/2cM79KNx1KpJ/dyEyWIvCdK3hNRYqvR6F4obQHFuI7Opw3BZE\ndCciQUR9GNvPkPR8FiLl24BfEsN2x6Ic2T+Gx7PA24B3IKK0sk4jyHOt1HNqRcbF9lzlF8V1OMqB\nE5SjJKyytZGQhbumQok1gAz4IiRJ34nq3/005bVZREBdaLwHiTmjJuRh/QQRwPcT72tktAqxl9i3\n6Kbwnl+TXq/vOhTumxvOawVwW8MxMkjsYHXvPooI8tWI3LaHx+8N798QjnV2uJYHkQLxEUQ87wbe\ni7yoc1DO52Mo79WIJOk/RmS3C3Ub/nIY78lEz6g9cT11KLfVGd7/b1SGdcRSTH24as8xNjhBOYrC\nCqfWUnhmF/Ki2oj7dtYzulI3xM6zIKIwFd+TKFT3a6RoS6Ip8XqIBL0ZeUZbUBjshsR7/gF5RN9H\nEu5OJGXPhPetRDmpBcBniAZ7L/DHieP8KyKdHyJSakb5oL0oLLcF5bX6EJH9HsovPYnCd/tQYdyN\niMCuQl7chSjM14n2RjWgvJO1BXkrUVpOGN+O8LMGeWTlYjUiqIcp3nHY4SgFJyhHSSTDd2nexVRg\nEyKCTcirWY7yMltKvG8EGX7LCy1H0uszUKgLYu7Ic
jNGhFlE2Jbz6UTG99ZwzOuQcu7c8PrXofzR\nmYgITkAqvxOR95emZttE3DDch7yueeHnWOB/EUm+LIz9KeRBZVDIbj4imwHkMX0jjO+XYWwj4Tre\niIj5SqRk3BmOCbEtCOF6K1VSnYfaexyNPMF8Gb/DUS5cxTcDsRCtcsvBCCIkqyFXSx+Yx5GR7kVG\n8HSi95MP6wzbiIz+MJJTW+fb5eFvg+XXOonihiZEAEeh2n8DKCQ3G3lwH0Be1XUolHcLqldXh3JM\nm1Fe6JFw3N9ndJfcfwnH/1MkWOhEZPYu5PkMo/Dhh8N4h1G48U3Iw1qH9hl1IZIYDOc9CakI3xWu\n40Ykgd+O9kgdhvZJHRfOb1Ufria2DykHJ6GcYHe41m2UX8jX4chHcrHkmAHIIGHBMyhM9gRRGn4k\nIqRdyAga5iGjZiVzaglNaHyNaNyXIfl1PuqIYbV6FGrbGv4+Jzzfj0Jjnej6+xEhNROJIYuM/ydR\nnTyTk5+AQow/QmGJPwrv+wqSfe9Fi4I6RAL70b14Xnj+62j+u8Ix56ASR19DZPRTtFn3MmI+bAUK\n8TWi+/k9lHcaAv6OWBV9b/h7Y7jebSiUeDzy7m5BOasTkZe1BJHor8Nx1hac/dFz/DuI6Awd4XhZ\nCotZHI7IszpMAAAgAElEQVRCqKUFsWMSsAYZqgFkhJaiVfwpyDBuRivtFeH1JiHfQu2RE0j4sA2p\n9FqRdPq14blka/Zkkt6a7i0Nf9+NSGUEGeiNyGs6iAy/taewPVRHIsL4cXjfS5ER/yTaGFuHSOGx\ncLxjwnl3huN1IGLYi/I+NyAPaC0SFvQQq1O8DXmH9yMiXYZyO5cg76cd5bBuCc8fgcj3Nygk2ISU\ni8eE91i5qEWIOG5He5y+Gs7zE0R4r0S5qzSyL4SXMjpXlUGe7qLw/yzg/Yy/G69jZsA9qEMYRyDj\nNBD+n4OMz/a813UyWgSxDnkYwynP1TpeisJUe5BH8qkCr2tHBLMv/G1VuncRS/G0I4Wddfc9HxH5\nw4hY1iB5+A6Ue7kfeVGPEnNOF6E8UAPaT/Qq9MW7HhFQYzj+3DDmdWh/UjOSoz8UfiwftQ7dlw8h\n2XgvUgJ+By0wvoX2WrWhMJ5tBL4/zMdnkQhjAHl9twD/hUKGLchz+g0iqTXh+ku1TKlDVSq+lPd4\nByLcFYg4L0L34yWIkP+Jidvc7Zj+cII6hFGPjM4AMiDtyHAm0YBW2WmbbE9GeYSxoJWp8bjqEBGP\nIOJZjIjnP1NeewQynv2IhBcjQmhAnlB9OM4B5I0sQJ6TiSV2orDbzYgsDg+PbwDeR2xdcQnwfxHx\nrUAG+n3Ie9mJQmu94b3L0P1ai0jmXuRpHRV+X4sEDi8Pj30zPG7NBb+C7vHqcN1LUI7sC+HaehGB\nfxdVo3gV8o4XEKtRvAERy12ISJpJ30ScxGsQYe/Je7wjzO8ZiLC/Ea4Z5FVdgcKIDkcanKAOcdhm\nzkF0o/MVeIU6yIKM1REoVFUOMihs1o6M7GwkJqg2rKpDuTgMKdS+QG6F82PRxtT3o6R+HQpzfR15\nM7cjo30AqeW2Jt77JCK1J1AYrxmR13FoHuYiQnoY1cXbg4z1S8N4PoO8n90oNHdPeN/LkUd1FhI5\nNKDq4ra4OBF5Kdej/Um/F45zeTjGu8PxLkT39FNIuPCOMP6jwu86REDXhzFdh8KOT6D5fV+4huTn\nooMYHk1iPbGcUj5OCM/djbz5fKxAe7r2hHlyOJJwgprhsH1BhdCO8hZPlTiOtSJ/hkgeS9EHLL+u\n21hQj3JjVuHA6sUVqwBBOL9J0E9Dxt0ECvOBi5HHdQCFtD4bftaQa4j/GpHc8ShM9S0UFmtHBvjc\nMK5XEzfg/h4ilCwy0E8gpeFNyFj/KLzmnSis9nUUojs7HONalLPpBv6E6KEtRyHFBWj/1XvCOe5H\nhPMU2uN0K5J6fwnd57PD9W5DPZ0+Gh7/DSLKxxEhvyEce1nKfFqfrV5iOaXLiBt/kzgbkdqPU55L\nwvdKOQrBCWqGw3b6F/sQLEUGKa0Vuu3R2UJ6ruo4ZPhKEUkaGhBRNCPjv4mYT1uB5PJmXAthBbHN\n+37kSfw7sZJEA/IyulFe5hkkGPlLRFDNyCBbnytra3EXWvEfhuTlHWFs14XjvoVYXPYtSDZuZPO/\niCSuQISzGYXfdiD13/NRKOwVKFx4MfLO/iq8bz0SuCxBxn13OE4PsSju/Si8+wti1fSLwjzeFq6x\nlZgj+k24/nuR57aixLy2heu8MBwvvxTSS8L7b8fhGDucoA4xNKNVcP6KdC7KVaSVnDEjVQxrUVhr\nmNxQ3k6KGzKQwd2HPJ5yC8quQaHJRymeRD8JNeRLe80cYmHYhvCzAhn+QeQBzUfen220PRt5UIvC\n/z0ot3ILIod2RLqNiOxORV5EE8r/3IbCosegEN9fojn/UDj3JlSayZogWlv7ekT0WeQR2T1pQXP2\nGuRxfSRc7/GIhI5D878V+ATaW3UOqly+NPweRJ7UehQ6vAB5dkcir+xORHabwxydRezqWwjtKEd5\nJJLCW9fi84ideMsp6utwFIMT1CECWyVbO3PrTwSxpXkf6bmbcgjK+vpYw70tVOYVNaGQkUm/HySX\nLBcjQ9sQzvME6R5bGp6HQmgGq7qwk9jksAt5UM0oD9OMwm03I0P+YxTWuhdtlv06Mt4LkBdwgFwi\nrkNCgwFk2C9GUvFlyBt5GBHPxxCB3YAI7NOIDBaF658Trvnb4b0Xhv93of1E1tPp0jCWWcjzGQnX\n0I2I6tfIa5kXzruRKHhoRR5ZJyK5BxAhvwbJ2Q9HMvMfoVJNf4Uk88WwDnmG3wrXui6MbTnp4T6H\nYyzwfVCHADIo5NKNDOlA+G39jw4i76mOGKIyzKI8wUEWGfuNyAuoNGR3EBnu+8NxTgmPt6CVeAZ5\nBStR+KxccgJ5Wcci43gyusafISNsH3Ar8WOe5B0ofLYSkVMGkdPdyCM4iEhiFTL8GXK/LKcg8cj9\n4XX/jogIRITWH+qrKC+0HJHESqT8s4279cizvBQR7fcQgcxGYcENKIR4NSKw96C57w5jagnH+SUi\n0rko57QShR9vRp6g1VNcTGy53kjsDrwwHAtKkxMoX/fvYQ5eGsby9nD8w8p4v8NRDtyDOgRQTOiQ\nf4Ob0ArdQm2lRBIThQYUctpH+a3ci8FCUrsSj9UhefOtiKhbEZHdh3JqA4nXbgH+Ahn5p4jtzR9A\nnsq7kZf5bUTQn0Thtk7k5TwSjjc//L4beWhbw2NdKEw6B7Xh6CQSxOHECvE9YWwZ4r6kfcRNyGvQ\n3qGPIZLbizy488L7P4VUf2uQfPvrYey/G+bjpyjPdjoiyrPDtZ+OwqWPovuSBtvM/DYU7rX2IK8M\n77kjjO1NyFt04YNjvKhHYh7HNEUnpcNzSQyjm54hehVToaIaQUq2SvoMWdfetDyadadNwvYrWdir\nARFHH7k5
q6eRYf0MmodV4fUPh+c7UB7nGhSCuyz8//NwnLuJlSb2oBzPShRi7EOey3HIQ/w4Csd9\nEYUWTwzX8wjykDqJYb87w9jbEWHWhd8vIXphu1C7jB+h0OGZyIux/W1noDk+C93zB8Mx16BOvDcg\nL2gFIu81KXNrqEchwz4kvuhDoo7Hwrh/iMozfR4RazO1UVjYMX3hBDWN0YZW4JX22hlCRu8gUZlW\nyb6iqcIIGneG0oTaheZmH9qLsx+RSDcy3C9GhvTvUAjuo8iomjfZHv5vR8RyC9HAW6jS8i5rkKe0\nBAkptiOCOocoQV+CjPWjSFhwCRIwfDGM8QWIgJ9OnHsVIrMO4iZfW0wsRnL1y1HYFfR5aA3jfAgJ\nOI4O131kmI86orT8BERSw0gw8hGKq+4WIY/rG+EYFyBv73S0EXkvIs914VxPI+IC96YcY4MT1DRF\nC7HS+FgwiIzHIFptT5eVrjUd7Ao/aZ1au5B67leIjM9ACf2vhucb0Gr/RpSvuYJoUJsRyWxB3tAK\nFCI8iOaoldgbagsijh7kIc1FYbImRDIPht89qIiqVVDoRoSwGnk8gyjHdBPatFqPSKsfKeLMO9yI\nvC5TAJ4cxvVhRIIrEDltRCSxK5zfylbVIfI8EYkbfoW8xCNQmaJS9fHeC/wzIt/HUXWOlch7uyPM\n8Y9R3ut5aBFwC7Gf1nT5jDlqB56DmoZoQl/68ZYSakSr833UXsdTq3t3Y5HX1CHPJLkR+G+RMfwB\n8i6s8OrzUQitHRHYe5C3sBUZ9u+iQqkfQwSSRZ7Rs+QKQqxL7jCxOkc+ViGimotEDn1IOWh7mQaQ\nZ3QTEls8jBR1W5DX8+owTsK1LUX3pw6FAo9F984whL7I24k5q0WIqNaGsTYgMns2XPNK1NX3eyhE\n2U4s6JqPI1BJpF+Fa7oXeZ6PhOvfQKxQ8YVwnNeiiulJtIfrqMWiw47ahBPUNEO+yGG64l0od7E5\n/G8lkRqQWKGPWHooP+xk1bbrkBeyEhneFUiBd3PK+S5BBvnPkFz8DnLDTh3Iq3oCiQrmISLZmnuY\n/98UsY2o6hskl8RakQdxJqpe0YuIYF8Y5z7kKa0Jc/Dv4Zreg0JxtyIhwlHEPKE1TrQQX0PiuSyx\n6vzbkGezG5Gz9YzKIIIaCeP5R6IQ4w8RMabtWzomjPUvkVrvLnTPbguv/yHyFB9OvOcKRheNNdh2\niPw8oMORBieoaYQmYtHO6Y4MqoF3IzK2T6JcxrlIsGCdcf8RGfQ6ojG2ViH7kSdiIbYnkfFLCi86\niPmlzyHv4zvkEortH+tCqrdGJJneHca5DxFWWzjOZuRlGcF1IIOb9EKbkVH/HCpl9GJiv6gNiOga\nw1hfjojgKKSwOxaFxp5GZGltr3uJFTCGkIe0Jvy/NYxvQRj3E8g7G0b5ptVE6fw8RJQfQKG93yJK\n5PPxPlROKYs80qvRXq4GNPdXIkHJ2nDefeFcpZoctiKyOhQ+y46JgxPUNEE7sXDpoYQ3I+n2B1Co\n6SaUbK8UVqA2Sy5RLEQkdyta5T9B3HSbRYTTEf4eRgbzK4gwfhsZ0hMRKXUiYjSjuoMoOjgVkctz\nRKHF0vC+I1Do8HKkpns3EjFcGd57F/IaH0EeUAbJtuuR6u+j4XiDiLyPQZ7ZHOIXeCuxe20DsQHj\nQJibLNqEexnyiC4I43k4nGtOgXl9F5KuZ8J8fA95T3cCf4OIdh+S5s9FXtbXChwrHxk09wc59D7X\njurACarGUYe+xFac81BDF5Imp1XCrgT51TAakRezDhV5XYHEBIOInBYgL60pjKGeSCw9yPt5LfBW\n5HX0IrKag0hvCOVkQOG8h5GRPZA4Xg+xOeFlyOP4YThnN/KcjkWS7fVIxv4xpJSbg4QU56FCsf9A\nzEvtRF/c9nCebLimhYiUTNyxKVyjtVyvD++dH8a5NxzDPK80vB7lzrYSq42vR2HUPwn/N6DNua3h\ndbuoDE3oXvVSe7lQx9TCCaqG0Yy+/GlKNUdEU/idL5W/iLix9L+Rt/QkMdfSieTam4mbY+uQwT6I\nVG1/gWruPYXyQw8ho/4bRGB7UTiuGxHEQaJ3BgqfXR/ecyESfmxDgox7UGjyXFRJYj6SflsJpG+H\n170aCS9uRWTXiIg2g7y4vchLMzQgkppH3OT7O8gzzCICmR1e1xiO/4MCc3seIuPbkSd6AiLjx4F/\no7qLJvP0pnt+1VE9NJR+iWMqYJtDnZxKI20P1/MQGV2OPIBdiESeJc7tIkQmNsdZ5P1Y073/QfuW\nvog8mG0oT3YQeT4/RgQwi1ifsB55I/WIOAYR4dgYNyADfxix7fsL0Rfxi+G1TchzsRDeUhRWOx15\nYC8mbjOYE463gLgPagiR74EwtiFELvvDmF+AQp3HhPNb7bxnkJTcVHmXI0K17r4jSGa+JIz1vDC3\n1UIvcQuBiygc4ARVc8gQq0NMdLijmVy5sq1ep7tLfQwihjNQhe/jUFjqW4icrGDtEynvteoT9cgj\nehwRwg9Q2O929KV5C7Gh4w4kWLgJhedORnml04CrEHm1I3K8Ad3fecgQfwDd88cQEXwKeVzvQLmn\nD6B7Yl1+14fzmmCiGRHF3nAeU/ZBVPaNIAKzTrtPEHNuP0A9qTqQ+OFKYv3GPchzfALlm05Coolt\niJgLhQULoTVcR7EW8sNoIdESflxEMbPhIb4aQgP6Eqf1Vao2LDmdbyjMOE3XpPWZyNPYiUJahjXE\nMjzFrm0OCrX1EJWEhMd+hAqvfgR5Nc1IaTcUnj8DGdVvoHzXkvCaTaim31OIyL6BjP1eVNS2B3kt\n5yADvT68dicKC34CiRVuC8dcjxYWtrpsDGMwNeRW4gbgRnQ/rQVLI/LCLkMhy5egvUuGlUjCfwYq\na3QQeVWfRuQ2gOT69yJ5+S8QYZWDcqrmJ2F5tnwZv2PmwAmqRtCCVsYTGX83z6GYp2T13pLtOkqh\nCxmvZyieIJ9FZVXKK0U9MqZ/h4i2j9wcyUpkkD9Hundqns2mxPEaiV+Sg4hc5gFvpHCn4HqkkjuI\nvIFOFMZ7KzK4N6MQ3iDylm5CXsrViPg+gwzyWahKeRtSzV1G3ExrsnurVGFV6a0MlFXLeGl4bYa4\naXkdIr8/RvvPfhOOmUG19f4LeYi7UH7tdcizez76nN4a5uWDiGQ/S2kCaUPkNpaoQGM4r4soZh68\n1FENoB0ZlYFSLxwHjgzHrw+/CyW3s8Tmcy2UV56mAxnIWcj7W4UMV5IAM4jEkjmeauPdyGuyvled\nxLCX7Q+6DzX1u4+Y42hERruFaKwh7ruyArVtyGD3IiP+XXIrwZvooBWp6Ez9dhwSW2xGBDkLEdaF\n4bz/icikHXknn0ThwXZiBYzjw3VtIqr3WsL5GsM4txPDaHZtJh8fCuOfFa7lvjA31+TN4Wp0r1Yg\nsjwcEXEbEpccIPbT+g5SDWZQeJLwd9pnppmxe0GWw7POxl4yaebAC
WoKkUEGu5+JTQgvRavhA+E8\nLZQ2FiPI0FnV82LoJ3brXRR+jiUaUgtZ7kBEsZzyw0LF0ERU3L0wjPV24v6aYeJ1Ws+jPqSeezUK\nU4HySNY6It+z7CB6IHaP9qJw27eQR5JUBfYio255mjtRyOxFqNX75vDYY6gtRT3ynHYhJd+28Prj\nw3H+De19uhmRx9GI+Gaj+WwOx3wYbZZ9BpFQHZGcRsL470HiDFOGtiHvzTzFZkSGhyEhxTxE+h9E\n4ccN4RofRyRxFCL9RhS6bEahznnE9vbZ8PwI41f8meCkI/x2b+rQh4skpgiWb5qMXkztxJJCUL6n\n1o+MwRCl48DWWXYzMthnIkn2USjcZe9vQkasDeV4NpU5ljQsDMd9EZrPbxH3CuWHJ/sRmVi4zsbT\nigxnllj3rgcRlbVbHybWP+wMjz+CvJvbUK7mjUQ14OGotNKFSITwPmRMFxPDcatQuaV9iFSWIcXc\nrYikVoe/X46k6sOImO5H5H8QlUY6Fu2d+hLKe91AJNMGdF/aiK05PoPCd8cgsluQGAOossescD2f\nQHvIrAVIBgk9liIv+LHw2DnhNQ8QN1nXI6KrC89tS8yrNUss1xNqJ4a/h9GCx0UUMwNOUFMAyzdN\ntBgig7yDp8fw3gaiF1IpsuHcL0GeyVcTz5mhPwcZ7Lnkeo8HkKF/tsQ5FqGc0gG0ep8VjrWN4mRq\nzz2ACqA+QG4Nus2ItC4Mx32cGLrrRaQ6SKxsfjoy+g8hwcMmlCv6JJGAbaVvXukyoodzZ3j8NWiu\nVqISRU8jkr8CiTMOhPd+CZHGPESOn0Xe03bkjd+LBBgN4VqNgLsQKS1DZP0gItiTkQdnsGtbjqTk\nm5CI4hqiBH4/0XPdi4QjS5Da8efh+eHwXiOowcTfc4leVZqSsp6opLRwpXl81thxgOgpDzGx4XHH\n1MEJapJh6rmJ/kK1Iu/lScYW+ze5chPlq2hmIwNre3B2h/MncRCtiG9DRugsZMzuQYn7FmS4L0AE\n8VT4e3liXCaPH0TGsA95G5Xktu4JY13L6CKpS8Pzc1H+aBO6X9Y08DlycyrvRPf162Gs5sk9h1Ru\nhkEUujsMeSyd4fk5iDi2IBLZg3JQy8ldxAwhArV6g1ZMdxYSMtyOSPKkMF4refRR5F3NRl7nU6h8\n0wBxr1YX8kasIno/+gz9CRJI7CPe13lhjPkdiQeRZ7iZ2FqklSiMsaodB1GurwUtoIaJ9RMtFGh5\np+Q5TG7fGR4fDGO29iMuojj04AQ1SbDk+X4mXjZpvXc2oA2r9xQ5Zx0yePneXAMyJqU8GZDR7UBG\n6XFk/GYjBVrafhcLhbWickG94T2nhXH/BpUFWoVyS78gd0PoaeG6Pl/G2ArhaFRXbg2xCCqIFIaI\njQePDtf3ICKsTeF1SdLfj67/g2hRcCQKy21H4gdDGwpH/m0439uRQb0CkZl5TS9AG2oXM/q+XIcI\ncTUi8EZiHcJt6H4OEauQ3IMqXvwAhR3rEGE0or1Nt4XXPRGu4VbkCQ2jBYY1tbQK659Hns0iciuY\ng0KIRyPSm488nNlEj8oIxcgrGW4tN1Q3FI7XTCQl8/rmEjdIg4spDgU4QU0Cmon11yYaLehLbF/O\nB5Dx31Dg9SPIoM5F+QvIXaUWIrZMOG57OEcdUn6djYxeOQanmej13Bl+2/6gteG5L6S8bwUSHJSL\nOmRQtyCj34HCaVm02q9HpYDuRPNmYb1OFLozGf1WFLb8KbkENQcR3R2IaHYjwcNDqBrFX4djHo/E\nDv8ZnqtDm2wXoI607yca7R3onuTjJ+iz9Hso9PdHKEz3eDjOPORpzEGkcByRQAnXsD38/fnwmq3h\nPQtTzmee2NNhTJehcF+ywomVh8oi0monCjEeD69pDq/pQp/R9eH3Myhndz6FW3SkweoethHnzIjQ\nrn0sRYcdtYW60i9xjAf2BZqMzbd1xMZ0hoMopHN0gfdY5YoHw/8mX4a40s3HyShP8igySKvQSvk+\nRITlkFMno+fEehZtR5UYkuHB1UgQcHo4V9oepLQPcwsiu15inbsniDL64XAd1yGS2YOM6DxEVouJ\nYamLkaE+Ne8cq8OYZ6H7/UpE1H+GPKVzkCd4FzLYH0Ke0otRhfAvIOK7JhzrRyhvd0zK9ZyKCM/G\ncXMY59+E67oRzW2GaKBbkGdYjwhhFbGSeyb8PiHlXHXh+k1o8RWiunBeeI1tpu1Bc7wzzEMPkcTn\nhsf6EcmdhzzRa9Hn5bvIY3wn8rwqQV/4MXn7CPLkBhApZyo8nqO24AQ1TphENw1WlWGi800Wpmsj\nnRz2IDI4PO/xerTabCSq/OwYWdLd6zXIGP8aEcbzkSG0/UO7UMK8GCw0ky/AyD+nGVDCOF+I8kZX\npRyzGXmC64gf6nlhLI8i7/U+ovdgrzEjuhV5J2uR93MKCiUejjyTfpRjOh4Rp3k3R6Gc1QPhGGvD\neY9Ec9WPPL5F6D7Uo5DeZ1EYsAEZ1jWouO228LOe0R7FHOT1vBF5UV9GCsE3IwK6FRFbJlyX5ZKe\nRYTehwje9jptRveu0ALqRlSDcBPKRQ2H8VvI2EK5yfd2hHONhOeWoM//dvRdeEk47uPkYitq3Phy\nlJesBFlG16wcQJ/jduKCyzH94AQ1DnShsMh8YkVtiD2Cepj4gpdNKJfTS3HP5VmixBtERCZDtjBb\nO/GL3kcMCyVhLdbPRjmQ68NjreF5S1IvKzKWYnUGW5FhWxzGCDIwxyK13H3ktgyfgwjvMCR2eBSR\nw5HoPiQ33iYxwuiFQx9a6W9FRvhJFPZ7AhHAG9C1/y4iifchD+g74f3DyHt6FHkSx6Gw4tOIuE5A\nhHcOsYXHV9CerA3Io9qO5nUDo8Or/xuufX4Y5ztQXskUht8jilvuJDY3tA2+N6BcVJoyM+1+vIpI\n/GcRPbDfQaS6Ed2j5OfEWozUhWvZEs7fGK7vegoXQM6iDdBtiHSrgR50vZ1h/I7pBS91NA4sQV8+\nUzjtZ3K73loVgUrKI52EDHmylNFZKH+SPNYSZKiTH47jiYn824jqrA5EIA8iQ9eHvIpucueh2Idt\nCTJM/cSSOt3hWIuIFclBBtMa5DUgj+Bxcvd6jeWDfWY4bi8i2P0ojNYWjm1y/a2oVNGRyCu4ChHK\nnyBiuA/VshtAc7Qp/H888oy+isJcj6F9Tf3hPC9BXtBvobDXCPLiHg7vz4TXXhXe/1XkKZ2B7unV\nYayWQ9sbfi9BgpWrwphfjzy/DwL/iuZ4GYVVkDvQZ+PDaJHweeQFdqDP/W6iJ5pBi4Y2osCmGakb\nf0z59+RwRL6fJ3dBMh5YuN07BEwfOEGNEfMQEVlB0Qwxzj4ZhS0biE3eKsW5qHutkcfRSO57bfh/\nDlFxZZiHwmfLkQF8LPF8J/J6hsgtsHoE8j7qifX9RlLGfDTydJIezX8jMcG+cK58rEEEsoxYA3A8\nH+QTwzifDsey8OyS8GPX
OoSIsQ6R+gAy9AtQPujm8LqViLT7kJLvCmTIFyOi+gmaj8PD86egPUTn\nIC/tHGLbiaVo3m8Mxz4vvLcJeQcvDMdYTywQ2w/8Adpf9QUULrwnHGMV8IdoAbA4nGMNufcuHxk0\nz8uR8ON14fElKCRpHlhXOO4cYsmsc1FerVI0AP8HSecfGMP702DhcJOpO2obTlAVwvbpgFaWk92z\nxpLSw4xtZdmKPJK5iKROQNdhK+2N6IubVBw2oeZ9J6OcxE5iWZ1GYg7iZGQEDYeH53qQAXw6jNtU\nhnXIo3sicb6TUGjpK4ggNofflpuy+a5Hhv4hCrduKBdHojm9B3kFy9E8WRuMfmJYLIPEEjclzvsy\norrve8gDsqKtRnAPovn5GJJr34sIzOT0i1FL9i50X0aQ/P1aYvmg7WjeZiNP7wDyaJoRSdWFc38W\n3d8PIgI/JVzPg6iKxCI0f9ehnE85BAVaRDyB7tHr0dyPoHucIRakNbJahYj/uyWOWwqvRHN9bakX\nVgBT1lq+1VGbcIKqAKbSMqM72bAyPuMJUbQQlXpHIkNpYb1LkTG+MfH6DDKoQ4iYfogMUBej90l1\notX+08gIWsmeVcgrWIM2sXaFxwaRoZ2FDPxSRHpfShxzLbGmnuVaBpDHsoPxqyPXhmPdHs4/G5Hl\ntnCe2ciLy4bnj0XhMrv/89E83oZI/OhwrJ+E55ciT+awMAc7kFpvD5qbSxAhWc7yeiI5WhsPU6nt\nQV6pVdvYgvJKixERHRV+7N6ch+5FHZrDWxAp1RGTz/+L8mjLkYdXDB8N53k3yhH9HBn65cSFxx7k\nWS5Ei5/70D0ab5jOQqTfKmOclcCqpVQrjOioLrxYbJnoRASxnanpTZNBxmC8xDhEbCGxGX05M2iV\nvwGV+HmG2AL9FSiX8avw+MnI6B1g9L6ug2jl3oTI7DA0bz8P52pDYahu5Jk0AX9K9B5+wOj9WruJ\nKkTbj9QSHsvPmTwPkYqFCmeH8exHHthyRDBNiBSXI2/g6fC63Yhw94R5yiCC6EaikEZGbz6ejTwJ\nUK7ogcT/FgpcQqygcSHKO81G+arN4XXrEJntCI/1hddY5QxTxO0M13ME8jSzKIdo5aE2hvNk0Gd1\neXjfJxBpdqIQ3WnI6zoffQY+RumyVrcg0vsBuge7iffygXC+HuRhtqFwZ28Y+3jbyGwJxzwH3ZON\n4wYf8IQAABvMSURBVDyewe5luTUnHZML96DKQDsyjjuZ2ri1ldqppmy9E305k+GdP0B9jy5BifeX\noQT3EOov9G1Gq76s+G1PGKNVDk/2fzoaKbn+BYVtjkUbVMvFImQYV5BbQghEfHuILT9aEfFtI9af\n24ZIZg8yrq8P11Jov9dyZPTPQXm3rRWMFXKFM5eicNeWcN5foZXhXET8b0UhtKMQSV2P7ovVopsT\nrmEzmvuPIkP9KqQqfDtabX4AfT7Wobn/k3CuPw3HejiMZR9xcWL5zOYwvmIG4e+RerERkfud6H5b\nfcnJiCycgT47n6e6xstFFLUHJ6giSO7nSSqVphKW4B1v6wKQ4bSwWRKLgD9Hxu5SlHfYgIykKfas\nhE9aTswMu8FCiq9BYZ8hlA/5Zsq509CKDLmpCk9Hnox9cNeG19yPSOFIZGxvLXC8ZWgT7I0U/vBb\nbbzTiGWIKkEDUcYPmpMWNJ/HoDnoAj4e/q4L19eCSh0tRHN0b7gma+l+MlokbETk/j9o8fBT5Lm9\nGnmS9vdtaL9UNwqPnYOIEqLs2lrS9xFbjhTCB1ELkJ5wno2I7FrR4m0ycrJW088WTuWU4yoXJqIo\n1pbeMXlwggpYQPrGVOt1VEvoZPzJ3WZ08/O9sTkoH3QzMqY7UPhmDpqfLciD2UEsA5QM9c0nek3N\n4fdF4T23hmM8gIipFRFksX1RC4i5jXZ07UPEbrVrwm8rNjornHf7qCOVjzZiBYpdpFfcLoXknjLr\nW7UXXfOnkVE9BeWEhpFi8QhiX6XXEjvpdhMrfK9A9+K94X1XoGv9EJr381H/qMWoGvlO5KG9IZzv\nw+EY+ehAxNREcQ99HSLQz4b/l4frtAocfUw8SXWgMQ4hEt6Owo/FUE9lizoXUdQGPAeFPKU65CXt\nRR9K+6nFMv4HkTEYq0fXjG58WmL4VSi8dxYyzr9AYaXnkAG0TZfrEFkli98uQIb5ICKcM8Lx7kaq\nt91ElSDIwBwgVslOogndF9vQOh+RhokXFoXx9BCFEh1hbIVCduViEBnzBmIfo0qRbBVhK/IhlEPL\nok28P0R5oANIlt6Ownx9aM6eQ2HW49Dm30HkPS5EJPFapDz8A3SPjkLebQvyLDYgr+x25F0+h2rp\n/VvKeBvDmA9S3CDvROpCq9vYjdSaO9F3aDI63lp33RG02FmE5qkJfX/N85mHCHsWItBKxjUcjtOB\n5mWy1boOYUYSVCsypp3hB2RsqxE2mywMEsmgXFjBzkL9c96M9qu8EBlXC3MME5vDNSMDdgkiOKvU\nsCiMaWt4/TuRQfwaMp4DFCbU4XBcMwJz0T3aiozOImJCnnD+jnC8DpSEty6r1WgnfxT6YuwKY7eW\n75VgCF3DQaJy8kA4Zj3amPtVtIn398N7mlCebhaqYpElhjVPRXm751C+6o2ott9biPmgPeF4F6Ji\nsgfC77sQiW1EIcv/ShlvC7EiSSksQCRp934/It4dTF5Y7CBR7fg0IuD5KIR8GlpArUWKSWsFkoTt\nISvnPOAiiqnCjKpm3kQscgm5GwynG0aQAWqluETW8j9Wn62QLNu6pJ6Nwk8Ph9efggxbPTKcZyKi\n+nu0ifJZtOLfGsZ0Eqoa8G+Uv2K19hDWxmFvOOYCZETyC8OasWgJ11MNUgJ9PlYj7/SOKhyvD5Ht\nPmKzvl5iZ9gMCnu+EuWJulC4agHylLqR4b0GqfCeAj4SjnEryinZfqZlaHNzOwp7NaKmiScikUQj\nkof3oEVKfp6pkjYwVxFVgRA7Ek/2d8maGIKI5G4kZulFczinwPtAn51mytuwO4TuRSu6jy6imDzM\nmFp8DYicbP/IFqYvORkGiSqsJBqREbeQl9XpK/ZFHEar9L1o82gv+mL2IMO5AIWPbkDx/v3I2zoL\neTfW12gt8M8lzpWG3vBe82SXh7Gk7XnpQYuLbqobehlBBn8sOSfDxcT9atZTKdkLaR4SKlgzx1bk\njbwTXdNRaGNrH/pydhBLBL0WiSoaERl9Gc3RmUjU8jhaUHSiBca7kEhiNSK5Y4F/JF0EYca6HIwQ\nq2KAiGBb+N0VxjxZBVqN7EFz1I2upYXR/aoMjehabftBe4HX5aM/nK+T3NqbjomDiySmIZrIDaVY\neMtWG4NUnp9ahXoLvTfv8TpkGO8ltuQwZFA4sAWFpr7N6A665aIReU/Wg2pX8ZdXHdZ2ZDw9u6wv\n0jFoD9h2VKYnidloz9fZ4f/O8Pp6lMv5CQrBPYZaaBihrEM5q0+ixcItiNxeisJ4W
1A+6rvAe5C3\n+z4Uiv0t9Hm4HG12tetN5lYWo8/AfZTnIXQCf4xaf4A88M3UjrjAtj2kRQzyW72MpTpLE3FfohvQ\niYMT1DTDx1FC/T7g7xKPjyVPYpiLVvHnA/+R91xnOLblhBrQB8b2jJyFNm/+NWMPfWSQZHlHOMdU\nyHtnM361ZlLRuBDNVYZcb/1DSEX3eaTQOwN5PquJLeEfR/f4A6jFh9XW+xcUXluAPKPDkDd2FzKY\nL0BihVXhuNeG838TiVWWEb3uhUR5u7XGWIHCkXeVeb2fQLJziHsFd1E78mxbdCRVos3h7zQPv57Y\njbpcb7I9HM8rUUwMZkyIb7rjH4ErUWL6DSgUdEHi+bGQUz1a+fYjcvpa3vONyNjYPpe5xA2d+1Gi\nfikypFnG1s5gLspbPYZyTVNh3LrI3VA8VmSIX6h65Pk8h8jGcDESLKxERVQXIi90AOX6foqI5wWo\niscr0L25DhHOV9B9Xx3OcS66N+eh+7g6nGc/8t4+EI7Vijyc7SiXtSH83INUgtuR99uDPlvl4Lto\n07HlOLPUDjlBDPm1EtvBNFE4/DyM5m2I8sN4vURV7YxK6E8SnKCmCd6LiMkUWG9FNfLGikYU1tmM\nDOYyRsupW4ihwt+g1WUv+tD8FUq+fz4830f5sfwkjkJG0qo/TDa6qEwgUAw9RKOWIS4ankaS+beh\nvMj7kFFbgTzGekROdciDmoek/u9COb/Phfd8EJUZslYpH0L7mqy1x+GJ61iBjHMW5apuD9d5MrFg\n7DKUSzqRKDPfjRZDS9Hnw34WohxTO7FB550or2W5ylrtXmv51CWU5+UPEUOAnRRuSGoYRnNtuV9H\n9eCkPw2xABmavVQemmoiktFmlJzfi8JHSaQlzXciklyDqhjky3T7kBEoVRXbUEcuMQwyOr82kajG\nhuckWogGsBuJPu5B87QFNfo7H8nL70EKyDaUW7oA5ZluQN7cArT36SfIu/xmOK4pGE9FoThryvgi\nNP9vRPftPHRft4Rj/TkKFz4TfkBk2IM8tmPDe5ajvW//QKzt9ygiOKvj2IY+d3XhHEeh0F4b8gxN\nbl5Le4eMfDsovjE8iYPhp5nYfqVYbrefGFYs9VpHefAc1DRCB5If70aS2jNQ9YGvlvHeRmQwrcPq\nTlTD7VmUtLf9TqAvs9XVM9ShhPx8JG8utHnV9pWVU2V8Hdq3kyTCapNGIVjlhGoqOc34GY5F3lMP\nIqdGFJJ7CBnx24htyc9HHumRyMuy8NRTeeeYg5oTtiCC+g90D5tRo8ddiJQ2osXEz5A3dS+6b2uQ\np7UsHLsbkeD3iSFk+xx0hWOuQ8QzC31+bDPuAbSg2ITatVt7Eat5aLUj85ENr03+9DNxqtomcjem\ndxDJpxLkV00pdU7r1zbd1cJTCfegpgEaUJ5iH7mtMH6JVFrFErUNxNpv1jxwDtoT81MkKTdjYitA\nayliqEcbIJ8ghqIKYT8KCZXzxXyG0UZiP6MN/UTA9pBVa09LsrQRKExnlTba0X16D/JmVoXXLkXl\npAaBTxEFDYXm7WyUh+pG4b3taP52ovvXjvZN3UesnLAI3ddFxBDcfESG7cgTfxQRTDe5XnE3+oz8\nNHGNyc9ZHVr0vJx4v7aH85Ty6i1fZxU35iHiGm8VkHxYo8yktL6HmJcqV9xg/b3KhRFgO7qu8VZz\nn6lwgpoGqEert+tSnrOqDSPIE9mMyr/sR0TTilbr28NrXocI4F+IBnUesZZdlkhS/UjEsAat+k9D\nRrfUKnIbyllsK/C8bVrtJl3aPcLEu/aDVDfn1UjueK1yRBMKpV0N/CXyep5D1/1OtJfsS0iosIF0\ncmpBnlgjqkp+EtrXdDJaYLSje/oI8pZ+RgyfNofHjyNKy63OYCsqffSZMNZC4oFGYvgyOb4RRG5J\nMupDC6BSC4ykF3WQWKjW+ohVA2nkZOgn5ozKWQylbW4uB71ozjvDOWsp7Dkd4AQ1DXAAGajk/o1z\n0Qr8U8RV53z0Bb8AhYoa0d6mOhTuWYdUYI/mHT9L7hfHkv3HI6/NqirUIwIsBSO5fFjr9zpESvNJ\nDwUOoQ/mVLQ2mc/YVvFGtiZR7kX5nKcQsd+PQmzvQl7w65FXtR8Z6V+nHPMoVDGiLrzndeEcp6P7\n3IPIoZvYi+mlaO7aUO6qN4zhGHRfupD3dBgKzf0T8H4keEhDR7imchtD2mbnsaA/jHEh4yv2CyLm\nOoqTyiCa+1JCGfssj3XBZOrAVmLJMEd5cIKaJrgdGbSn0Gr4DkZvAt0ZftpRmK0RJc+PRd7XNWjf\nTD5B7UaEtgUZtFXog7GRXC9rgPJXkVZuZhdx1W1EaMRj5Zfykdx0PJEYYHSY5wC61ko3Clvr8wZE\nIkbGl6Ic4W8h7+Za4E1I5GB5vnzD14gWAvXIu/kE8T70oJzTXGKJp3o0l13IMB+L7uXu8NOD6vM9\niT4bcxGxDSBv7tuIpP4mMYZmYhWSSg3zeKosDKB7bx2TxwIjp3LCdyPExUWyKkUSY/We8uEiisrh\nBDVNMA8ZtS2oB1Ax3IFEFB9ERuyfEWF1olXz0eSWgWlARu9kRHCPoUT6meF/K876K8pPLNsqsYMY\nwhshN0R0gHS1YCEPrFroJJJlvhGzHNhYSAqiRHk+mus6JDTYgUQIj6OcTSPpXsmtiGC+g8J53cQ5\na0VkdCu6NxbyNUn7SchDNm9gPyLF4fD6daix4yN512atPOxYVoh3rCv98bZktxb3Y7kHVmKp0o2z\nliscJPczbqHSaoWb7b40EWszuoiiMJygpgnWo5YV5e7yHyKWoQGRQDciuDPQF886qR4gKsC2ImP2\nLJI8NxG/oJXEz+09xTySEdI/gFkm1oMaJndF3IKu07rCwthzBfXI0Fl9wgeJRu/9iKgWIiFDPrYg\n4lqF7lfSyLYQCfVO1KzPemF1o/mdH45he3H6kLd8Tfh9Erofe4j9wFYjz/xPiQRYbjivEKrhbfSi\n8c2h/GLARk5jbZHTG46R9JjaGf98pMFFFOXBN+pOEyxHK/KxwopcdqLQ3Xnh74WI/DYg43QSWqGb\nvPkgY++U2odk8EcyehNvMQVVIeKqFhYSN6DOJzbp244Ieitjy0O1IcOf9HqGkMDkIKr+/iZiI8ck\ntiPyWIPyfjY3jURpdyuxo/IeRDpLUI7rKiSO6SCG8TLh91rUhHIh8qwHw+NtiJj+jJg/q6X8SA9a\nTMwu47WtyNCPt3+bVTdPkvx4jaQpLK01jBXUBZGi9URzb2E0fE6mCY4gyofHUjMuue9jESKpuchI\n34724WykdGfSSjCCwknNKGSYH1bsRwZoEfqCtiWeXwV8r4pjSaIekVC1YFXHeyksRX4a+F9US/HV\nec9tRYbxGHJX0laVuwl5Xkll49Zw3ttQC46b0X6mAWQMzft4NdHQNxEFKA2ossU3iaWrahHd6Dry\nC7wm0Yaur1o5nXLFE2moI6oHIdb9y9/O
0Jn420QULbiIIh9OUDWKNWjFO4A+tN9GH+rL0KrfcCej\nxRLFYAnop1E+ZDdSh315/ENOhe2bmZV4rB55hLPC32eFce1B4oDvI5XZRKGajSmtGWE5GzjPBC7K\ne+xO5B09BbwGSc4NllvqRwQ4HM7VA/wcCR86iC1SbD9RKyKdZ5F3tAItRLqRwMb2Ze1Enlsth5es\nceYA6SRlTTurrfgsRzwBcQ+h5UyHwnhKeXJGehl0vxqI2yvmoHtei928JxtOUDWGJhRyexptxG2h\n+Af15cA70EbLUrAK5L9MPHYYk7M3Y4i44bcNeWtHIEHAF5ChnIW8iD9DlQ7uQ/NQbeyjvKoAxSrE\nm5hggPINiZEJ6D7b3qblyCB9HW0R+B1iuCr/2O1onvaivOG5SOV3JlpwdKAv9RHIA/3XcA3Wr2ko\nvOfrKPRYDUxkkt9IeYQYph5A899McfKoBkw8MUT0Qi3klyXdOyoHw8RFxwFyFwmt4TnzbGe6iCLr\nP1P/0wbZ08NPY4XvXQHZD0N2bonXdeT9/xrInj8J19YI2TWQXQ/ZpsRj/1TkPU2Q/W/IPm8CxpOB\n7MqUx+bk/d9WZGz5c1nq5yHIvjf8vQGyeyF7L2TrE/e/GbJLIfsfkH1ZgbHUQ3YWZDsh+7eQPQey\nb4Ds2ZCdB9llkG2H7Pche1PeGE6B7MWQ7aryfL41jHGiPj914XptLhoh2zCB50v7aQ73fSKvM/+a\n2yG7hNzP5Uz78Vp8NYBVKARzB2NfDWaQrPyLxGKtDUTv6BUo/HOQ2N/pSqrTZqIQLLE/QK50txHl\nYj5G6Xzaf6OKC9Xut7MGeXFJ5FcxmMXo+bGNq+XmOzIoN3QPWom/Fq2WVyWOYZ6Y3avZSNTQjEJ+\nXWiV3oy8iWYkcukgNzSbQVsI3oFKY5XbNmO8eC0Ky05k7mQmG6o29FnsZua1m5/J933KkUF7m/ag\nvUfVON5foEaGB4mx7BaUu/oaMmptKPc00RsFO9CHK/9L9UqU5L+1jGMsRpW196BqDHcgVeC3KO+D\nW4fyPisRYXcjYl7NaILqIoZTZoXjW+UM69BaSSHbBlSNoxttDxhEYpHkfOTnOA5DJPNLNH+/jcoh\n7UD3shk1d3w5CoXm40oU+psscgJ9tn7J+Ks/OIqjnokNZ9YiPAc1RWgFTkGy32olqbPELqf/gwzw\nJSgx3o2Mbg+xP85EoSv87itwnuOR6KMcbEXS7NmoKsaLEWl8juiZdRF79jyOxAHLkLz6IKrkvRER\nxBxEbrej3FdSTm5KxxGUa+hCBNlGrHXYnvJ6kOEYQAKWZPO6x5EcvAd5PUlyaicqGQ2HI+GENYp8\nhigMaEOEth74NFGqnA3X9iIkODmcycUBxtYLzFEZZho5gXtQU4Yz0apzoib/NUi9tRNVF5is2nZJ\nDy2NnLqQLPq9VTiXyXL3EefxVOQt7ULqxrQx1KFq4KchsvtD4t6rkfCe2cSNm4V2+3eEHyuuO0Qs\n7ZRB3s+n0WLhj5Dq7jZE3C3hdfmVOY5F9+32cMzlaC4tvHM6Is8f5r3vIuCziASfSxnrROISFFYu\ndxO5w1EunKCmAGuQMcvv9XMoIIO8jh5GS4KPQUVrP8rUFIJNw1+gxYL1h9qIJNzfIXomJgXO37A5\nhLYCmNKwhbi/62G0z8ya+RGO8yKUk7LX5aMeEdN6RFLmcbWgeb0Q5eWSOD889pYw9snGxWic5YRs\nHY5K4CG+KUAjo/Mfhwqyeb/nIAJYhQzvh6ZiUEWQLJB6GDK2LwD+GHkFNyPxxAsRobyMXGGHeWit\nyCM6F+X6bkD7y5KeVxaRzlnhuGmwfVXbGZ27s35QSdQjocTvMjXkBL7CdUwc3IOaAixBRq7ayrTJ\ngokfhkgXWtgG40VIPfhDVKapVqsVFMKLgN9H1/le4HLkpQygjbH/heZiLapavhbloE5FlcMLYXV4\nX1o9vkaUZzMiyoRjvhgVh80XIjyOKkJUslm72ngRyhXeO4VjcByacIKaAsxBkz6WkkVTicXEAqUj\nSLb+CKON5onI42hFcvJD7QPWiDyvM1A+aD66p3chD6wcHIZyZyNofqz9/LlIkm5kvgyF8G5B4o8k\nHgLeiIQ2U4lXIaXhTJNAOyYeHuKbAvQi72K6ENQCJO3eQcwztCLjfDRK7B+NVjsZRGBfofrtu6cS\nX0Skkiyc+//au3/fqOs4juOvHv1BUILSyKLRwRIZJCbq5ObAwmCcdPU/MP4H/gv+E05EY9TdicU4\nECOicSAkSiQw0AF/QNXhfZdrS2m99q73pjweSRN+pTm40Gc/38+vjVRUvkj9/a9N8PlupBZiLGV8\nPcWLw88xitPpVAS/zMMnSnyTuqV33nFK6jGjODELRlBzspbkl3m/iD28nHoceSdbD3rd7EwqUJcz\nfty3lNqvc1QPvVxNRftmtm7kHf0bvD3B5xodNPtMxnudkorVu6kwji55fJBaZv7x8DV8sM/XP23v\np2IJ02YExUNeSoXnevZezHErDz/iu58+q/Rm4U52vt/qrSQfpeafLmb3EdViann7r6mR5m+pOB1L\nXSx4MbUy743UvrErqbul7qaWkr85hb/HNJzPZCNHmIQR1Jx0G0Etpk4oWExNeO82yc/ullMLID7M\n+Nbb7V5NjZh+T83tvZJaXHE3NQr7KrUH6mbqG4DXUxG7lLoh+bPZvfyJXEitWIRZEKg5OZsK1Lz/\n8Y+nHiedTO3LOkrzRvN2JXWKxHs7/N5oVd5C6oSPtdSc1u3Ugon7qVidTM1zPZs6PeP71M28HZxL\nvf5HPf6Fg3Kj7pxcT0XqsA1S80prw4/Tqe/Qr0Wcpu211GO5n4c/3uxY6puT5dQese9SCx5upN6H\n0UKa1eGf+SQVrS5xWk6deCFOzJIR1BytpJYRH9bjtKdSm2Xvpb4ALqZGUA/icrRZWkhdx/556vHc\namqe74eML6QczeOdyvgOsPXUcvZ3knydnQ+HnZcLqaXv249qgmkSqDl7IfWF6P/cyLpfo1s6t38x\nmeTaCA7uUuobktG14CupkdPmx73PpUZOoxtaf0ydSNHFILXH7ds8PtskeHwJVANnUycCTNuJ1OGs\nC6n5jD/jds5uPk2dSHE+Nd90ObXHqeN/ylOp/XBXY98Th0OgGjiROo1gGtebD1KPkBZTj/LW4w3m\nYBZTCyI2Ys6Jw2UfVAP3Mj7Fer93Qx1LhWmQ2qNzlPch8WhPZzpziiupvXCrqVH3T/E4mMNnBNXI\nuUy+6XGQmrP4JxWmWV5ESH+rqXnNhb3+4B7+Su3TsrKTeRKoJp5PvRnbDwTd/AYtDT8Gw1/7NzUv\ncCtP5m2bwNEmUE0cT42g1lNzUQ9Sb86ZjA9h/Tv16G5j06/9EaMm4GgSqGaOpx7ZLQ1/fjtbrzQH\neFIIFAAtOeoIgJYECoCWBAqAlgQKgJYECoCWBAqAlgQKgJYECoCWBAqAlgQKgJYECoCWBAqAlgQK\
ngJYECoCWBAqAlgQKgJYECoCWBAqAlgQKgJYECoCWBAqAlgQKgJYECoCWBAqAlgQKgJYECoCWBAqA\nlgQKgJYECoCWBAqAlgQKgJYECoCWBAqAlgQKgJYECoCWBAqAlgQKgJYECoCWBAqAlgQKgJYECoCW\nBAqAlgQKgJYECoCWBAqAlgQKgJYECoCWBAqAlgQKgJYECoCWBAqAlgQKgJYECoCWBAqAlgQKgJYE\nCoCWBAqAlgQKgJYECoCWBAqAlgQKgJYECoCWBAqAlgQKgJYECoCWBAqAlgQKgJYECoCWBAqAlv4D\nuzMqwIjiJ78AAAAASUVORK5CYII=\n",
+       "text": [
+        "<matplotlib.figure.Figure at 0x7f90c46d61d0>"
+       ]
+      }
+     ],
+     "prompt_number": 9
+    }
+   ],
+   "metadata": {}
+  }
+ ]
+}
\ No newline at end of file
--- a/scripts/classify-objects.py	Tue Nov 03 13:48:56 2015 -0500
+++ b/scripts/classify-objects.py	Mon May 09 15:33:11 2016 -0400
@@ -4,14 +4,16 @@
 
 import numpy as np
 import sys, argparse
-#from cv2 import SVM_RBF, SVM_C_SVC
+from cv2.ml import SVM_RBF, SVM_C_SVC
 import cv2
 from scipy.stats import norm, lognorm
 
-# TODO add mode detection live
+# TODO add mode detection live, add choice of kernel and svm type (to be saved in future classifier format)
 
 parser = argparse.ArgumentParser(description='The program classifies the objects (road users) in the scene')
 parser.add_argument('--cfg', dest = 'configFilename', help = 'name of the configuration file', required = True)
+parser.add_argument('--kernel', dest = 'kernelType', help = 'kernel type for the support vector machine (SVM)', default = SVM_RBF, type = long)
+parser.add_argument('--svm', dest = 'svmType', help = 'SVM type', default = SVM_C_SVC, type = long)
 parser.add_argument('-d', dest = 'databaseFilename', help = 'name of the Sqlite database file (overrides the configuration file)')
 parser.add_argument('-i', dest = 'videoFilename', help = 'name of the video file (overrides the configuration file)')
 parser.add_argument('-n', dest = 'nObjects', help = 'number of objects to classify', type = int, default = None)
@@ -33,6 +35,8 @@
 params.convertToFrames(3.6)
 if params.homography is not None:
     invHomography = np.linalg.inv(params.homography)
+else:
+    invHomography = None
 
 if params.speedAggregationMethod == 'median':
     speedAggregationFunc = np.median
@@ -44,9 +48,9 @@
     print('Unknown speed aggregation method: {}. Exiting'.format(params.speedAggregationMethod))
     sys.exit()
 
-pedBikeCarSVM = ml.SVM()
+pedBikeCarSVM = ml.SVM(args.svmType, args.kernelType)
 pedBikeCarSVM.load(params.pedBikeCarSVMFilename)
-bikeCarSVM = ml.SVM()
+bikeCarSVM = ml.SVM(args.svmType, args.kernelType)
 bikeCarSVM.load(params.bikeCarSVMFilename)
 
 # log logistic for ped and bik otherwise ((pedBeta/pedAlfa)*((sMean/pedAlfa)**(pedBeta-1)))/((1+(sMean/pedAlfa)**pedBeta)**2.)
@@ -72,19 +76,19 @@
 for obj in objects:
     #obj.setFeatures(features)
     intervals.append(obj.getTimeInterval())
-timeInterval = moving.unionIntervals(intervals)
+timeInterval = moving.TimeInterval.unionIntervals(intervals)
 
 capture = cv2.VideoCapture(videoFilename)
-width = int(capture.get(cv2.cv.CV_CAP_PROP_FRAME_WIDTH))
-height = int(capture.get(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT))
+width = int(capture.get(cv2.CAP_PROP_FRAME_WIDTH))
+height = int(capture.get(cv2.CAP_PROP_FRAME_HEIGHT))
 
 pastObjects = []
 if params.undistort: # setup undistortion
-    [map1, map2] = computeUndistortMaps(width, height, undistortedImageMultiplication, intrinsicCameraMatrix, distortionCoefficients)
+    [map1, map2] = cvutils.computeUndistortMaps(width, height, params.undistortedImageMultiplication, params.intrinsicCameraMatrix, params.distortionCoefficients)
 if capture.isOpened():
     ret = True
     frameNum = timeInterval.first
-    capture.set(cv2.cv.CV_CAP_PROP_POS_FRAMES, frameNum)
+    capture.set(cv2.CAP_PROP_POS_FRAMES, frameNum)
     lastFrameNum = timeInterval.last
 
     while ret and frameNum <= lastFrameNum:
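The capture-property constants used above are the OpenCV 3 names; a small guard like the following sketch (not part of the patch) keeps a script runnable against OpenCV 2 as well:

    import cv2

    # OpenCV 3 renamed cv2.cv.CV_CAP_PROP_* to cv2.CAP_PROP_*
    if hasattr(cv2, 'CAP_PROP_POS_FRAMES'):
        FRAME_WIDTH, FRAME_HEIGHT, POS_FRAMES = cv2.CAP_PROP_FRAME_WIDTH, cv2.CAP_PROP_FRAME_HEIGHT, cv2.CAP_PROP_POS_FRAMES
    else:
        FRAME_WIDTH, FRAME_HEIGHT, POS_FRAMES = cv2.cv.CV_CAP_PROP_FRAME_WIDTH, cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, cv2.cv.CV_CAP_PROP_POS_FRAMES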
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/clean-ground-truth.py	Mon May 09 15:33:11 2016 -0400
@@ -0,0 +1,30 @@
+#! /usr/bin/env python                                                                                
+import argparse
+import pandas as pd
+import sqlite3
+from numpy import min, max
+
+#import utils
+#import storage
+
+parser = argparse.ArgumentParser(description='The program finds ground truth annotation objects with missing positions (and the instants at which they have no position)')
+#parser.add_argument('configFilename', help = 'name of the configuration file') 
+parser.add_argument('-d', dest = 'databaseFilename', help = 'name of the Ground Truth Sqlite database', required = True)
+
+args = parser.parse_args()
+
+data = pd.read_sql_query("select object_id, frame_number from bounding_boxes", sqlite3.connect(args.databaseFilename))
+#aggData = data.groupby(["object_id"]).agg([min, max, len])
+
+nProblems = 0
+for object_id in data["object_id"].unique():
+    tmp = data[data["object_id"] == object_id]
+    firstInstant = min(tmp["frame_number"])
+    lastInstant = max(tmp["frame_number"])
+    if lastInstant-firstInstant != len(tmp)-1:
+        nProblems += 1
+        times=set(range(firstInstant, lastInstant+1))
+        print("Object {} has missing positions at instants: {}".format(object_id, times.difference(tmp["frame_number"])))
+
+if nProblems == 0:
+    print("No problems were detected in database {}".format(args.databaseFilename))
--- a/scripts/compute-clearmot.py	Tue Nov 03 13:48:56 2015 -0500
+++ b/scripts/compute-clearmot.py	Mon May 09 15:33:11 2016 -0400
@@ -20,6 +20,8 @@
 parser.add_argument('--mask', dest = 'maskFilename', help = 'filename of the mask file used to define where objects were tracked')
 parser.add_argument('-f', dest = 'firstInstant', help = 'first instant for measurement', required = True, type = int)
 parser.add_argument('-l', dest = 'lastInstant', help = 'last instant for measurement', required = True, type = int)
+parser.add_argument('--offset', dest = 'nFramesOffsetAnnotations', help = 'number of frames to offset the ground truth annotations', type = int)
+parser.add_argument('--displayOffset', dest = 'nFramesOffsetDisplay', help = 'number of frames to offset annotations and objects for display', type = int)
 parser.add_argument('--display', dest = 'display', help = 'display the ground truth-to-object matches graphically', action = 'store_true')
 parser.add_argument('-i', dest = 'videoFilename', help = 'name of the video file (for display)')
 args = parser.parse_args()
@@ -41,10 +43,14 @@
         maskObjects += obj.getObjectsInMask(mask, inv(homography), 2) # TODO add option to keep object if at least one feature in mask
     objects = maskObjects    
 
-annotations = storage.loadGroundTruthFromSqlite(args.groundTruthDatabaseFilename)
+annotations = storage.loadBBMovingObjectsFromSqlite(args.groundTruthDatabaseFilename)
 for a in annotations:
     a.computeCentroidTrajectory(homography)
 
+if args.nFramesOffsetAnnotations is not None:
+    for a in annotations:
+        a.shiftTimeInterval(args.nFramesOffsetAnnotations)
+
 if args.display:
     motp, mota, mt, mme, fpt, gt, gtMatches, toMatches = moving.computeClearMOT(annotations, objects, args.matchingDistance, args.firstInstant, args.lastInstant, True)
 else:
@@ -56,8 +62,24 @@
 print 'Number of missed objects.frames: {}'.format(mt)
 print 'Number of mismatches: {}'.format(mme)
 print 'Number of false alarms.frames: {}'.format(fpt)
+
+def shiftMatches(matches, offset):
+    'Returns a copy of the match dictionaries with all frame numbers shifted by offset'
+    shifted = {}
+    for k in matches:
+        shifted[k] = {t+offset:v for t, v in matches[k].iteritems()}
+    return shifted
+
 if args.display:
-    cvutils.displayTrajectories(args.videoFilename, objects, {}, inv(homography), args.firstInstant, args.lastInstant, annotations = annotations, gtMatches = gtMatches, toMatches = toMatches)#, rescale = args.rescale, nFramesStep = args.nFramesStep, saveAllImages = args.saveAllImages, undistort = (undistort or args.undistort), intrinsicCameraMatrix = intrinsicCameraMatrix, distortionCoefficients = distortionCoefficients, undistortedImageMultiplication = undistortedImageMultiplication)
+    if args.nFramesOffsetDisplay is not None:
+        firstInstant = args.firstInstant+args.nFramesOffsetDisplay
+        lastInstant = args.lastInstant+args.nFramesOffsetDisplay
+        for a in annotations:
+            a.shiftTimeInterval(args.nFramesOffsetDisplay)
+        for o in objects:
+            o.shiftTimeInterval(args.nFramesOffsetDisplay)
+        gtMatches = shiftMatches(gtMatches, args.nFramesOffsetDisplay)
+        toMatches = shiftMatches(toMatches, args.nFramesOffsetDisplay)
+    else:
+        firstInstant = args.firstInstant
+        lastInstant = args.lastInstant
+    cvutils.displayTrajectories(args.videoFilename, objects, {}, inv(homography), firstInstant, lastInstant, annotations = annotations, gtMatches = gtMatches, toMatches = toMatches)#, rescale = args.rescale, nFramesStep = args.nFramesStep, saveAllImages = args.saveAllImages, undistort = (undistort or args.undistort), intrinsicCameraMatrix = intrinsicCameraMatrix, distortionCoefficients = distortionCoefficients, undistortedImageMultiplication = undistortedImageMultiplication)
 
     #print('Ground truth matches')
     #print(gtMatches)
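
A toy check of the shiftMatches helper added above; the match dictionaries map ground truth (or tracker object) ids to per-frame matches, and the values below are made up:

gtMatches = {1: {100: 3, 101: 3}}
print(shiftMatches(gtMatches, 5)) # {1: {105: 3, 106: 3}}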
--- a/scripts/compute-homography.py	Tue Nov 03 13:48:56 2015 -0500
+++ b/scripts/compute-homography.py	Mon May 09 15:33:11 2016 -0400
@@ -100,10 +100,12 @@
     print('Click on {0} points in the video frame'.format(args.nPoints))
     plt.figure()
     plt.imshow(videoImg)
+    plt.tight_layout()
     videoPts = np.array(plt.ginput(args.nPoints, timeout=3000))
     print('Click on {0} points in the world image'.format(args.nPoints))
     plt.figure()
     plt.imshow(worldImg)
+    plt.tight_layout()
     worldPts = args.unitsPerPixel*np.array(plt.ginput(args.nPoints, timeout=3000))
     plt.close('all')
     homography, mask = cv2.findHomography(videoPts, worldPts)
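
Once cv2.findHomography returns, projecting a clicked image point to world coordinates is a single homogeneous multiplication and normalization. A small numpy sketch (the pixel coordinates are made up):

import numpy as np

def projectPoint(homography, x, y):
    'Projects image point (x, y) through the homography (homogeneous coordinates)'
    p = np.dot(homography, np.array([x, y, 1.]))
    return p[:2]/p[2]

# e.g. projectPoint(homography, 320., 240.) gives the world position of pixel (320, 240)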
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/learn-poi.py	Mon May 09 15:33:11 2016 -0400
@@ -0,0 +1,53 @@
+#! /usr/bin/env python
+
+import argparse
+
+import numpy as np
+from sklearn import mixture
+import matplotlib.pyplot as plt
+
+import storage, ml
+
+parser = argparse.ArgumentParser(description='The program learns and displays Gaussians fit to the beginnings and ends of object trajectories (based on the 2015 PhD thesis of Mohamed Gomaa Mohamed). TODO: save the data')
+parser.add_argument('-d', dest = 'databaseFilename', help = 'name of the Sqlite database file', required = True)
+parser.add_argument('-t', dest = 'trajectoryType', help = 'type of trajectories to display', choices = ['feature', 'object'], default = 'object')
+parser.add_argument('-n', dest = 'nClusters', help = 'number of point clusters', required = True, type = int)
+parser.add_argument('--covariance-type', dest = 'covarianceType', help = 'type of covariance matrix of the Gaussian mixture model', default = "full")
+parser.add_argument('-w', dest = 'worldImageFilename', help = 'filename of the world image')
+parser.add_argument('-u', dest = 'pixelsPerUnit', help = 'number of pixels per unit of distance', type = float, default = 1.)
+
+args = parser.parse_args()
+
+objects = storage.loadTrajectoriesFromSqlite(args.databaseFilename, args.trajectoryType)
+
+beginnings = []
+ends = []
+for o in objects:
+    beginnings.append(o.getPositionAt(0).aslist())
+    ends.append(o.getPositionAt(int(o.length())-1).aslist())
+
+beginnings = np.array(beginnings)
+ends = np.array(ends)
+
+gmm = mixture.GMM(n_components = args.nClusters, covariance_type = args.covarianceType)
+beginningModel = gmm.fit(beginnings)
+gmm = mixture.GMM(n_components = args.nClusters, covariance_type = args.covarianceType)
+endModel = gmm.fit(ends)
+
+fig = plt.figure()
+if args.worldImageFilename is not None and args.pixelsPerUnit is not None:
+    img = plt.imread(args.worldImageFilename)
+    plt.imshow(img)
+ml.plotGMMClusters(beginningModel, beginnings, fig, nPixelsPerUnit = args.pixelsPerUnit)
+plt.axis('equal')
+plt.title('Origins')
+print('Origin Clusters:\n{}'.format(ml.computeClusterSizes(beginningModel.predict(beginnings), range(args.nClusters))))
+
+fig = plt.figure()
+if args.worldImageFilename is not None and args.pixelsPerUnit is not None:
+    img = plt.imread(args.worldImageFilename)
+    plt.imshow(img)
+ml.plotGMMClusters(endModel, ends, fig, nPixelsPerUnit = args.pixelsPerUnit)
+plt.axis('equal')
+plt.title('Destinations')
+print('Destination Clusters:\n{}'.format(ml.computeClusterSizes(endModel.predict(ends), range(args.nClusters))))
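
A hedged follow-up: once the models are fit, a new trajectory start point can be assigned to its most likely origin cluster with the standard sklearn GMM calls (the coordinates below are made up):

import numpy as np

newStart = np.array([[12.3, 45.6]]) # hypothetical world coordinates
cluster = beginningModel.predict(newStart)[0]
probability = beginningModel.predict_proba(newStart)[0, cluster]
print('origin cluster {} (probability {:.2f})'.format(cluster, probability))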
--- a/scripts/train-object-classification.py	Tue Nov 03 13:48:56 2015 -0500
+++ b/scripts/train-object-classification.py	Mon May 09 15:33:11 2016 -0400
@@ -2,7 +2,7 @@
 
 import numpy as np
 import sys, argparse
-from cv2 import SVM_RBF, SVM_C_SVC
+from cv2.ml import SVM_RBF, SVM_C_SVC, ROW_SAMPLE
 
 import cvutils, moving, ml
 
@@ -10,6 +10,7 @@
 parser.add_argument('-d', dest = 'directoryName', help = 'parent directory name for the directories containing the samples for the different road users', required = True)
 parser.add_argument('--kernel', dest = 'kernelType', help = 'kernel type for the support vector machine (SVM)', default = SVM_RBF, type = long)
 parser.add_argument('--svm', dest = 'svmType', help = 'SVM type', default = SVM_C_SVC, type = long)
+# TODO make other SVM parameters configurable: C, C0, Nu, etc.
 parser.add_argument('-s', dest = 'rescaleSize', help = 'rescale size of image samples', default = 64, type = int)
 parser.add_argument('-o', dest = 'nOrientations', help = 'number of orientations in HoG', default = 9, type = int)
 parser.add_argument('-p', dest = 'nPixelsPerCell', help = 'number of pixels per cell', default = 8, type = int)
@@ -24,7 +25,6 @@
                     'bicycle': args.directoryName + "/Cyclists/",
                     'car': args.directoryName + "/Vehicles/"}
 
-#directory_model = args.directoryName
 trainingSamplesPBV = {}
 trainingLabelsPBV = {}
 trainingSamplesBV = {}
@@ -47,21 +47,21 @@
 
 # Training the Support Vector Machine
 print "Training Pedestrian-Cyclist-Vehicle Model"
-model = ml.SVM()
-model.train(np.concatenate(trainingSamplesPBV.values()), np.concatenate(trainingLabelsPBV.values()), args.svmType, args.kernelType)
+model = ml.SVM(args.svmType, args.kernelType)
+model.train(np.concatenate(trainingSamplesPBV.values()), ROW_SAMPLE, np.concatenate(trainingLabelsPBV.values()))
 model.save(args.directoryName + "/modelPBV.xml")
 
 print "Training Cyclist-Vehicle Model"
-model = ml.SVM()
-model.train(np.concatenate(trainingSamplesBV.values()), np.concatenate(trainingLabelsBV.values()), args.svmType, args.kernelType)
+model = ml.SVM(args.svmType, args.kernelType)
+model.train(np.concatenate(trainingSamplesBV.values()), ROW_SAMPLE, np.concatenate(trainingLabelsBV.values()))
 model.save(args.directoryName + "/modelBV.xml")
 
 print "Training Pedestrian-Cyclist Model"
-model = ml.SVM()
-model.train(np.concatenate(trainingSamplesPB.values()), np.concatenate(trainingLabelsPB.values()), args.svmType, args.kernelType)
+model = ml.SVM(args.svmType, args.kernelType)
+model.train(np.concatenate(trainingSamplesPB.values()), ROW_SAMPLE, np.concatenate(trainingLabelsPB.values()))
 model.save(args.directoryName + "/modelPB.xml")
 
 print "Training Pedestrian-Vehicle Model"
-model = ml.SVM()
-model.train(np.concatenate(trainingSamplesPV.values()), np.concatenate(trainingLabelsPV.values()), args.svmType, args.kernelType)
+model = ml.SVM(args.svmType, args.kernelType)
+model.train(np.concatenate(trainingSamplesPV.values()), ROW_SAMPLE, np.concatenate(trainingLabelsPV.values()))
 model.save(args.directoryName + "/modelPV.xml")
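
The constructor and train calls above follow the OpenCV 3 cv2.ml API. A minimal sketch of what the updated ml.SVM wrapper could look like; the actual class in ml.py may differ:

import cv2

class SVM(object):
    'Thin wrapper around the OpenCV 3 SVM, mirroring the calls used above (a sketch, not the actual ml.py code)'
    def __init__(self, svmType = cv2.ml.SVM_C_SVC, kernelType = cv2.ml.SVM_RBF):
        self.model = cv2.ml.SVM_create()
        self.model.setType(svmType)
        self.model.setKernel(kernelType)

    def train(self, samples, layout, responses):
        # layout is cv2.ml.ROW_SAMPLE (one sample per row) or cv2.ml.COL_SAMPLE
        self.model.train(samples.astype('float32'), layout, responses.astype('int32'))

    def save(self, filename):
        self.model.save(filename)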
--- a/scripts/undistort-video.py	Tue Nov 03 13:48:56 2015 -0500
+++ b/scripts/undistort-video.py	Mon May 09 15:33:11 2016 -0400
@@ -7,6 +7,7 @@
 
 import cvutils
 from math import ceil, log10
+from os import path, mkdir
 
 parser = argparse.ArgumentParser(description='''The program converts a video into a series of images corrected for distortion. One can then use mencoder to generate a movie, eg
 $ mencoder 'mf://./*.png' -mf fps=[framerate]:type=png -ovc xvid -xvidencopts bitrate=[bitrate] -nosound -o [output.avi]''')
@@ -17,6 +18,10 @@
 parser.add_argument('--undistorted-multiplication', dest = 'undistortedImageMultiplication', help = 'undistorted image multiplication', type = float)
 parser.add_argument('-f', dest = 'firstFrameNum', help = 'number of the first frame to undistort', type = int)
 parser.add_argument('-l', dest = 'lastFrameNum', help = 'number of the last frame to undistort', type = int)
+parser.add_argument('-d', dest = 'destinationDirname', help = 'name of the directory where the undistorted frames are saved')
+parser.add_argument('--encode', dest = 'encodeVideo', help = 'encode the undistorted frames into a video at the end (Xvid)', action = 'store_true')
+parser.add_argument('--fps', dest = 'fps', help = 'frames per second of the output video file if encoding', type = float, default = 30)
+parser.add_argument('--bitrate', dest = 'bitrate', help = 'bitrate of the output video file if encoding', type = int, default = 5000)
 
 args = parser.parse_args()
 
@@ -24,15 +29,24 @@
 #distortionCoefficients = args.distortionCoefficients
 #undistortedImageMultiplication = args.undistortedImageMultiplication
 #firstFrameNum = params.firstFrameNum
+if args.destinationDirname is None:
+    destinationDirname = ''
+else:
+    if not args.destinationDirname.endswith('/'):
+        destinationDirname = args.destinationDirname+'/'
+    else:
+        destinationDirname = args.destinationDirname
+    if not path.exists(destinationDirname):
+        mkdir(destinationDirname)
 
 capture = cv2.VideoCapture(args.videoFilename)
-width = int(capture.get(cv2.cv.CV_CAP_PROP_FRAME_WIDTH))
-height = int(capture.get(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT))
+width = int(capture.get(cv2.CAP_PROP_FRAME_WIDTH))
+height = int(capture.get(cv2.CAP_PROP_FRAME_HEIGHT))
 [map1, map2] = cvutils.computeUndistortMaps(width, height, args.undistortedImageMultiplication, intrinsicCameraMatrix, args.distortionCoefficients)
 if capture.isOpened():
     ret = True
     frameNum = args.firstFrameNum
-    capture.set(cv2.cv.CV_CAP_PROP_POS_FRAMES, args.firstFrameNum)
+    capture.set(cv2.CAP_PROP_POS_FRAMES, args.firstFrameNum)
     if args.lastFrameNum is None:
         from sys import maxint
         lastFrameNum = maxint
@@ -43,5 +57,13 @@
             ret, img = capture.read()
             if ret:
                 img = cv2.remap(img, map1, map2, interpolation=cv2.INTER_LINEAR)
-                cv2.imwrite('undistorted-{{:0{}}}.png'.format(nZerosFilename).format(frameNum), img)
+                cv2.imwrite(destinationDirname+'undistorted-{{:0{}}}.png'.format(nZerosFilename).format(frameNum), img)
             frameNum += 1
+
+if args.encodeVideo:
+    print('Encoding the image files into a video')
+    from subprocess import check_call
+    from storage import openCheck
+    out = openCheck("err.log", "w")
+    check_call("mencoder \'mf://"+destinationDirname+"*.png\' -mf fps={}:type=png -ovc xvid -xvidencopts bitrate={} -nosound -o ".format(args.fps, args.bitrate)+destinationDirname+"undistort.avi", stderr = out, shell = True)
+    out.close()
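
For context, a hedged sketch of what cvutils.computeUndistortMaps might do with the standard OpenCV calls; the recentering of the principal point is an assumption and the actual implementation may differ:

import cv2
from numpy import array, identity

def computeUndistortMaps(width, height, undistortedImageMultiplication, intrinsicCameraMatrix, distortionCoefficients):
    'Returns the pixel maps to undistort frames with cv2.remap'
    newImgSize = (int(round(width*undistortedImageMultiplication)), int(round(height*undistortedImageMultiplication)))
    newCameraMatrix = intrinsicCameraMatrix.copy()
    newCameraMatrix[0, 2] = newImgSize[0]/2. # recenter the principal point in the enlarged image (assumption)
    newCameraMatrix[1, 2] = newImgSize[1]/2.
    return cv2.initUndistortRectifyMap(intrinsicCameraMatrix, array(distortionCoefficients), identity(3), newCameraMatrix, newImgSize, cv2.CV_32FC1)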
--- a/tracking.cfg	Tue Nov 03 13:48:56 2015 -0500
+++ b/tracking.cfg	Mon May 09 15:33:11 2016 -0400
@@ -36,9 +36,9 @@
 # maximum number of features added at each frame
 max-nfeatures = 1000
 # quality level of the good features to track
-feature-quality = 0.1
+feature-quality = 0.0812219538558
 # minimum distance between features (px)
-min-feature-distanceklt = 5
+min-feature-distanceklt = 3.54964337411
 # size of the block for feature characteristics (px)
 block-size = 7
 # use of Harris corner detector
@@ -46,7 +46,7 @@
 # k parameter to detect good features to track (OpenCV)
 k = 0.4
 # size of the search window at each pyramid level (px)
-window-size = 7
+window-size = 6
 # maximal pyramid level in the feature tracking algorithm
 pyramid-level = 5
 # number of displacements over which minimum feature motion is tested
@@ -64,22 +64,22 @@
 # maximum number of iterations to stop feature tracking
 max-number-iterations = 20
 # minimum error to reach to stop feature tracking
-min-tracking-error = 0.3
+min-tracking-error = 0.183328975142
 # minimum eigen value of a 2x2 normal matrix of optical flow equations
 min-feature-eig-threshold = 1e-4
 # minimum length of a feature (number of frames) to consider a feature for grouping
-min-feature-time = 20
+min-feature-time = 9
 # Min Max similarity parameters (Beymer et al. method)
 # connection distance in feature grouping (world distance unit or px)
-mm-connection-distance = 3.75
+mm-connection-distance = 2.68813545522
 # segmentation distance in feature grouping (world distance unit or px)
-mm-segmentation-distance = 1.5
+mm-segmentation-distance = 1.81511847456
 # maximum distance between features for grouping (world distance unit or px)
 max-distance = 5
 # minimum cosine of the angle between the velocity vectors for grouping
 min-velocity-cosine = 0.8
 # minimum average number of features per frame to create a vehicle hypothesis
-min-nfeatures-group = 3
+min-nfeatures-group = 3.16747690802
 # Road user classification
 # min number of pixels in cropped image to classify by SVM
 min-npixels-crop = 400
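
The file is plain key = value lines with no section header, so Python 2's ConfigParser needs a dummy section prepended before reading. A minimal reading sketch (the actual loading code in the project may differ):

from ConfigParser import ConfigParser
from StringIO import StringIO

config = ConfigParser()
config.readfp(StringIO('[main]\n' + open('tracking.cfg').read()))
print(config.getfloat('main', 'feature-quality'))
print(config.getint('main', 'min-feature-time'))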