Mercurial Hosting > traffic-intelligence
changeset 619:dc2d0a0d7fe1
merged code from Mohamed Gomaa Mohamed for the use of points of interest in motion pattern learning and motion prediction (TRB 2015)
author | Nicolas Saunier <nicolas.saunier@polymtl.ca> |
---|---|
date | Wed, 10 Dec 2014 15:27:08 -0500 |
parents | 04a8304e13f0 (current diff) 1a92d28e2d05 (diff) |
children | 582508610572 |
files | python/events.py python/moving.py python/objectsmoothing.py python/poly-utils.py python/prediction.py python/storage.py |
diffstat | 7 files changed, 595 insertions(+), 196 deletions(-) [+] |
line wrap: on
line diff
--- a/python/events.py Sun Dec 07 23:01:02 2014 -0500 +++ b/python/events.py Wed Dec 10 15:27:08 2014 -0500 @@ -9,8 +9,37 @@ import itertools import moving, prediction, indicators, utils +__metaclass__ = type -__metaclass__ = type +def findRoute(prototypes,objects,i,j,noiseEntryNums,noiseExitNums,minSimilarity= 0.3, spatialThreshold=1.0, delta=180): + if i[0] not in noiseEntryNums: + prototypesRoutes= [ x for x in sorted(prototypes.keys()) if i[0]==x[0]] + elif i[1] not in noiseExitNums: + prototypesRoutes=[ x for x in sorted(prototypes.keys()) if i[1]==x[1]] + else: + prototypesRoutes=[x for x in sorted(prototypes.keys())] + routeSim={} + lcss = utils.LCSS(similarityFunc=lambda x,y: (distanceForLCSS(x,y) <= spatialThreshold),delta=delta) + for y in prototypesRoutes: + if y in prototypes.keys(): + prototypesIDs=prototypes[y] + similarity=[] + for x in prototypesIDs: + s=lcss.computeNormalized(objects[j].positions, objects[x].positions) + similarity.append(s) + routeSim[y]=max(similarity) + route=max(routeSim, key=routeSim.get) + if routeSim[route]>=minSimilarity: + return route + else: + return i + +def getRoute(obj,prototypes,objects,noiseEntryNums,noiseExitNums,useDestination=True): + route=(obj.startRouteID,obj.endRouteID) + if useDestination: + if route not in prototypes.keys(): + route= findRoute(prototypes,objects,route,obj.getNum(),noiseEntryNums,noiseExitNums) + return route class Interaction(moving.STObject): '''Class for an interaction between two road users @@ -124,17 +153,20 @@ minDistance[instant] = moving.MovingObject.minDistance(self.roadUser1, self.roadUser2, instant) self.addIndicator(indicators.SeverityIndicator(Interaction.indicatorNames[3], minDistance)) - def computeCrossingsCollisions(self, predictionParameters, collisionDistanceThreshold, timeHorizon, computeCZ = False, debug = False, timeInterval = None, nProcesses = 1): + def computeCrossingsCollisions(self, predictionParameters, collisionDistanceThreshold, timeHorizon, computeCZ = False, 
debug = False, timeInterval = None, nProcesses = 1,usePrototypes=True,route1= (-1,-1),route2=(-1,-1),prototypes={},secondStepPrototypes={},nMatching={},objects=[],noiseEntryNums=[],noiseExitNums=[],minSimilarity=0.1,mostMatched=None,useDestination=True,useSpeedPrototype=True,acceptPartialLength=30, step=1): '''Computes all crossing and collision points at each common instant for two road users. ''' self.collisionPoints={} self.crossingZones={} TTCs = {} + if usePrototypes: + route1= getRoute(self.roadUser1,prototypes,objects,noiseEntryNums,noiseExitNums,useDestination) + route2= getRoute(self.roadUser2,prototypes,objects,noiseEntryNums,noiseExitNums,useDestination) if timeInterval: commonTimeInterval = timeInterval else: commonTimeInterval = self.timeInterval - self.collisionPoints, self.crossingZones = prediction.computeCrossingsCollisions(predictionParameters, self.roadUser1, self.roadUser2, collisionDistanceThreshold, timeHorizon, computeCZ, debug, commonTimeInterval, nProcesses) + self.collisionPoints, self.crossingZones = prediction.computeCrossingsCollisions(predictionParameters, self.roadUser1, self.roadUser2, collisionDistanceThreshold, timeHorizon, computeCZ, debug, commonTimeInterval, nProcesses,usePrototypes,route1,route2,prototypes,secondStepPrototypes,nMatching,objects,noiseEntryNums,noiseExitNums,minSimilarity,mostMatched,useDestination,useSpeedPrototype,acceptPartialLength, step) for i, cp in self.collisionPoints.iteritems(): TTCs[i] = prediction.SafetyPoint.computeExpectedIndicator(cp) # add probability of collision, and probability of successful evasive action @@ -142,13 +174,12 @@ if computeCZ: pPETs = {} - for i in list(commonTimeInterval)[:-1]: - if len(self.crossingZones[i]) > 0: - pPETs[i] = prediction.SafetyPoint.computeExpectedIndicator(self.crossingZones[i]) + for i, cz in self.crossingZones.iteritems(): + pPETs[i] = prediction.SafetyPoint.computeExpectedIndicator(cz) 
self.addIndicator(indicators.SeverityIndicator(Interaction.indicatorNames[9], pPETs)) def addVideoFilename(self,videoFilename): - self.videoFilename= videoFilename + self.videoFilename= videoFilename def addInteractionType(self,interactionType): ''' interaction types: conflict or collision if they are known'''
--- a/python/moving.py Sun Dec 07 23:01:02 2014 -0500 +++ b/python/moving.py Wed Dec 10 15:27:08 2014 -0500 @@ -795,7 +795,7 @@ return False def wiggliness(self): - return self.cumulatedDisplacement()/float(Point.distanceNorm2(self.__getitem__(0),self.__getitem__(self.length()-1))) + return self.getCumulativeDistance(self.length()-1)/float(Point.distanceNorm2(self.__getitem__(0),self.__getitem__(self.length()-1))) def getIntersections(self, p1, p2): '''Returns a list of the indices at which the trajectory @@ -1068,7 +1068,7 @@ else: print('Load features to compute a maximum size') return None - + def setRoutes(self, startRouteID, endRouteID): self.startRouteID = startRouteID self.endRouteID = endRouteID @@ -1165,7 +1165,10 @@ def classifyUserTypeSpeedMotorized(self, threshold, aggregationFunc = median, ignoreNInstantsAtEnds = 0): '''Classifies slow and fast road users slow: non-motorized -> pedestrians - fast: motorized -> cars''' + fast: motorized -> cars + + aggregationFunc can be any function that can be applied to a vector of speeds, including percentile: + aggregationFunc = lambda x: percentile(x, percentileFactor) # where percentileFactor is 85 for 85th percentile''' if ignoreNInstantsAtEnds > 0: speeds = self.getSpeeds()[ignoreNInstantsAtEnds:-ignoreNInstantsAtEnds] else:
--- a/python/objectsmoothing.py Sun Dec 07 23:01:02 2014 -0500 +++ b/python/objectsmoothing.py Wed Dec 10 15:27:08 2014 -0500 @@ -1,131 +1,234 @@ -import storage, moving, utils -from math import * #atan2,asin,degrees,sin,cos,pi -import numpy as np - -import matplotlib.pyplot as plt - -def findNearest(feat, featureSet,t,reverse=True): - dist={} - for f in featureSet: - if reverse: - dist[f]= moving.Point.distanceNorm2(feat.getPositionAtInstant(t+1),f.getPositionAtInstant(t)) - else: - dist[f]= moving.Point.distanceNorm2(feat.getPositionAtInstant(t-1),f.getPositionAtInstant(t)) - return min(dist, key=dist.get) # = utils.argmaxDict(dist) - -def getFeatures(obj): - longestFeature = utils.argmaxDict({f:f.length() for i,f in enumerate(obj.features)}) - t1,t3 = longestFeature.getFirstInstant(), longestFeature.getLastInstant() - listFeatures=[[longestFeature,t1,t3,moving.Point(0,0)]] - # find the features to fill in the beginning of the object existence - currentFeature = longestFeature - while t1!=obj.getFirstInstant(): - delta=listFeatures[-1][3] - featureSet = [f for f in obj.features if f.existsAtInstant(t1-1)] - feat = findNearest(currentFeature,featureSet,t1-1,reverse=True) - if feat.existsAtInstant(t1): - listFeatures.append([feat,feat.getFirstInstant(),t1-1,(currentFeature.getPositionAtInstant(t1)-feat.getPositionAtInstant(t1))+delta]) - else: - listFeatures.append([feat,feat.getFirstInstant(),t1-1,(currentFeature.getPositionAtInstant(t1)-feat.getPositionAtInstant(t1-1))+delta]) - currentFeature = feat - t1= feat.getFirstInstant() - # find the features to fill in the end of the object existence - delta=moving.Point(0,0) - currentFeature = longestFeature - while t3!= obj.getLastInstant(): - featureSet = [f for f in obj.features if f.existsAtInstant(t3+1)] - feat = findNearest(currentFeature,featureSet,t3+1,reverse=False) - if feat.existsAtInstant(t3): - 
listFeatures.append([feat,t3+1,feat.getLastInstant(),(currentFeature.getPositionAtInstant(t3)-feat.getPositionAtInstant(t3))+delta]) - else: - listFeatures.append([feat,t3+1,feat.getLastInstant(),(currentFeature.getPositionAtInstant(t3)-feat.getPositionAtInstant(t3+1))+delta]) - currentFeature = feat - t3= feat.getLastInstant() - delta=listFeatures[-1][3] - return listFeatures - -def buildFeature(obj,num=1): - listFeatures= getFeatures(obj) - tmp={} - delta={} - for i in listFeatures: - for t in xrange(i[1],i[2]+1): - tmp[t]=[i[0],i[3]] - newTraj = moving.Trajectory() - - for instant in obj.getTimeInterval(): - newTraj.addPosition(tmp[instant][0].getPositionAtInstant(instant)+tmp[instant][1]) - newFeature= moving.MovingObject(num,timeInterval=obj.getTimeInterval(),positions=newTraj) - return newFeature - -def getBearing(p1,p2,p3): - angle = degrees(atan2(p3.y -p1.y, p3.x -p1.x)) - bearing1 = (90 - angle) % 360 - angle2 = degrees(atan2(p2.y -p1.y, p2.x -p1.x)) - bearing2 = (90 - angle2) % 360 - dist= moving.Point.distanceNorm2(p1, p2) - return [dist,bearing1,bearing2,bearing2-bearing1] - -def smoothObjectTrajectory(obj,newNum,smoothing=False,plotResults=True,halfWidth=3): - results=[] - bearing={} - feature= buildFeature(obj,1) - for t in feature.getTimeInterval(): - p1= feature.getPositionAtInstant(t) - p2= obj.getPositionAtInstant(t) - if t!=feature.getLastInstant(): - p3= feature.getPositionAtInstant(t+1) - else: - p1= feature.getPositionAtInstant(t-1) - p3= feature.getPositionAtInstant(t) - bearing[t]= getBearing(p1,p2,p3)[1] - results.append(getBearing(p1,p2,p3)) - - medianResults=np.median(results,0) - dist= medianResults[0] - angle= medianResults[3] - - for i in sorted(bearing.keys()): - bearing[i]= bearing[i]+angle - - if smoothing: - bearingInput=[] - for i in sorted(bearing.keys()): - bearingInput.append(bearing[i]) - bearingOut=utils.filterMovingWindow(bearingInput, halfWidth) - for t,i in enumerate(sorted(bearing.keys())): - bearing[i]=bearingOut[t] - - 
#solve a smoothing problem in case of big drop in computing bearing (0,360) - for t,i in enumerate(sorted(bearing.keys())): - if i!= max(bearing.keys()) and abs(bearingInput[t] - bearingInput[t+1])>=340: - for x in xrange(max(i-halfWidth,min(bearing.keys())),min(i+halfWidth,max(bearing.keys()))+1): - bearing[x]=bearingInput[t-i+x] - - translated = moving.Trajectory() - for t in feature.getTimeInterval(): - p1= feature.getPositionAtInstant(t) - p1.x = p1.x + dist*sin(bearing[t]*pi/180) - p1.y = p1.y + dist*cos(bearing[t]*pi/180) - translated.addPosition(p1) - - #modify first and last un-smoothed positions (half width) - if smoothing: - d1= translated[halfWidth]- feature.positions[halfWidth] - d2= translated[-halfWidth-1]- feature.positions[-halfWidth-1] - for i in xrange(halfWidth): - p1 = feature.getPositionAt(i)+d1 - p2 = feature.getPositionAt(-i-1)+d2 - translated.setPosition(i,p1) - translated.setPosition(-i-1,p2) - - newObj= moving.MovingObject(newNum,timeInterval=feature.getTimeInterval(),positions=translated,velocities=obj.getVelocities()) - newObj.features=obj.features - newObj.userType=obj.userType - if plotResults: - plt.figure() - plt.title('objects_id = {}'.format(obj.num)) - newObj.plot('gx-') - feature.plot('cx-') - obj.plot('rx-') - return newObj +import storage, moving, utils +from math import * #atan2,asin,degrees,sin,cos,pi +import numpy as np + +import matplotlib.pyplot as plt + +def findNearest(feat, featureSet,t,reverse=True): + dist={} + for f in featureSet: + if reverse: + dist[f]= moving.Point.distanceNorm2(feat.getPositionAtInstant(t+1),f.getPositionAtInstant(t)) + else: + dist[f]= moving.Point.distanceNorm2(feat.getPositionAtInstant(t-1),f.getPositionAtInstant(t)) + return min(dist, key=dist.get) # = utils.argmaxDict(dist) + +def getFeatures(obj,features,featureID): + #longestFeature = utils.argmaxDict({f:f.length() for i,f in enumerate(obj.features)}) + t1,t3 = features[featureID].getFirstInstant(), features[featureID].getLastInstant() + 
listFeatures=[[features[featureID],t1,t3,moving.Point(0,0)]] + # find the features to fill in the beginning of the object existence + currentFeature = features[featureID] + while t1!=obj.getFirstInstant(): + delta=listFeatures[-1][3] + featureSet = [f for f in obj.features if f.existsAtInstant(t1-1)] + feat = findNearest(currentFeature,featureSet,t1-1,reverse=True) + if feat.existsAtInstant(t1): + listFeatures.append([feat,feat.getFirstInstant(),t1-1,(currentFeature.getPositionAtInstant(t1)-feat.getPositionAtInstant(t1))+delta]) + else: + listFeatures.append([feat,feat.getFirstInstant(),t1-1,(currentFeature.getPositionAtInstant(t1)-feat.getPositionAtInstant(t1-1))+delta]) + currentFeature = feat + t1= feat.getFirstInstant() + # find the features to fill in the end of the object existence + delta=moving.Point(0,0) + currentFeature = features[featureID] + while t3!= obj.getLastInstant(): + featureSet = [f for f in obj.features if f.existsAtInstant(t3+1)] + feat = findNearest(currentFeature,featureSet,t3+1,reverse=False) + if feat.existsAtInstant(t3): + listFeatures.append([feat,t3+1,feat.getLastInstant(),(currentFeature.getPositionAtInstant(t3)-feat.getPositionAtInstant(t3))+delta]) + else: + listFeatures.append([feat,t3+1,feat.getLastInstant(),(currentFeature.getPositionAtInstant(t3)-feat.getPositionAtInstant(t3+1))+delta]) + currentFeature = feat + t3= feat.getLastInstant() + delta=listFeatures[-1][3] + return listFeatures + +def buildFeature(obj,features,featureID,num=1): + listFeatures= getFeatures(obj,features,featureID) + tmp={} + delta={} + for i in listFeatures: + for t in xrange(i[1],i[2]+1): + tmp[t]=[i[0],i[3]] + newTraj = moving.Trajectory() + + for instant in obj.getTimeInterval(): + newTraj.addPosition(tmp[instant][0].getPositionAtInstant(instant)+tmp[instant][1]) + newFeature= moving.MovingObject(num,timeInterval=obj.getTimeInterval(),positions=newTraj) + return newFeature + +def getBearing(p1,p2,p3): + angle = degrees(atan2(p3.y -p1.y, p3.x -p1.x)) + 
bearing1 = (90 - angle) % 360 + angle2 = degrees(atan2(p2.y -p1.y, p2.x -p1.x)) + bearing2 = (90 - angle2) % 360 + dist= moving.Point.distanceNorm2(p1, p2) + return [dist,bearing1,bearing2,bearing2-bearing1] + +#Quantitative analysis "CSJ" functions +def computeVelocities (object,smoothing=True,halfWidth=3): #compute velocities from positions + velocities={} + for i in list(object.timeInterval)[:-1]: + p1= object.getPositionAtInstant(i) + p2= object.getPositionAtInstant(i+1) + velocities[i]=p2-p1 + velocities[object.getLastInstant()]= velocities[object.getLastInstant()-1] # duplicate last point + if smoothing: + velX= [velocities[y].aslist()[0] for y in sorted(velocities.keys())] + velY= [velocities[y].aslist()[1] for y in sorted(velocities.keys())] + v1= list(utils.filterMovingWindow(velX, halfWidth)) + v2= list(utils.filterMovingWindow(velY, halfWidth)) + smoothedVelocity={} + for t,i in enumerate(sorted(velocities.keys())): + smoothedVelocity[i]=moving.Point(v1[t], v2[t]) + velocities=smoothedVelocity + return velocities + +def computeAcceleration (object,fromPosition=True): + acceleration={} + if fromPosition: + velocities=computeVelocities(object,False,1) + for i in sorted (velocities.keys()): + if i != sorted (velocities.keys())[-1]: + acceleration[i]= velocities[i+1]-velocities[i] + else: + for i in list(object.timeInterval)[:-1]: + v1= object.getVelocityAtInstant(i) + v2= object.getVelocityAtInstant(i+1) + acceleration[i]= v2-v1 + return acceleration + +def computeJerk (object,fromPosition=True): + jerk={} + acceleration=computeAcceleration (object,fromPosition=fromPosition) + for i in sorted (acceleration.keys()): + if i != sorted (acceleration.keys())[-1]: + jerk[i]= (acceleration[i+1]-acceleration[i]).norm2() + return jerk + +def sumSquaredJerk (object,fromPosition=True): + jerk= computeJerk (object,fromPosition=fromPosition) + t=0 + for i in sorted(jerk.keys()): + t+= jerk[i]* jerk[i] + return t + +def 
smoothObjectTrajectory(obj,features,featureID,newNum,smoothing=False,halfWidth=3,create=False): + results=[] + bearing={} + if create: + feature= buildFeature(obj,features,featureID,num=1) + else: + feature=features[featureID] + for t in feature.getTimeInterval(): + p1= feature.getPositionAtInstant(t) + p2= obj.getPositionAtInstant(t) + if t!=feature.getLastInstant(): + p3= feature.getPositionAtInstant(t+1) + else: + p1= feature.getPositionAtInstant(t-1) + p3= feature.getPositionAtInstant(t) + bearing[t]= getBearing(p1,p2,p3)[1] + results.append(getBearing(p1,p2,p3)) + + + medianResults=np.median(results,0) + dist= medianResults[0] + angle= medianResults[3] + + for i in sorted(bearing.keys()): + bearing[i]= bearing[i]+angle + + if smoothing: + bearingInput=[] + for i in sorted(bearing.keys()): + bearingInput.append(bearing[i]) + import utils + bearingOut=utils.filterMovingWindow(bearingInput, halfWidth) + for t,i in enumerate(sorted(bearing.keys())): + bearing[i]=bearingOut[t] + + #solve a smoothing problem in case of big drop in computing bearing (0,360) + for t,i in enumerate(sorted(bearing.keys())): + if i!= max(bearing.keys()) and abs(bearingInput[t] - bearingInput[t+1])>=340: + for x in xrange(max(i-halfWidth,min(bearing.keys())),min(i+halfWidth,max(bearing.keys()))+1): + bearing[x]=bearingInput[t-i+x] + + translated = moving.Trajectory() + for t in feature.getTimeInterval(): + p1= feature.getPositionAtInstant(t) + p1.x = p1.x + dist*sin(bearing[t]*pi/180) + p1.y = p1.y + dist*cos(bearing[t]*pi/180) + translated.addPosition(p1) + + #modify first and last un-smoothed positions (half width) + if smoothing: + d1= translated[halfWidth]- feature.positions[halfWidth] + d2= translated[-halfWidth-1]- feature.positions[-halfWidth-1] + for i in xrange(halfWidth): + p1= feature.getPositionAt(i)+d1 + p2= feature.getPositionAt(-i-1)+d2 + translated.setPosition(i,p1) + translated.setPosition(-i-1,p2) + + newObj= 
moving.MovingObject(newNum,timeInterval=feature.getTimeInterval(),positions=translated) + return newObj + +def smoothObjectTrajectory(obj,features,newNum,minLengthParam=0.7,smoothing=False,plotResults=True,halfWidth=3,computeVelocities=True,optimize=True,create=False): + featureList=[i.num for i in obj.features if i.length() >= minLengthParam*obj.length()] + if featureList==[]: + featureList.append(longestFeature(obj)) + create=True + objs=[] + for featureID in featureList: + objTMP=smoothObjectTrajectory(obj,features,featureID,newNum,smoothing=smoothing,halfWidth=halfWidth,create=create) + objs.append(objTMP) + newTranslated = moving.Trajectory() + newInterval=[] + for t in obj.timeInterval: + xCoord=[] + yCoord=[] + for i in objs: + if i.existsAtInstant(t): + p1= i.getPositionAtInstant(t) + xCoord.append(p1.x) + yCoord.append(p1.y) + if xCoord!=[]: + tmp= moving.Point(np.median(xCoord),np.median(yCoord)) + newInterval.append(t) + newTranslated.addPosition(tmp) + + newObj= moving.MovingObject(newNum,timeInterval=moving.TimeInterval(min(newInterval),max(newInterval)),positions=newTranslated) + + if computeVelocities: + tmpTraj = moving.Trajectory() + velocities= computeVelocities(newObj,True,5) + for i in sorted(velocities.keys()): + tmpTraj.addPosition(velocities[i]) + newObj.velocities=tmpTraj + else: + newObj.velocities=obj.velocities + + if optimize: + csj1= sumSquaredJerk (obj,fromPosition=True) + csj2= sumSquaredJerk (newObj,fromPosition=True) + if csj1<csj2: + newObj=obj + newObj.velocities=obj.velocities + if computeVelocities and csj1>=csj2: + csj3= sumSquaredJerk (obj,fromPosition=False) + csj4= sumSquaredJerk (newObj,fromPosition=False) + if csj4<=csj3: + newObj.velocities= obj.velocities + newObj.featureNumbers=obj.featureNumbers + newObj.features=obj.features + newObj.userType=obj.userType + if plotResults: + plt.figure() + plt.title('objects_id = {}'.format(obj.num)) + for i in featureList: + features[i].plot('cx-') + obj.plot('rx-') + 
newObj.plot('gx-') + return newObj
--- a/python/poly-utils.py Sun Dec 07 23:01:02 2014 -0500 +++ b/python/poly-utils.py Wed Dec 10 15:27:08 2014 -0500 @@ -6,26 +6,18 @@ import numpy as np __metaclass__ = type +from indicators import SeverityIndicator -# inputs variables -#dirname= 'G:/0-phdstart/Code/trial/indicatorsNew/' -#extension= '-indicatorsNew.csv' -#indicatorsNames= {1:'Distance',2:'Cosine',3:'collision Course Angle',4:'Velocity Cosine',5:'Velocity Angle',6:'Speed Differential',7:'Collision Probability',8:'Severity Index',9:'TTC'} -''' min Distance case''' -dirname= 'G:/0-phdstart/Code/trial/minDistanceIndicator/' -extension= '-minDistanceInd.csv' -indicatorsNames= {1:'minDistance'} -def loadNewInteractions(videoFilename,interactionType, roaduserNum1,roaduserNum2, selectedIndicators=[]): +def loadNewInteractions(videoFilename,interactionType,dirname, extension, indicatorsNames, roaduserNum1,roaduserNum2, selectedIndicators=[]): '''Loads interactions from the POLY traffic event format''' from events import Interaction - from indicators import SeverityIndicator - #filename= dirname + videoFilename + extension - filename= dirname + interactionType+ '-' + videoFilename + extension # case of min distance todo: change the saving format to be matched with all outputs + filename= dirname + videoFilename + extension + #filename= dirname + interactionType+ '-' + videoFilename + extension # case of min distance todo: change the saving format to be matched with all outputs file = utils.openCheck(filename) if (not file): return [] - interactions = [] + #interactions = [] interactionNum = 0 data= np.loadtxt(filename) indicatorFrameNums= data[:,0] @@ -41,9 +33,94 @@ values= {} for i,t in enumerate(indicatorFrameNums): values[t] = [data[i,index] for index in selectedIndicators] - inter.addIndicator(SeverityIndicator('selectedIndicators', values)) - - interactions.append(inter) + inter.addIndicator(SeverityIndicator('selectedIndicators', values)) + + #interactions.append(inter) file.close() - return 
interactions + #return interactions + return inter + +# Plotting results + +frameRate = 15. + +# To run in directory that contains the directories that contain the results (Miss-xx and Incident-xx) +#dirname = '/home/nicolas/Research/Data/kentucky-db/' + +interactingRoadUsers = {'Miss/0404052336': [(0,3)] # 0,2 and 1 vs 3 + #, + #'Incident/0306022035': [(1,3)] + #, + #'Miss/0208030956': [(4,5),(5,7)] + } + + +def getIndicatorName(filename, withUnit = False): + if withUnit: + unit = ' (s)' + else: + unit = '' + if 'collision-point' in filename: + return 'TTC'+unit + elif 'crossing' in filename: + return 'pPET'+unit + elif 'probability' in filename: + return 'P(UEA)' + +def getMethodName(fileprefix): + if fileprefix == 'constant-velocity': + return 'Con. Vel.' + elif fileprefix == 'normal-adaptation': + return 'Norm. Ad.' + elif fileprefix == 'point-set': + return 'Pos. Set' + elif fileprefix == 'evasive-action': + return 'Ev. Act.' + elif fileprefix == 'point-set-evasive-action': + return 'Pos. 
Set' + +indicator2TimeIdx = {'TTC':2,'pPET':2, 'P(UEA)':3} +def getDataAtInstant(data, i): + return data[data[:,2] == i] + +def getPointsAtInstant(data, i): + return getDataAtInstant(i)[3:5] + +def getIndicator(data, roadUserNumbers, indicatorName): + if data.ndim ==1: + data.shape = (1,data.shape[0]) + + # find the order for the roadUserNumbers + uniqueObj1 = np.unique(data[:,0]) + uniqueObj2 = np.unique(data[:,1]) + found = False + if roadUserNumbers[0] in uniqueObj1 and roadUserNumbers[1] in uniqueObj2: + objNum1 = roadUserNumbers[0] + objNum2 = roadUserNumbers[1] + found = True + if roadUserNumbers[1] in uniqueObj1 and roadUserNumbers[0] in uniqueObj2: + objNum1 = roadUserNumbers[1] + objNum2 = roadUserNumbers[0] + found = True + + # get subset of data for road user numbers + if found: + roadUserData = data[np.logical_and(data[:,0] == objNum1, data[:,1] == objNum2),:] + if roadUserData.size > 0: + time = np.unique(roadUserData[:,indicator2TimeIdx[indicatorName]]) + values = {} + if indicatorName == 'P(UEA)': + tmp = roadUserData[:,4] + for k,v in zip(time, tmp): + values[k]=v + return SeverityIndicator(indicatorName, values, mostSevereIsMax = False, maxValue = 1.), roadUserData + else: + for i in xrange(time[0],time[-1]+1): + try: + tmp = getDataAtInstant(roadUserData, i) + values[i] = np.sum(tmp[:,5]*tmp[:,6])/np.sum(tmp[:,5])/frameRate + except IOError: + values[i] = np.inf + return SeverityIndicator(indicatorName, values, mostSevereIsMax = False), roadUserData + return None, None
--- a/python/prediction.py Sun Dec 07 23:01:02 2014 -0500 +++ b/python/prediction.py Wed Dec 10 15:27:08 2014 -0500 @@ -4,6 +4,8 @@ import moving import math import random +import numpy as np +from utils import LCSS class PredictedTrajectory: '''Class for predicted trajectories with lazy evaluation @@ -46,6 +48,14 @@ def getControl(self): return self.control + +def findNearestParams(initialPosition,prototypeTrajectory): + ''' nearest parameters are the index of minDistance and the orientation ''' + distances=[] + for position in prototypeTrajectory.positions: + distances.append(moving.Point.distanceNorm2(initialPosition, position)) + minDistanceIndex= np.argmin(distances) + return minDistanceIndex, moving.NormAngle.fromPoint(prototypeTrajectory.velocities[minDistanceIndex]).angle class PredictedTrajectoryPrototype(PredictedTrajectory): '''Predicted trajectory that follows a prototype trajectory @@ -64,15 +74,31 @@ self.constantSpeed = constantSpeed self.probability = probability self.predictedPositions = {0: initialPosition} - self.predictedSpeedOrientations = {0: moving.NormAngle.fromPoint(initialVelocity)} - + self.predictedSpeedOrientations = {0: moving.NormAngle(moving.NormAngle.fromPoint(initialVelocity).norm, findNearestParams(initialPosition,prototypeTrajectory)[1])}#moving.NormAngle.fromPoint(initialVelocity)} + def predictPosition(self, nTimeSteps): if nTimeSteps > 0 and not nTimeSteps in self.predictedPositions.keys(): - if constantSpeed: + if self.constantSpeed: # calculate cumulative distance - pass + speedNorm= self.predictedSpeedOrientations[0].norm #moving.NormAngle.fromPoint(initialVelocity).norm + anglePrototype = findNearestParams(self.predictedPositions[nTimeSteps-1],self.prototypeTrajectory)[1] + self.predictedSpeedOrientations[nTimeSteps]= moving.NormAngle(speedNorm, anglePrototype) + self.predictedPositions[nTimeSteps],tmp= moving.predictPosition(self.predictedPositions[nTimeSteps-1], self.predictedSpeedOrientations[nTimeSteps-1], 
moving.NormAngle(0,0), None) + else: # see c++ code, calculate ratio - pass + speedNorm= self.predictedSpeedOrientations[0].norm + instant=findNearestParams(self.predictedPositions[0],self.prototypeTrajectory)[0] + prototypeSpeeds= self.prototypeTrajectory.getSpeeds()[instant:] + ratio=float(speedNorm)/prototypeSpeeds[0] + resampledSpeeds=[sp*ratio for sp in prototypeSpeeds] + anglePrototype = findNearestParams(self.predictedPositions[nTimeSteps-1],self.prototypeTrajectory)[1] + if nTimeSteps<len(resampledSpeeds): + self.predictedSpeedOrientations[nTimeSteps]= moving.NormAngle(resampledSpeeds[nTimeSteps], anglePrototype) + self.predictedPositions[nTimeSteps],tmp= moving.predictPosition(self.predictedPositions[nTimeSteps-1], self.predictedSpeedOrientations[nTimeSteps-1], moving.NormAngle(0,0), None) + else: + self.predictedSpeedOrientations[nTimeSteps]= moving.NormAngle(resampledSpeeds[-1], anglePrototype) + self.predictedPositions[nTimeSteps],tmp= moving.predictPosition(self.predictedPositions[nTimeSteps-1], self.predictedSpeedOrientations[nTimeSteps-1], moving.NormAngle(0,0), None) + return self.predictedPositions[nTimeSteps] class PredictedTrajectoryRandomControl(PredictedTrajectory): @@ -141,10 +167,94 @@ savefig('predicted-trajectories-t-{0}.png'.format(currentInstant)) close() -def computeCrossingsCollisionsAtInstant(predictionParams, currentInstant, obj1, obj2, collisionDistanceThreshold, timeHorizon, computeCZ = False, debug = False): +def calculateProbability(nMatching,similarity,objects): + sumFrequencies=sum([nMatching[p] for p in similarity.keys()]) + prototypeProbability={} + for i in similarity.keys(): + prototypeProbability[i]= similarity[i] * float(nMatching[i])/sumFrequencies + sumProbabilities= sum([prototypeProbability[p] for p in prototypeProbability.keys()]) + probabilities={} + for i in prototypeProbability.keys(): + probabilities[objects[i]]= float(prototypeProbability[i])/sumProbabilities + return probabilities + +def 
findPrototypes(prototypes,nMatching,objects,route,partialObjPositions,noiseEntryNums,noiseExitNums,minSimilarity=0.1,mostMatched=None,spatialThreshold=1.0, delta=180): + ''' behaviour prediction first step''' + if route[0] not in noiseEntryNums: + prototypesRoutes= [ x for x in sorted(prototypes.keys()) if route[0]==x[0]] + elif route[1] not in noiseExitNums: + prototypesRoutes=[ x for x in sorted(prototypes.keys()) if route[1]==x[1]] + else: + prototypesRoutes=[x for x in sorted(prototypes.keys())] + lcss = LCSS(similarityFunc=lambda x,y: (distanceForLCSS(x,y) <= spatialThreshold),delta=delta) + similarity={} + for y in prototypesRoutes: + if y in prototypes.keys(): + prototypesIDs=prototypes[y] + for x in prototypesIDs: + s=lcss.computeNormalized(partialObjPositions, objects[x].positions) + if s >= minSimilarity: + similarity[x]=s + + if mostMatched==None: + probabilities= calculateProbability(nMatching,similarity,objects) + return probabilities + else: + mostMatchedValues=sorted(similarity.values(),reverse=True)[:mostMatched] + keys=[k for k in similarity.keys() if similarity[k] in mostMatchedValues] + newSimilarity={} + for i in keys: + newSimilarity[i]=similarity[i] + probabilities= calculateProbability(nMatching,newSimilarity,objects) + return probabilities + +def findPrototypesSpeed(prototypes,secondStepPrototypes,nMatching,objects,route,partialObjPositions,noiseEntryNums,noiseExitNums,minSimilarity=0.1,mostMatched=None,useDestination=True,spatialThreshold=1.0, delta=180): + if useDestination: + prototypesRoutes=[route] + else: + if route[0] not in noiseEntryNums: + prototypesRoutes= [ x for x in sorted(prototypes.keys()) if route[0]==x[0]] + elif route[1] not in noiseExitNums: + prototypesRoutes=[ x for x in sorted(prototypes.keys()) if route[1]==x[1]] + else: + prototypesRoutes=[x for x in sorted(prototypes.keys())] + lcss = LCSS(similarityFunc=lambda x,y: (distanceForLCSS(x,y) <= spatialThreshold),delta=delta) + similarity={} + for y in prototypesRoutes: 
+ if y in prototypes.keys(): + prototypesIDs=prototypes[y] + for x in prototypesIDs: + s=lcss.computeNormalized(partialObjPositions, objects[x].positions) + if s >= minSimilarity: + similarity[x]=s + + newSimilarity={} + for i in similarity.keys(): + if i in secondStepPrototypes.keys(): + for j in secondStepPrototypes[i]: + newSimilarity[j]=similarity[i] + probabilities= calculateProbability(nMatching,newSimilarity,objects) + return probabilities + +def getPrototypeTrajectory(obj,route,currentInstant,prototypes,secondStepPrototypes,nMatching,objects,noiseEntryNums,noiseExitNums,minSimilarity=0.1,mostMatched=None,useDestination=True,useSpeedPrototype=True): + partialInterval=moving.Interval(obj.getFirstInstant(),currentInstant) + partialObjPositions= obj.getObjectInTimeInterval(partialInterval).positions + if useSpeedPrototype: + prototypeTrajectories=findPrototypesSpeed(prototypes,secondStepPrototypes,nMatching,objects,route,partialObjPositions,noiseEntryNums,noiseExitNums,minSimilarity,mostMatched,useDestination) + else: + prototypeTrajectories=findPrototypes(prototypes,nMatching,objects,route,partialObjPositions,noiseEntryNums,noiseExitNums,minSimilarity,mostMatched) + return prototypeTrajectories + +def computeCrossingsCollisionsAtInstant(predictionParams,currentInstant, obj1, obj2, collisionDistanceThreshold, timeHorizon, computeCZ = False, debug = False,usePrototypes=True,route1= (-1,-1),route2=(-1,-1),prototypes={},secondStepPrototypes={},nMatching={},objects=[],noiseEntryNums=[],noiseExitNums=[],minSimilarity=0.1,mostMatched=None,useDestination=True,useSpeedPrototype=True): '''returns the lists of collision points and crossing zones''' - predictedTrajectories1 = predictionParams.generatePredictedTrajectories(obj1, currentInstant) - predictedTrajectories2 = predictionParams.generatePredictedTrajectories(obj2, currentInstant) + if usePrototypes: + 
prototypeTrajectories1=getPrototypeTrajectory(obj1,route1,currentInstant,prototypes,secondStepPrototypes,nMatching,objects,noiseEntryNums,noiseExitNums,minSimilarity,mostMatched,useDestination,useSpeedPrototype) + prototypeTrajectories2= getPrototypeTrajectory(obj2,route2,currentInstant,prototypes,secondStepPrototypes,nMatching,objects,noiseEntryNums,noiseExitNums,minSimilarity,mostMatched,useDestination,useSpeedPrototype) + predictedTrajectories1 = predictionParams.generatePredictedTrajectories(obj1, currentInstant,prototypeTrajectories1) + predictedTrajectories2 = predictionParams.generatePredictedTrajectories(obj2, currentInstant,prototypeTrajectories2) + else: + predictedTrajectories1 = predictionParams.generatePredictedTrajectories(obj1, currentInstant) + predictedTrajectories2 = predictionParams.generatePredictedTrajectories(obj2, currentInstant) collisionPoints = [] crossingZones = [] @@ -167,7 +277,8 @@ # cz = (et1.predictPosition(t1)+et2.predictPosition(t2)).multiply(0.5) cz = moving.segmentIntersection(et1.predictPosition(t1), et1.predictPosition(t1+1), et2.predictPosition(t2), et2.predictPosition(t2+1)) if cz != None: - crossingZones.append(SafetyPoint(cz, et1.probability*et2.probability, abs(t1-t2))) + deltaV= (et1.predictPosition(t1)- et1.predictPosition(t1+1) - et2.predictPosition(t2)+ et2.predictPosition(t2+1)).norm2() + crossingZones.append(SafetyPoint(cz, et1.probability*et2.probability, abs(t1-t2)-(float(collisionDistanceThreshold)/deltaV))) t2 += 1 t1 += 1 @@ -176,7 +287,7 @@ return currentInstant, collisionPoints, crossingZones -def computeCrossingsCollisions(predictionParams, obj1, obj2, collisionDistanceThreshold, timeHorizon, computeCZ = False, debug = False, timeInterval = None, nProcesses = 1): +def computeCrossingsCollisions(predictionParams, obj1, obj2, collisionDistanceThreshold, timeHorizon, computeCZ = False, debug = False, timeInterval = None,nProcesses = 1,usePrototypes=True,route1= 
(-1,-1),route2=(-1,-1),prototypes={},secondStepPrototypes={},nMatching={},objects=[],noiseEntryNums=[],noiseExitNums=[],minSimilarity=0.1,mostMatched=None,useDestination=True,useSpeedPrototype=True,acceptPartialLength=30, step=1): '''Computes all crossing and collision points at each common instant for two road users. ''' collisionPoints={} crossingZones={} @@ -185,16 +296,35 @@ else: commonTimeInterval = obj1.commonTimeInterval(obj2) if nProcesses == 1: - for i in list(commonTimeInterval)[:-1]: # do not look at the 1 last position/velocities, often with errors - i, cp, cz = computeCrossingsCollisionsAtInstant(predictionParams, i, obj1, obj2, collisionDistanceThreshold, timeHorizon, computeCZ, debug) - if len(cp) != 0: - collisionPoints[i] = cp - if len(cz) != 0: - crossingZones[i] = cz + if usePrototypes: + firstInstant= next( (x for x in xrange(commonTimeInterval.first,commonTimeInterval.last) if x-obj1.getFirstInstant() >= acceptPartialLength and x-obj2.getFirstInstant() >= acceptPartialLength), commonTimeInterval.last) + commonTimeIntervalList1= list(xrange(firstInstant,commonTimeInterval.last-1)) # do not look at the 1 last position/velocities, often with errors + commonTimeIntervalList2= list(xrange(firstInstant,commonTimeInterval.last-1,step)) # do not look at the 1 last position/velocities, often with errors + for i in commonTimeIntervalList2: + i, cp, cz = computeCrossingsCollisionsAtInstant(predictionParams, i, obj1, obj2, collisionDistanceThreshold, timeHorizon, computeCZ, debug,usePrototypes,route1,route2,prototypes,secondStepPrototypes,nMatching,objects,noiseEntryNums,noiseExitNums,minSimilarity,mostMatched,useDestination,useSpeedPrototype) + if len(cp) != 0: + collisionPoints[i] = cp + if len(cz) != 0: + crossingZones[i] = cz + if collisionPoints!={} or crossingZones!={}: + for i in commonTimeIntervalList1: + if i not in commonTimeIntervalList2: + i, cp, cz = computeCrossingsCollisionsAtInstant(predictionParams, i, obj1, obj2, 
collisionDistanceThreshold, timeHorizon, computeCZ, debug,usePrototypes,route1,route2,prototypes,secondStepPrototypes,nMatching,objects,noiseEntryNums,noiseExitNums,minSimilarity,mostMatched,useDestination,useSpeedPrototype) + if len(cp) != 0: + collisionPoints[i] = cp + if len(cz) != 0: + crossingZones[i] = cz + else: + for i in list(commonTimeInterval)[:-1]: # do not look at the 1 last position/velocities, often with errors + i, cp, cz = computeCrossingsCollisionsAtInstant(predictionParams, i, obj1, obj2, collisionDistanceThreshold, timeHorizon, computeCZ, debug,usePrototypes,route1,route2,prototypes,secondStepPrototypes,nMatching,objects,noiseEntryNums,noiseExitNums,minSimilarity,mostMatched,useDestination,useSpeedPrototype) + if len(cp) != 0: + collisionPoints[i] = cp + if len(cz) != 0: + crossingZones[i] = cz else: from multiprocessing import Pool pool = Pool(processes = nProcesses) - jobs = [pool.apply_async(computeCrossingsCollisionsAtInstant, args = (predictionParams, i, obj1, obj2, collisionDistanceThreshold, timeHorizon, computeCZ, debug)) for i in list(commonTimeInterval)[:-1]] + jobs = [pool.apply_async(computeCrossingsCollisionsAtInstant, args = (predictionParams, i, obj1, obj2, collisionDistanceThreshold, timeHorizon, computeCZ, debug,usePrototypes,route1,route2,prototypes,secondStepPrototypes,nMatching,objects,noiseEntryNums,noiseExitNums,minSimilarity,mostMatched,useDestination,useSpeedPrototype)) for i in list(commonTimeInterval)[:-1]] #results = [j.get() for j in jobs] #results.sort() for j in jobs: @@ -218,11 +348,11 @@ def generatePredictedTrajectories(self, obj, instant): return [] - def computeCrossingsCollisionsAtInstant(self, currentInstant, obj1, obj2, collisionDistanceThreshold, timeHorizon, computeCZ = False, debug = False): - return computeCrossingsCollisionsAtInstant(self, currentInstant, obj1, obj2, collisionDistanceThreshold, timeHorizon, computeCZ, debug) + def computeCrossingsCollisionsAtInstant(self, currentInstant, obj1, obj2, 
collisionDistanceThreshold, timeHorizon, computeCZ = False, debug = False,usePrototypes=True,route1= (-1,-1),route2=(-1,-1),prototypes={},secondStepPrototypes={},nMatching={},objects=[],noiseEntryNums=[],noiseExitNums=[],minSimilarity=0.1,mostMatched=None,useDestination=True,useSpeedPrototype=True): + return computeCrossingsCollisionsAtInstant(self, currentInstant, obj1, obj2, collisionDistanceThreshold, timeHorizon, computeCZ, debug,usePrototypes,route1,route2,prototypes,secondStepPrototypes,nMatching,objects,noiseEntryNums,noiseExitNums,minSimilarity,mostMatched,useDestination,useSpeedPrototype) - def computeCrossingsCollisions(self, obj1, obj2, collisionDistanceThreshold, timeHorizon, computeCZ = False, debug = False, timeInterval = None, nProcesses = 1): - return computeCrossingsCollisions(self, obj1, obj2, collisionDistanceThreshold, timeHorizon, computeCZ, debug, timeInterval, nProcesses) + def computeCrossingsCollisions(self, obj1, obj2, collisionDistanceThreshold, timeHorizon, computeCZ = False, debug = False, timeInterval = None, nProcesses = 1,usePrototypes=True,route1= (-1,-1),route2=(-1,-1),prototypes={},secondStepPrototypes={},nMatching={},objects=[],noiseEntryNums=[],noiseExitNums=[],minSimilarity=0.1,mostMatched=None,useDestination=True,useSpeedPrototype=True,acceptPartialLength=30, step=1): + return computeCrossingsCollisions(self, obj1, obj2, collisionDistanceThreshold, timeHorizon, computeCZ, debug, timeInterval, nProcesses,usePrototypes,route1,route2,prototypes,secondStepPrototypes,nMatching,objects,noiseEntryNums,noiseExitNums,minSimilarity,mostMatched,useDestination,useSpeedPrototype,acceptPartialLength, step) def computeCollisionProbability(self, obj1, obj2, collisionDistanceThreshold, timeHorizon, debug = False, timeInterval = None): '''Computes only collision probabilities @@ -428,10 +558,20 @@ #### # Other Methods #### - - - - +class PrototypePredictionParameters(PredictionParameters): + def __init__(self, maxSpeed, nPredictedTrajectories, 
constantSpeed = True): + name = 'prototype' + PredictionParameters.__init__(self, name, maxSpeed) + self.nPredictedTrajectories = nPredictedTrajectories + self.constantSpeed = constantSpeed + + def generatePredictedTrajectories(self, obj, instant,prototypeTrajectories): + predictedTrajectories = [] + initialPosition = obj.getPositionAtInstant(instant) + initialVelocity = obj.getVelocityAtInstant(instant) + for prototypeTraj in prototypeTrajectories.keys(): + predictedTrajectories.append(PredictedTrajectoryPrototype(initialPosition, initialVelocity, prototypeTraj, constantSpeed = self.constantSpeed, probability = prototypeTrajectories[prototypeTraj])) + return predictedTrajectories if __name__ == "__main__": import doctest
--- a/python/storage.py Sun Dec 07 23:01:02 2014 -0500 +++ b/python/storage.py Wed Dec 10 15:27:08 2014 -0500 @@ -64,7 +64,7 @@ connection.commit() connection.close() - + def writeFeaturesToSqlite(objects, outputFilename, trajectoryType, objectNumbers = -1): '''write features trajectories maintain trajectory ID,velocities dataset ''' connection = sqlite3.connect(outputFilename) @@ -72,7 +72,7 @@ cursor.execute("CREATE TABLE IF NOT EXISTS \"positions\"(trajectory_id INTEGER,frame_number INTEGER, x_coordinate REAL, y_coordinate REAL, PRIMARY KEY(trajectory_id, frame_number))") cursor.execute("CREATE TABLE IF NOT EXISTS \"velocities\"(trajectory_id INTEGER,frame_number INTEGER, x_coordinate REAL, y_coordinate REAL, PRIMARY KEY(trajectory_id, frame_number))") - + if trajectoryType == 'feature': if type(objectNumbers) == int and objectNumbers == -1: for trajectory in objects: @@ -85,14 +85,14 @@ connection.commit() connection.close() - + def writePrototypesToSqlite(prototypes,nMatching, outputFilename): """ prototype dataset is a dictionary with keys== routes, values== prototypes Ids """ connection = sqlite3.connect(outputFilename) cursor = connection.cursor() cursor.execute("CREATE TABLE IF NOT EXISTS \"prototypes\"(prototype_id INTEGER,routeIDstart INTEGER,routeIDend INTEGER, nMatching INTEGER, PRIMARY KEY(prototype_id))") - + for route in prototypes.keys(): if prototypes[route]!=[]: for i in prototypes[route]: @@ -100,7 +100,7 @@ connection.commit() connection.close() - + def loadPrototypesFromSqlite(filename): """ This function loads the prototype file in the database @@ -127,7 +127,7 @@ connection.close() return prototypes,nMatching - + def writeLabelsToSqlite(labels, outputFilename): """ labels is a dictionary with keys: routes, values: prototypes Ids """ @@ -135,7 +135,7 @@ cursor = connection.cursor() cursor.execute("CREATE TABLE IF NOT EXISTS \"labels\"(object_id INTEGER,routeIDstart INTEGER,routeIDend INTEGER, prototype_id INTEGER, PRIMARY KEY(object_id))") - 
+ for route in labels.keys(): if labels[route]!=[]: for i in labels[route]: @@ -144,7 +144,7 @@ connection.commit() connection.close() - + def loadLabelsFromSqlite(filename): labels = {} @@ -168,6 +168,50 @@ connection.close() return labels +def writeSpeedPrototypeToSqlite(prototypes,nmatching, outFilename): + """ to match the format of second layer prototypes""" + connection = sqlite3.connect(outFilename) + cursor = connection.cursor() + + cursor.execute("CREATE TABLE IF NOT EXISTS \"speedprototypes\"(spdprototype_id INTEGER,prototype_id INTEGER,routeID_start INTEGER, routeID_end INTEGER, nMatching INTEGER, PRIMARY KEY(spdprototype_id))") + + for route in prototypes.keys(): + if prototypes[route]!={}: + for i in prototypes[route]: + if prototypes[route][i]!= []: + for j in prototypes[route][i]: + cursor.execute("insert into speedprototypes (spdprototype_id,prototype_id, routeID_start, routeID_end, nMatching) values (?,?,?,?,?)",(j,i,route[0],route[1],nmatching[j])) + + connection.commit() + connection.close() + +def loadSpeedPrototypeFromSqlite(filename): + """ + This function loads the prototypes table in the database of name <filename>. 
+ """ + prototypes = {} + nMatching={} + connection = sqlite3.connect(filename) + cursor = connection.cursor() + + try: + cursor.execute('SELECT * from speedprototypes order by spdprototype_id,prototype_id, routeID_start, routeID_end, nMatching') + except sqlite3.OperationalError as error: + utils.printDBError(error) + return [] + + for row in cursor: + route=(row[2],row[3]) + if route not in prototypes.keys(): + prototypes[route]={} + if row[1] not in prototypes[route].keys(): + prototypes[route][row[1]]=[] + prototypes[route][row[1]].append(row[0]) + nMatching[row[0]]=row[4] + + connection.close() + return prototypes,nMatching + def writeRoutesToSqlite(Routes, outputFilename): """ This function writes the activity path define by start and end IDs""" @@ -175,7 +219,7 @@ cursor = connection.cursor() cursor.execute("CREATE TABLE IF NOT EXISTS \"routes\"(object_id INTEGER,routeIDstart INTEGER,routeIDend INTEGER, PRIMARY KEY(object_id))") - + for route in Routes.keys(): if Routes[route]!=[]: for i in Routes[route]: @@ -183,7 +227,7 @@ connection.commit() connection.close() - + def loadRoutesFromSqlite(filename): Routes = {} @@ -203,14 +247,14 @@ Routes[route].append(row[0]) connection.close() - return Routes + return Routes def setRoutes(filename, objects): connection = sqlite3.connect(filename) cursor = connection.cursor() for obj in objects: cursor.execute('update objects set startRouteID = {} where object_id = {}'.format(obj.startRouteID, obj.getNum())) - cursor.execute('update objects set endRouteID = {} where object_id = {}'.format(obj.endRouteID, obj.getNum())) + cursor.execute('update objects set endRouteID = {} where object_id = {}'.format(obj.endRouteID, obj.getNum())) connection.commit() connection.close() @@ -843,6 +887,7 @@ return configDict + if __name__ == "__main__": import doctest import unittest
--- a/python/utils.py Sun Dec 07 23:01:02 2014 -0500 +++ b/python/utils.py Wed Dec 10 15:27:08 2014 -0500 @@ -361,7 +361,7 @@ based on the threshold on distance between two elements of lists l1, l2 similarityFunc returns True or False whether the two points are considered similar - if aligned, returns the best matching if using a finite delta by shiftinig the series alignments + if aligned, returns the best matching if using a finite delta by shifting the series alignments eg distance(p1, p2) < epsilon '''