comparison python/moving.py @ 522:ce40a89bd6ae

added functions for classification refactored from Sohail's work for TRB/TRC (to be tested)
author Nicolas Saunier <nicolas.saunier@polymtl.ca>
date Wed, 18 Jun 2014 23:36:56 -0400
parents fd9641cbd24b
children ce4eaabacc26
comparison
equal deleted inserted replaced
521:3707eeb20f25 522:ce40a89bd6ae
816 def predictPosition(self, instant, nTimeSteps, externalAcceleration = Point(0,0)): 816 def predictPosition(self, instant, nTimeSteps, externalAcceleration = Point(0,0)):
817 '''Predicts the position of object at instant+deltaT, 817 '''Predicts the position of object at instant+deltaT,
818 at constant speed''' 818 at constant speed'''
819 return predictPositionNoLimit(nTimeSteps, self.getPositionAtInstant(instant), self.getVelocityAtInstant(instant), externalAcceleration) 819 return predictPositionNoLimit(nTimeSteps, self.getPositionAtInstant(instant), self.getVelocityAtInstant(instant), externalAcceleration)
820 820
821 def classifyUserTypeSpeed(self, threshold, statisticsFunc = median, ignoreNInstantsAtEnds = 0): 821 ###
822 # User Type Classification
823 ###
824 def classifyUserTypeSpeedPedstrianCar(self, threshold, aggregationFunc = median, ignoreNInstantsAtEnds = 0):
822 '''Classifies slow and fast road users 825 '''Classifies slow and fast road users
823 slow: non-motorized -> pedestrians 826 slow: non-motorized -> pedestrians
824 fast: motorized -> cars''' 827 fast: motorized -> cars'''
825 if ignoreNInstantsAtEnds > 0: 828 if ignoreNInstantsAtEnds > 0:
826 speeds = self.velocities.norm()[ignoreNInstantsAtEnds:-ignoreNInstantsAtEnds] 829 speeds = self.getSpeeds()[ignoreNInstantsAtEnds:-ignoreNInstantsAtEnds]
827 else: 830 else:
828 speeds = self.velocities.norm() 831 speeds = self.getSpeeds()
829 if statisticsFunc(speeds) >= threshold: 832 if aggregationFunc(speeds) >= threshold:
830 self.setUserType(userType2Num['car']) 833 self.setUserType(userType2Num['car'])
831 else: 834 else:
832 self.setUserType(userType2Num['pedestrian']) 835 self.setUserType(userType2Num['pedestrian'])
833 836
834 def classifyUserTypeHoGSVMAtInstant(self, img, svm, instant, homography, width, height, px = 0.2, py = 0.2, pixelThreshold = 800): 837 def classifyUserTypeSpeed(self, aggregationFunc = median, speedProbabilities):
838 '''Classifies road user per road user type
839 speedProbabilities are functions return P(speed|class)
840 in a dictionary indexed by user type names
841 Returns probabilities for each class
842
843 for simple threshold classification, simply pass non-overlapping indicator functions (membership)
844 e.g. def indic(x):
845 if abs(x-mu) < sigma:
846 return 1
847 else:
848 return x'''
849 if not hasattr(self, aggregatedSpeed):
850 self.aggregatedSpeed = aggregationFunc(self.getSpeeds())
851 userTypeProbabilities = {userType2Num[userTypename]: speedProbabilities[userTypename](self.aggregatedSpeed) for userTypename in speedProbabilities}
852 self.setUserType(utils.argmaxDict(userTypeProbabilities))
853 return userTypeProbabilities
854
855 def initClassifyUserTypeHoGSVM(self, aggregationFunc = median):
856 '''Initializes the data structures for classification
857
858 TODO? compute speed for longest feature?
859 Skip beginning and end of feature for speed? Offer options instead of median'''
860 self.aggregatedSpeed = aggregationFunc(self.getSpeeds())
861 self.userTypes = {}
862
863 def classifyUserTypeHoGSVMAtInstant(self, img, pedBikeCarSVM, instant, homography, width, height, bikeCarSVM = None, pedBikeSpeedTreshold = float('Inf'), bikeCarSpeedThreshold = float('Inf'), px = 0.2, py = 0.2, pixelThreshold = 800):
835 '''Extract the image box around the object and 864 '''Extract the image box around the object and
836 applies the SVM model on it''' 865 applies the SVM model on it'''
837 from numpy import array 866 from numpy import array
838 croppedImg, yCropMin, yCropMax, xCropMin, xCropMax = imageBox(img, self, instant, homography, width, height, px, py, pixelThreshold) 867 croppedImg, yCropMin, yCropMax, xCropMin, xCropMax = imageBox(img, self, instant, homography, width, height, px, py, pixelThreshold)
839 if len(croppedImg) > 0: # != [] 868 if len(croppedImg) > 0: # != []
840 hog = array([cvutils.HOG(croppedImg)], dtype = np.float32) 869 hog = array([cvutils.HOG(croppedImg)], dtype = np.float32)
841 return int(svm.predict(hog)) 870 if self.aggregatedSpeed < pedBikeSpeedTreshold or bikeCarSVM == None:
842 else: 871 self.userTypes[instant] = int(pedBikeCarSVM.predict(hog))
843 return userType2Num['unknown'] 872 elif self.aggregatedSpeed < bikeCarSpeedTreshold:
873 self.userTypes[instant] = int(bikeCarSVM.predict(hog))
874 else:
875 self.userTypes[instant] = userType2Num['car']
876 else:
877 self.userTypes[instant] = userType2Num['unknown']
878
879 def classifyUserTypeHoGSVM(self, images, pedBikeCarSVM, homography, width, height, bikeCarSVM = None, pedBikeSpeedTreshold = float('Inf'), bikeCarSpeedThreshold = float('Inf'), aggregationFunc = median, speedProbabilities = None, px = 0.2, py = 0.2, pixelThreshold = 800):
880 '''Agregates SVM detections in each image and returns probability
881 (proportion of instants with classification in each category)
882
883 iamges is a dictionary of images indexed by instant
884 With default parameters, the general (ped-bike-car) classifier will be used
885 TODO? consider all categories?'''
886 if not hasattr(self, aggregatedSpeed) or not hasattr(self, userTypes):
887 print('Initilize the data structures for classification by HoG-SVM')
888 self.initClassifyUserTypeHoGSVM(aggregationFunc)
889
890 if len(self.userTypes) != self.length(): # if classification has not been done previously
891 for t in self.getTimeInterval():
892 if t not in self.userTypes:
893 self.classifyUserTypeHoGSVMAtInstant(images[t], pedBikeCarSVM, t, homography, width, height, bikeCarSVM, pedBikeSpeedTreshold, bikeCarSpeedThreshold, px, py, pixelThreshold)
894 # compute P(Speed|Class)
895 if speedProbabilities = None: # equiprobable information from speed
896 userTypeProbabilities = {userType2Num['car']: 1., userType2Num['pedestrian']: 1., userType2Num['bicycle']: 1.}
897 else:
898 userTypeProbabilities = {userType2Num[userTypename]: speedProbabilities[userTypename](self.aggregatedSpeed) for userTypename in speedProbabilities}
899 # result is P(Class|Appearance) x P(Speed|Class)
900 nInstantsUserType = {userType2Num[userTypename]: 0 for userTypename in userTypeProbabilities}# number of instants the object is classified as userTypename
901 for t in self.userTypes:
902 nInstantsUserType[self.userTypes[t]] += 1
903 for userTypename in userTypeProbabilities:
904 userTypeProbabilities[userTypename] *= nInstantsUserType[userTypename]
905 # class is the user type that maximizes usertype probabilities
906 self.setUserType(utils.argmaxDict(userTypeProbabilities))
907
908 def classifyUserTypeArea(self, areas, homography):
909 '''Classifies the object based on its location (projected to image space)
910 areas is a dictionary of matrix of the size of the image space
911 for different road users possible locations, indexed by road user type names
912
913 TODO: areas could be a wrapper object with a contains method that would work for polygons and images (with wrapper class)
914 skip frames at beginning/end?'''
915 print('not implemented/tested yet')
916 if not hasattr(self, projectedPositions):
917 if homography != None:
918 self.projectedPositions = obj.positions.project(homography)
919 else:
920 self.projectedPositions = obj.positions
921 possibleUserTypes = {}
922 for userTypename in userTypenames:
923 possibleUserTypes[userTypename] = 0
924 for p in self.projectedPositions:
925 for userTypename in areas:
926 if areas[userTypename][p.x, p.y] != 0:
927 possibleUserTypes[userTypename] += 1
928 # what to do: threshold for most common type? self.setUserType()
929 return possibleUserTypes
844 930
845 @staticmethod 931 @staticmethod
846 def collisionCourseDotProduct(movingObject1, movingObject2, instant): 932 def collisionCourseDotProduct(movingObject1, movingObject2, instant):
847 'A positive result indicates that the road users are getting closer' 933 'A positive result indicates that the road users are getting closer'
848 deltap = movingObject1.getPositionAtInstant(instant)-movingObject2.getPositionAtInstant(instant) 934 deltap = movingObject1.getPositionAtInstant(instant)-movingObject2.getPositionAtInstant(instant)