comparison python/compute-object-from-features.py @ 105:9844c69d8fa2

added multiply method to Point
author Nicolas Saunier <nico@confins.net>
date Thu, 14 Jul 2011 19:48:30 -0400
parents 1621b46a1523
children ce4cb46b3603
comparing 104:13187af8622d with 105:9844c69d8fa2
--- a/python/compute-object-from-features.py
+++ b/python/compute-object-from-features.py
@@ -30,19 +30,74 @@
 features = ubc_utils.loadTrajectories(videoFilenamePrefix+'-features.txt', max(obj.featureNumbers)+1)
 h = np.loadtxt(videoFilenamePrefix+'-homography.txt')

 invh = cvutils.invertHomography(h)

-yCoordinates = -np.ones((len(obj.featureNumbers),int(obj.length())))
-for i,fnum in enumerate(obj.featureNumbers):
-    traj = features[fnum].getPositions().asArray()
-    imgTraj = cvutils.projectArray(invh, traj)
-    yCoordinates[i,features[fnum].getFirstInstant()-obj.getFirstInstant():features[fnum].getLastInstant()+1-obj.getFirstInstant()] = imgTraj[1,:]
+def computeObject(features, homography, timeInterval = None):
+    if not timeInterval:
+        raise Exception('not implemented') # compute from the features
+
+    yCoordinates = -np.ones((len(features),int(timeInterval.length())))
+    for i,f in enumerate(features):
+        traj = f.getPositions().asArray()
+        imgTraj = cvutils.projectArray(homography, traj)
+        yCoordinates[i,f.getFirstInstant()-timeInterval.first:f.getLastInstant()+1-timeInterval.first] = imgTraj[1,:]

-indices = argmax(yCoordinates,0)
-newTraj = moving.Trajectory()
-for j,idx in enumerate(indices):
-    newTraj.addPosition(features[obj.featureNumbers[idx]].getPositionAtInstant(j+obj.getFirstInstant()))
+    indices = np.argmax(yCoordinates,0)
+    newTraj = moving.Trajectory()
+    for j,idx in enumerate(indices):
+        newTraj.addPosition(features[idx].getPositionAtInstant(j+timeInterval.first))
+        #newVelocities.addPosition(features[obj.featureNumbers[idx]].getVelocityAtInstant(j+obj.getFirstInstant()))

-# TODO
-# use kalman filter over the resulting trajectory
-# estimate the error terms using actual features
+    return newTraj
+
+# TODO version median
+
+def kalmanFilter(positions, velocities, processNoiseCov, measurementNoiseCov):
+    kalman=cv.CreateKalman(6, 4)
+    kalman.transition_matrix[0,2]=1
+    kalman.transition_matrix[0,4]=1./2
+    kalman.transition_matrix[1,3]=1
+    kalman.transition_matrix[1,5]=1./2
+    kalman.transition_matrix[2,4]=1
+    kalman.transition_matrix[3,5]=1
+
+    cv.SetIdentity(kalman.measurement_matrix, 1.)
+    cv.SetIdentity(kalman.process_noise_cov, processNoiseCov)
+    cv.SetIdentity(kalman.measurement_noise_cov, measurementNoiseCov)
+    cv.SetIdentity(kalman.error_cov_post, 1.)
+
+    p = positions[0]
+    v = velocities[0]
+    v2 = velocities[2]
+    a = (v2-v).multiply(0.5)
+    kalman.state_post[0,0]=p.x
+    kalman.state_post[1,0]=p.y
+    kalman.state_post[2,0]=v.x
+    kalman.state_post[3,0]=v.y
+    kalman.state_post[4,0]=a.x
+    kalman.state_post[5,0]=a.y
+
+    filteredPositions = moving.Trajectory()
+    filteredVelocities = moving.Trajectory()
+    measurement = cv.CreateMat(4,1,cv.CV_32FC1)
+    for i in xrange(positions.length()):
+        cv.KalmanPredict(kalman) # no control
+        p = positions[i]
+        v = velocities[i]
+        measurement[0,0] = p.x
+        measurement[1,0] = p.y
+        measurement[2,0] = v.x
+        measurement[3,0] = v.y
+        cv.KalmanCorrect(kalman, measurement)
+        filteredPositions.addPositionXY(kalman.state_post[0,0], kalman.state_post[1,0])
+        filteredVelocities.addPositionXY(kalman.state_post[2,0], kalman.state_post[3,0])
+
+    return (filteredPositions, filteredVelocities)
+
+(filteredPositions, filteredVelocities) = kalmanFilter(newTraj, obj.getVelocities(), 1e-5, 1e-1)
+
+plt.clf()
+obj.draw('rx')
+for fnum in obj.featureNumbers: features[fnum].draw()
+newTraj.draw('bx')
+plt.axis('equal')
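
Note: the commit message refers to the new multiply method on Point, which this file uses only once, to estimate an initial acceleration with a = (v2-v).multiply(0.5). The method itself lives elsewhere (moving.py), not in this diff; from its usage it is presumably plain scalar multiplication of a 2D point, roughly:

class Point(object):
    def __init__(self, x, y):
        self.x = x
        self.y = y

    def __sub__(self, other):
        # difference of two points, as used in (v2-v)
        return Point(self.x - other.x, self.y - other.y)

    def multiply(self, alpha):
        # scale both coordinates by a scalar, e.g. (v2-v).multiply(0.5)
        return Point(self.x * alpha, self.y * alpha)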
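
The kalmanFilter function above uses the OpenCV 1.x Python bindings (cv.CreateKalman, cv.KalmanPredict, cv.KalmanCorrect), which were removed in later OpenCV releases. A minimal sketch of the same constant-acceleration filter with the cv2.KalmanFilter API, assuming a time step of one frame, state (x, y, vx, vy, ax, ay) and measurement (x, y, vx, vy); makeConstantAccelerationKalman is just an illustrative name:

import numpy as np
import cv2

def makeConstantAccelerationKalman(processNoiseCov, measurementNoiseCov):
    kalman = cv2.KalmanFilter(6, 4)
    # constant-acceleration model with dt = 1 frame:
    # x' = x + vx + ax/2, y' = y + vy + ay/2, vx' = vx + ax, vy' = vy + ay
    kalman.transitionMatrix = np.array([[1, 0, 1, 0, 0.5, 0. ],
                                        [0, 1, 0, 1, 0.,  0.5],
                                        [0, 0, 1, 0, 1.,  0. ],
                                        [0, 0, 0, 1, 0.,  1. ],
                                        [0, 0, 0, 0, 1.,  0. ],
                                        [0, 0, 0, 0, 0.,  1. ]], np.float32)
    # positions and velocities are measured directly
    kalman.measurementMatrix = np.eye(4, 6, dtype=np.float32)
    kalman.processNoiseCov = processNoiseCov * np.eye(6, dtype=np.float32)
    kalman.measurementNoiseCov = measurementNoiseCov * np.eye(4, dtype=np.float32)
    kalman.errorCovPost = np.eye(6, dtype=np.float32)
    return kalman

# per-frame usage, mirroring the predict/correct loop above:
# kalman.predict()
# state = kalman.correct(np.array([[x], [y], [vx], [vy]], np.float32))
# filtered position is state[0:2], filtered velocity state[2:4]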
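
On the "# TODO version median" comment: one possible reading is to pick, at each instant, the feature whose projected y-coordinate is the median of the features visible at that instant, rather than the maximum. A sketch under that assumption (and assuming projected y-coordinates are nonnegative, so -1 still marks absent features, and at least one feature is visible at every instant of the interval):

import numpy as np

def medianFeatureIndices(yCoordinates):
    # yCoordinates: nFeatures x nInstants array, -1 where a feature is not observed
    y = np.where(yCoordinates < 0, np.nan, yCoordinates)
    med = np.nanmedian(y, axis=0)  # per-instant median y over visible features
    # per instant, index of the feature whose y-coordinate is closest to that median
    return np.nanargmin(np.abs(y - med), axis=0)

The result could replace the np.argmax call in computeObject to build the trajectory from the median feature instead of the lowest one in the image.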