comparison python/cvutils.py @ 154:668710d4c773

updated computeTranslation with cv2
author Nicolas Saunier <nicolas.saunier@polymtl.ca>
date Wed, 07 Sep 2011 16:35:51 -0400
parents 74b1fc68d4df
children 2eef5620c0b3
--- python/cvutils.py	153:c8a149fccfda
+++ python/cvutils.py	154:668710d4c773
@@ -133,30 +133,29 @@
     invH = inv(homography)
     invH /= invH[2,2]
     return invH
 
 if opencvExists:
-    def computeTranslation(img1, img2, img1Points, maxTranslation, minNMatches, windowSize = (5,5), level = 5, criteria = (cv2.TERM_CRITERIA_EPS, 0, 0.01)):
-        '''Computes the translation between of img2 with respect to img1
-        (loaded using OpenCV)
+    def computeTranslation(img1, img2, img1Points, maxTranslation2, minNMatches, windowSize = (5,5), level = 5, criteria = (cv2.TERM_CRITERIA_EPS, 0, 0.01)):
+        '''Computes the translation of img2 with respect to img1
+        (loaded using OpenCV as numpy arrays)
         img1Points are used to compute the translation
 
-        TODO add diagnostic if data is all over the place, and it most likely is not a translation (eg zoom)'''
-        from numpy.core.multiarray import zeros
+        TODO add diagnostic if data is all over the place, and it most likely is not a translation (eg zoom, other non linear distortion)'''
+        from numpy.core.multiarray import array
         from numpy.lib.function_base import median
+        from numpy.core.fromnumeric import sum
 
-        (img2Points, status, track_error) = cv.CalcOpticalFlowPyrLK(img1, img2, zeros((img1.rows,img1.cols+8)), zeros((img1.rows,img1.cols+8)), img1Points, windowSize, level, criteria, 0)
-
-        deltaX = []
-        deltaY = []
+        nextPoints = array([])
+        (img2Points, status, track_error) = cv2.calcOpticalFlowPyrLK(img1, img2, img1Points, nextPoints, winSize=windowSize, maxLevel=level, criteria=criteria)
+        # calcOpticalFlowPyrLK(prevImg, nextImg, prevPts[, nextPts[, status[, err[, winSize[, maxLevel[, criteria[, derivLambda[, flags]]]]]]]]) -> nextPts, status, err
+        delta = []
         for (k, (p1,p2)) in enumerate(zip(img1Points, img2Points)):
             if status[k] == 1:
-                dx = p2[0]-p1[0]
-                dy = p2[1]-p1[1]
-                d = dx**2 + dy**2
-                if d < maxTranslation:
-                    deltaX.append(dx)
-                    deltaY.append(dy)
-        if len(deltaX) >= 10:
-            return [median(deltaX), median(deltaY)]
+                dp = p2-p1
+                d = sum(dp**2)
+                if d < maxTranslation2:
+                    delta.append(dp)
+        if len(delta) >= minNMatches:
+            return median(delta, axis=0)
         else:
             return None
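
For context, a minimal usage sketch of the updated function (not part of the changeset): the frame file names, the feature-detection parameters and the threshold values below are illustrative, and cvutils is assumed to be importable (python/ on the PYTHONPATH). Note that maxTranslation2 is compared against the squared displacement of each tracked point, hence the squared threshold, and minNMatches replaces the previously hard-coded minimum of 10 matches.

    import cv2
    from cvutils import computeTranslation  # this module, python/cvutils.py

    # two consecutive frames, loaded as grayscale numpy arrays (hypothetical file names)
    img1 = cv2.imread('frame000.png', 0)
    img2 = cv2.imread('frame001.png', 0)

    # corners detected in the first frame, returned as an Nx1x2 float32 array
    # arguments: image, maxCorners, qualityLevel, minDistance (illustrative values)
    img1Points = cv2.goodFeaturesToTrack(img1, 1000, 0.01, 5)

    # maxTranslation2 is a squared displacement in pixels^2
    t = computeTranslation(img1, img2, img1Points, maxTranslation2 = 20.**2, minNMatches = 10)
    if t is not None:
        print('median translation (dx, dy): {0}'.format(t))
    else:
        print('not enough matched points to estimate a translation')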