comparison scripts/dltrack.py @ 1265:0f5bebd62a55

minor modifications
author Nicolas Saunier <nicolas.saunier@polymtl.ca>
date Fri, 24 May 2024 16:15:38 -0400
parents dff5b678a33a
children 27b206d118b7
--- a/scripts/dltrack.py (1264:dff5b678a33a)
+++ b/scripts/dltrack.py (1265:0f5bebd62a55)
@@ -233,22 +233,23 @@
 if not args.notSavingImageCoordinates:
     storage.saveTrajectoriesToSqlite(utils.removeExtension(args.databaseFilename)+'-bb.sqlite', list(objects.values()), 'object')
 # project, smooth and save
 for num, obj in objects.items():
     features = obj.getFeatures()
-    if moving.userTypeNames[obj.getUserType()] == 'pedestrian':
-        assert len(features) == 2
-        t1 = features[0].getPositions()
-        t2 = features[1].getPositions()
-        t = [[(p1.x+p2.x)/2., max(p1.y, p2.y)] for p1, p2 in zip(t1, t2)]
-    else:
-        t = []
-        for instant in obj.getTimeInterval():
-            points = [f.getPositionAtInstant(instant) for f in features if f.existsAtInstant(instant)]
-            t.append(moving.Point.agg(points, np.mean).aslist())
+    # possible to save bottom pedestrians? not consistent with other users
+    # if moving.userTypeNames[obj.getUserType()] == 'pedestrian':
+    #     assert len(features) == 2
+    #     t1 = features[0].getPositions()
+    #     t2 = features[1].getPositions()
+    #     t = [[(p1.x+p2.x)/2., max(p1.y, p2.y)] for p1, p2 in zip(t1, t2)]
+    # else:
+    t = []
+    for instant in obj.getTimeInterval():
+        points = [f.getPositionAtInstant(instant) for f in features if f.existsAtInstant(instant)]
+        t.append(moving.Point.agg(points, np.mean).aslist())
     #t = sum([f.getPositions().asArray() for f in features])/len(features)
     #t = (moving.Trajectory.add(t1, t2)*0.5).asArray()
     projected = cvutils.imageToWorldProject(np.array(t).T, intrinsicCameraMatrix, distortionCoefficients, homography)
     featureNum = features[0].getNum()
     obj.features=[moving.MovingObject(featureNum, obj.getTimeInterval(), moving.Trajectory(projected.tolist()))]
     obj.featureNumbers = [featureNum]
     if smoothingHalfWidth is not None: # smoothing
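Note on the change: the image-coordinate trajectory of each object is now built by averaging, at every instant, the positions of all features that exist at that instant, replacing the former pedestrian-specific rule that combined exactly two features. The minimal sketch below illustrates that per-instant averaging pattern only; the Feature class, the instants and the coordinates are made up for illustration and are not the Traffic Intelligence moving module API.

# sketch of the per-instant feature averaging used in the new code path
import numpy as np

class Feature:
    def __init__(self, positions):
        # positions: dict mapping instant -> (x, y) in image coordinates
        self.positions = positions
    def existsAtInstant(self, instant):
        return instant in self.positions
    def getPositionAtInstant(self, instant):
        return self.positions[instant]

# two toy features; the second one does not exist at instant 2
features = [Feature({0: (10., 20.), 1: (11., 21.), 2: (12., 22.)}),
            Feature({0: (14., 24.), 1: (15., 25.)})]

t = []
for instant in range(3):
    points = [f.getPositionAtInstant(instant) for f in features if f.existsAtInstant(instant)]
    t.append(np.mean(points, axis=0).tolist())  # mean over the features available at this instant

print(t)  # [[12.0, 22.0], [13.0, 23.0], [12.0, 22.0]]

In the actual script, the resulting list t is transposed with np.array(t).T into a 2xN array of image points before being undistorted and projected to world coordinates by cvutils.imageToWorldProject.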