comparison python/moving.py @ 594:9e39cd95e017

first implementation of CLEAR MOT (needs formal tests)
author Nicolas Saunier <nicolas.saunier@polymtl.ca>
date Sun, 07 Dec 2014 01:32:36 -0500
parents e2a873e08568
children 17b02c8054d0
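
For reference, the metrics computed by the new computeClearMOT below follow Bernardin and Stiefelhagen (2008): MOTP is the total distance over all ground truth to tracker matches divided by the number of matches (dist/ct in the code), and MOTA is 1 - (mt + fpt + mme)/gt, i.e. one minus the ratio of missed ground-truth frames (mt), false positive frames (fpt) and identity mismatches (mme) to the total number of ground-truth object frames (gt). A standalone sketch of the per-frame assignment step is given after the diff.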
comparison of 593:e2a873e08568 and 594:9e39cd95e017

--- a/python/moving.py
+++ b/python/moving.py
@@ -1304,94 +1304,113 @@
         if homography != None:
             self.positions = self.positions.project(homography)
 
     def matches(self, obj, instant, matchingDistance):
         '''Indicates if the annotation matches obj (MovingObject)
-        with threshold matchingDistance'''
+        with threshold matchingDistance
+        Returns distance if below matchingDistance, matchingDistance+1 otherwise
+        (returns an actual value, otherwise munkres does not terminate)'''
         d = Point.distanceNorm2(self.getPositionAtInstant(instant), obj.getPositionAtInstant(instant))
-        return d<matchingDistance, d
-
-def matchingGroundTruthToTracker(objects, annotations, matchingDistance, firstInstant, lastInstant):
-    '''Returns a matching of tracker output (objects) to ground truth (annnotations)
+        if d < matchingDistance:
+            return d
+        else:
+            return matchingDistance + 1
+
+def computeClearMOT(objects, annotations, matchingDistance, firstInstant, lastInstant, debug = False):
+    '''Computes the CLEAR MOT metrics
+
+    Reference:
+    Keni, Bernardin, and Stiefelhagen Rainer. "Evaluating multiple object tracking performance: the CLEAR MOT metrics." EURASIP Journal on Image and Video Processing 2008 (2008)
 
     objects and annotations are supposed to in the same space
     current implementation is BBAnnotations (bounding boxes)
     mathingDistance is threshold on matching between annotation and object
 
-    Output is list of
-    [frame number, ground truth id, tracker object id, distance]
-    where tracker object id is None if no matching was found'''
-
-    matchTable = []
+    TO: tracker output (objects)
+    GT: ground truth (annotations)
+
+    Should we use the distance as weights or just 1/0 if distance below matchingDistance?
+    (add argument useDistanceForWeights = False)'''
+    from munkres import Munkres
+    from numpy import array
+    from sys import maxsize
+
+    munk = Munkres()
+    dist = 0. # total distance between GT and TO
+    ct = 0 # number of associations between GT and tracker output in each frame
+    gt = 0 # number of GT.frames
+    mt = 0 # number of missed GT.frames (sum of the number of GT not detected in each frame)
+    fpt = 0 # number of false alarm.frames (tracker objects without match in each frame)
+    mme = 0 # number of mismatches
+    matches = {} # match[i] is the tracker track associated with GT i (using object references)
     for t in xrange(firstInstant, lastInstant+1):
-        for a in annotations:
-            if a.existsAtInstant(t):
-                minDist = float('inf')
-                matchingObject = None
-                for o in objects:
-                    if o.existsAtInstant(t):
-                        match, d = a.matches(o, t, matchingDistance)
-                        if match and d<minDist:
-                            minDist = d
-                            matchingObject = o
-                if matchingObject == None:
-                    matchTable.append([t, a.getNum(), None, minDist])
-                else:
-                    matchTable.append([t, a.getNum(), matchingObject.getNum(), minDist])
-    return matchTable
-
-def computeClearMOT(matchTable, nTrackFrames):
-    '''Computes the MOTA/MOTP measures from the matching statistics
-    between ground truth and tracker output
-    computed by matchingGroundTruthToTracker
-
-    nTrackFrames is the sum of the number of frames of existence of all tracker output
-
-    Adapted from Dariush Ettehadieh's thesis work'''
-    #Calculate MOTP
-    dist = 0. # total distance between GT and tracker output
-    nAssociatedGTFrames = 0
-    mt = 0 # number of missed GT frames (sum of the number of GT not detected in each frame)
-    for mtab in matchTable:
-        if mtab[2] != None:
-            dist += float(mtab[3])#/T
-            nAssociatedGTFrames += 1
-        else:
-            mt += 1
-    if nAssociatedGTFrames != 0:
-        motp = dist/nAssociatedGTFrames
+        previousMatches = matches.copy()
+        # go through currently matched GT-TO and check if they are still matched withing matchingDistance
+        toDelete = []
+        for a in matches:
+            if a.existsAtInstant(t) and matches[a].existsAtInstant(t):
+                d = a.matches(matches[a], t, matchingDistance)
+                if d < matchingDistance:
+                    dist += d
+                else:
+                    toDelete.append(a)
+            else:
+                toDelete.append(a)
+        for a in toDelete:
+            del matches[a]
+
+        # match all unmatched GT-TO
+        matchedGTs = matches.keys()
+        matchedTOs = matches.values()
+        costs = []
+        unmatchedGTs = [a for a in annotations if a.existsAtInstant(t) and a not in matchedGTs]
+        unmatchedTOs = [o for o in objects if o.existsAtInstant(t) and o not in matchedTOs]
+        nGTs = len(matchedGTs)+len(unmatchedGTs)
+        nTOs = len(matchedTOs)+len(unmatchedTOs)
+        if len(unmatchedTOs) > 0:
+            for a in unmatchedGTs:
+                aCosts = [a.matches(o, t, matchingDistance) for o in unmatchedTOs]
+                if min(aCosts) < matchingDistance:
+                    costs.append(aCosts)
+        # print costs
+        if len(costs) > 0:
+            newMatches = munk.compute(costs)
+            for k,v in newMatches:
+                if costs[k][v] < matchingDistance:
+                    matches[unmatchedGTs[k]]=unmatchedTOs[v]
+                    dist += costs[k][v]
+        if debug:
+            print('{} '.format(t)+', '.join(['{} {}'.format(k.getNum(), v.getNum()) for k,v in matches.iteritems()]))
+
+        # compute metrics elements
+        ct += len(matches)
+        mt += nGTs-len(matches)
+        fpt += nTOs-len(matches)
+        gt += nGTs
+        # compute mismatches
+        # for gt that do not appear in both frames, check if the corresponding to was matched to another gt in previous/next frame
+        mismatches = []
+        for a in matches:
+            if a in previousMatches:
+                if matches[a] != previousMatches[a]:
+                    mismatches.append(a)
+            elif matches[a] in previousMatches.values():
+                mismatches.append(matches[a])
+        for a in previousMatches:
+            if a not in matches and previousMatches[a] in matches.values():
+                mismatches.append(previousMatches[a])
+        if debug:
+            for mm in set(mismatches):
+                print type(mm), mm.getNum()
+        # some object mismatches may appear twice
+        mme += len(set(mismatches))
+
+    if ct > 0:
+        motp = dist/ct
     else:
-        return 0,0,0,0,0,0
-
-    #Calculate MOTA
-    gt = len(matchTable) # sum of the number of GT in each frame, or sum of the length of existence of each GT
-    #for sgt in sorted_gt_positions:
-    #    gt += (len(sgt)-1)
-
-    #total_traces = len(object_positions)
-    fpt = nTrackFrames - nAssociatedGTFrames
-
-    # gtobj = 0
-    mme = 0
-    # while gtobj <= n_gt_objects:
-    #     prev = [0,0,-1,0]
-    #     new_match = 0
-    #     for mtab in matchTable:
-    #         if mtab[1] == gtobj:
-    #             if new_match == 0:
-    #                 new_match = 1
-    #                 mme = mme - 1
-    #             if mtab[2] != prev[2]:
-    #                 mme += 1
-    #             prev = mtab
-    #     gtobj += 1
-
-    mota = 1-(float(mt+fpt+mme)/gt)
-
-    print 'MOTP: ' + str(motp)
-    print 'MOTA: ' + str(mota)
-    return motp, mota, dist, mt, mme, fpt
+        motp = None
+    return motp, 1.-float(mt+fpt+mme)/gt, mt, mme, fpt, gt
 
 
 def plotRoadUsers(objects, colors):
     '''Colors is a PlottingPropertyValues instance'''
     from matplotlib.pyplot import figure, axis
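
To make the per-frame assignment step concrete, here is a minimal standalone sketch (not part of the changeset) of how the munkres package resolves the unmatched ground truth (GT) and tracker output (TO) at one instant; the distance values, the matchingDistance threshold and the commented call at the end are made up for illustration. The sketch keeps an explicit mapping from cost rows back to GT indices; the changeset instead indexes unmatchedGTs[k] by the row position in costs, which only stays aligned as long as no GT row is dropped by the min(aCosts) test.

from munkres import Munkres

matchingDistance = 5.
# Hypothetical distances between three unmatched GT annotations (rows) and two
# unmatched tracker objects (columns) at a single instant; matchingDistance + 1
# stands for "no possible match", mirroring what matches() above returns so that
# munkres always works on finite costs.
costs = [[1.2, 6.],
         [6., 0.8],
         [6., 6.]]
# keep only GT rows that can be matched at all, as computeClearMOT does
rows = [i for i, row in enumerate(costs) if min(row) < matchingDistance]
filteredCosts = [costs[i] for i in rows]

munk = Munkres()
for k, v in munk.compute(filteredCosts):        # minimum total cost one-to-one assignment
    if filteredCosts[k][v] < matchingDistance:  # drop assignments above the threshold
        print('GT {} matched to TO {} at distance {}'.format(rows[k], v, filteredCosts[k][v]))

# Hypothetical end-to-end call, assuming annotations (ground truth, e.g. BBAnnotations)
# and objects (tracker output) are already loaded and projected to the same space:
#   motp, mota, mt, mme, fpt, gt = computeClearMOT(objects, annotations, 5., 0, 1000, debug = True)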