Mercurial hosting > traffic-intelligence
Comparison view of scripts/display-synced-trajectories.py at changeset 832:02f2809c2f66
Commit message: "work in progress on synced trajectory display"
Author: Nicolas Saunier <nicolas.saunier@polymtl.ca>
Date: Wed, 29 Jun 2016 17:57:21 -0400
Parent changeset: 831:a8ff35e6fb43
Child changeset: 7058a40a4bbc
1 #! /usr/bin/env python | |
2 | |
3 import sys, argparse, os.path | |
4 from datetime import datetime, timedelta | |
5 from numpy import array | |
6 import cv2 | |
7 import cvutils, utils, storage | |
8 from metadata import createDatabase, Site, VideoSequence | |
9 | |
# Command-line interface: play several synchronized camera views of one site,
# overlaying the trajectories stored in the merged trajectory database.
parser = argparse.ArgumentParser(description='The program displays several views of the same site synchronously.')
parser.add_argument('-i', dest = 'metadataFilename', help = 'name of the metadata file', required = True)
#parser.add_argument('-n', dest = 'siteId', help = 'site id or site name', required = True)
parser.add_argument('-d', dest = 'databaseFilename', help = 'name of the Sqlite database file', required = True)
# argparse %-formats help strings, so literal % characters must be doubled;
# the original unescaped '%Y-%m-%d %H:%M:%S' made `-h` raise
# "ValueError: unsupported format character 'Y'"
parser.add_argument('-f', dest = 'startTime', help = 'time to start playing (format %%Y-%%m-%%d %%H:%%M:%%S, eg 2011-06-22 10:00:39)', required = True)
parser.add_argument('-t', dest = 'trajectoryType', help = 'type of trajectories to display', choices = ['feature', 'object'], default = 'object')
parser.add_argument('-r', dest = 'rescale', help = 'rescaling factor for the displayed image', default = 1., type = float)
parser.add_argument('-s', dest = 'step', help = 'display every s image', default = 1, type = int)

args = parser.parse_args()
20 | |
# Open the site metadata database; the merged video sequence is identified by
# the trajectory database filename given on the command line.
session = createDatabase(args.metadataFilename)

sequenceQuery = session.query(VideoSequence)
mergedSequence = sequenceQuery.filter(VideoSequence.databaseFilename == args.databaseFilename).first()
if mergedSequence is None:
    print('Video sequence {} was not found in {}. Exiting'.format(args.databaseFilename, args.metadataFilename))
    sys.exit()

# filenames stored in the metadata are relative to the metadata file's directory
dirname = os.path.split(args.metadataFilename)[0]
29 | |
startTime = datetime.strptime(args.startTime, utils.datetimeFormat)
# Frame number of startTime in the merged sequence: trajectories in the merged
# database are indexed by these frame numbers, so this anchors the overlay.
# Original line was an incomplete expression ('...seconds*', a syntax error);
# getFrameNum is the conversion the other sequences already use below.
# TODO issue with framerate — getFrameNum relies on the metadata frame rate; confirm
if startTime > mergedSequence.startTime:
    mergedFirstFrameNum = mergedSequence.getFrameNum(startTime)
else:
    mergedFirstFrameNum = 0

# Other sequences of the same site that have started by startTime and still cover it
videoSequences = session.query(VideoSequence).filter(VideoSequence.site == mergedSequence.site).filter(VideoSequence.startTime <= startTime).all()
videoSequences.remove(mergedSequence)
videoSequences = [v for v in videoSequences if v.containsInstant(startTime)]
filenames = [dirname+os.path.sep+v.getVideoSequenceFilename() for v in videoSequences]
firstFrameNums = [v.getFrameNum(startTime) for v in videoSequences]
windowNames = [v.cameraView.description for v in videoSequences]

# Trajectories from the merged database (feature or object per -t)
objects = storage.loadTrajectoriesFromSqlite(dirname+os.path.sep+mergedSequence.getDatabaseFilename(), args.trajectoryType)

if len(filenames) == 0:
    print('Empty filename list')
    sys.exit()

if windowNames is None: # defensive: windowNames is always a list here
    windowNames = ['frame{}'.format(i) for i in xrange(len(filenames))]
for windowName in windowNames:
    cv2.namedWindow(windowName, cv2.WINDOW_NORMAL)

wait = 0 # interactive: wait indefinitely for a key press after each frame
step = args.step       # was hard-coded to 1, silently ignoring the -s option
rescale = args.rescale # was hard-coded to 1., silently ignoring the -r option
captures = [cv2.VideoCapture(fn) for fn in filenames]
if array([cap.isOpened() for cap in captures]).all():
    key = -1
    ret = True
    nFramesShown = 0
    if firstFrameNums is not None:
        # seek each capture to the frame corresponding to startTime
        for i in xrange(len(captures)):
            captures[i].set(cv2.cv.CV_CAP_PROP_POS_FRAMES, firstFrameNums[i])
    while ret and not cvutils.quitKey(key):
        rets = []
        images = []
        for cap in captures:
            ret, img = cap.read()
            rets.append(ret)
            images.append(img)
        if array(rets).all():
            print('frame shown {0}'.format(nFramesShown))
            # instant in the merged time reference matching the current images
            instant = mergedFirstFrameNum + nFramesShown
            for i in xrange(len(filenames)):
                for obj in objects:
                    # original called existsAtInstant() without an instant (TypeError)
                    if obj.existsAtInstant(instant):
                        pass # TODO project and draw obj in view i (work in progress)
                cvutils.cvImshow(windowNames[i], images[i], rescale) # cv2.imshow('frame', img)
            key = cv2.waitKey(wait)
            nFramesShown += step
            if step > 1:
                for i in xrange(len(captures)):
                    # original bug: called set() on the captures list, not captures[i]
                    captures[i].set(cv2.cv.CV_CAP_PROP_POS_FRAMES, firstFrameNums[i]+nFramesShown)
    cv2.destroyAllWindows()
else:
    print('Video captures for {} failed'.format(filenames))