comparison scripts/process.py @ 1050:9d4a06f49cb8

work in progress
author Nicolas Saunier <nicolas.saunier@polymtl.ca>
date Fri, 06 Jul 2018 18:12:15 -0400
parents c9c03c97ed9f
children 16575ca4537d
comparison of 1049:c9c03c97ed9f with 1050:9d4a06f49cb8
59 59
60 # analysis options 60 # analysis options
61 parser.add_argument('--output', dest = 'output', help = 'kind of output to produce (interval means)', choices = ['figure', 'interval', 'event']) 61 parser.add_argument('--output', dest = 'output', help = 'kind of output to produce (interval means)', choices = ['figure', 'interval', 'event'])
62 parser.add_argument('--min-user-duration', dest = 'minUserDuration', help = 'minimum duration we have to see the user to take into account in the analysis (s)', type = float, default = 0.1) 62 parser.add_argument('--min-user-duration', dest = 'minUserDuration', help = 'minimum duration we have to see the user to take into account in the analysis (s)', type = float, default = 0.1)
63 parser.add_argument('--interval-duration', dest = 'intervalDuration', help = 'length of time interval to aggregate data (min)', type = float, default = 15.) 63 parser.add_argument('--interval-duration', dest = 'intervalDuration', help = 'length of time interval to aggregate data (min)', type = float, default = 15.)
64 parser.add_argument('--aggregation', dest = 'aggMethod', help = 'aggregation method per user/event and per interval', choices = ['mean', 'median', 'centile'], nargs = '*', default = ['median']) 64 parser.add_argument('--aggregation', dest = 'aggMethod', help = 'aggregation method per user/interaction and per interval', choices = ['mean', 'median', 'centile'], nargs = '*', default = ['median'])
65 parser.add_argument('--aggregation-centile', dest = 'aggCentiles', help = 'centile(s) to compute from the observations', nargs = '*', type = int) 65 parser.add_argument('--aggregation-centile', dest = 'aggCentiles', help = 'centile(s) to compute from the observations', nargs = '*', type = int)
66 dpi = 150 66 dpi = 150
67 # unit of analysis: site or video sequence? 67 # unit of analysis: site - camera-view
68 68
69 # need way of selecting sites as similar as possible to sql alchemy syntax 69 # need way of selecting sites as similar as possible to sql alchemy syntax
70 # override tracking.cfg from db 70 # override tracking.cfg from db
71 # manage cfg files, overwrite them (or a subset of parameters) 71 # manage cfg files, overwrite them (or a subset of parameters)
72 # delete sqlite files 72 # delete sqlite files
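The --aggregation and --aggregation-centile options above feed the aggFunctions dictionary iterated over further down in the speed-extraction hunk. A minimal sketch of how such a mapping could be built with numpy; makeAggFunctions is a hypothetical helper, not necessarily the one process.py uses:

    from functools import partial
    import numpy as np

    def makeAggFunctions(aggMethods, aggCentiles):
        'Hypothetical helper: map each requested aggregation method to a callable over a 1D array of values'
        aggFunctions = {}
        for method in aggMethods:
            if method == 'mean':
                aggFunctions[method] = np.mean
            elif method == 'median':
                aggFunctions[method] = np.median
            elif method == 'centile':
                # np.percentile with a list of centiles returns an array, hence the .tolist() in the script
                aggFunctions[method] = partial(np.percentile, q = aggCentiles)
        return aggFunctions

Calling makeAggFunctions(args.aggMethod, args.aggCentiles) would then yield the method -> function mapping used when aggregating speeds per user and per interval.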
88 siteIds = set(args.siteIds) 88 siteIds = set(args.siteIds)
89 for siteId in siteIds: 89 for siteId in siteIds:
90 tmpsites = getSite(session, siteId) 90 tmpsites = getSite(session, siteId)
91 sites.extend(tmpsites) 91 sites.extend(tmpsites)
92 for site in tmpsites: 92 for site in tmpsites:
93 for cv in site.cameraViews: 93 videoSequences.extend(getSiteVideoSequences(site))
94 videoSequences.extend(cv.videoSequences)
95 else: 94 else:
96 print('No video/site to process') 95 print('No video/site to process')
97 96
98 if args.nProcesses > 1: 97 if args.nProcesses > 1:
99 pool = Pool(args.nProcesses) 98 pool = Pool(args.nProcesses)
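pool = Pool(args.nProcesses) together with the pool.close()/pool.join() calls in the next hunk follows the standard multiprocessing fan-out pattern; a self-contained sketch with a placeholder worker and placeholder inputs (neither is the actual call made by process.py):

    from multiprocessing import Pool

    def work(databaseFilename):
        'Placeholder worker: process one video sequence / sqlite database'
        pass

    if __name__ == '__main__':
        filenames = ['a.sqlite', 'b.sqlite']        # placeholder inputs
        pool = Pool(2)
        for f in filenames:
            pool.apply_async(work, args = (f,))     # submit tasks asynchronously
        pool.close()                                # no more tasks will be submitted
        pool.join()                                 # wait for all workers to finish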
145 print('SQLite already exists: {}'.format(parentPath/vs.getDatabaseFilename())) 144 print('SQLite already exists: {}'.format(parentPath/vs.getDatabaseFilename()))
146 pool.close() 145 pool.close()
147 pool.join() 146 pool.join()
148 147
149 elif args.process == 'prototype': # motion pattern learning 148 elif args.process == 'prototype': # motion pattern learning
150 # learn by site by default -> group videos by site (or by camera view? TODO add cameraviews) 149 # learn by site by default -> group videos by camera view TODO
151 # by default, load all objects, learn and then assign (BUT not save the assignments) 150 # by default, load all objects, learn and then assign (BUT not save the assignments)
152 for site in sites: 151 for site in sites:
153 print('Learning motion patterns for site {} ({})'.format(site.idx, site.name)) 152 print('Learning motion patterns for site {} ({})'.format(site.idx, site.name))
154 objects = {} 153 objects = {}
155 object2VideoSequences = {} 154 object2VideoSequences = {}
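In the prototype branch above, objects from all of a site's video sequences are pooled and object2VideoSequences remembers which sequence each object came from, so that a learned prototype can later be saved with the database filename it originates from. A hedged sketch of that accumulation, reusing names visible in this diff (the script's actual loading code in the elided lines may differ):

    object2VideoSequences = {}   # object -> video sequence it was loaded from
    trainingObjects = []
    for vs in getSiteVideoSequences(site):
        for o in storage.loadTrajectoriesFromSqlite(str(parentPath/vs.getDatabaseFilename()), 'object', args.nObjects):
            trainingObjects.append(o)
            object2VideoSequences[o] = vs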
175 prototypeIndices, labels = processing.learnAssignMotionPatterns(True, True, trainingObjects, similarities, args.minSimilarity, similarityFunc, args.minClusterSize, args.optimizeCentroid, args.randomInitialization, True, []) 174 prototypeIndices, labels = processing.learnAssignMotionPatterns(True, True, trainingObjects, similarities, args.minSimilarity, similarityFunc, args.minClusterSize, args.optimizeCentroid, args.randomInitialization, True, [])
176 if args.outputPrototypeDatabaseFilename is None: 175 if args.outputPrototypeDatabaseFilename is None:
177 outputPrototypeDatabaseFilename = args.databaseFilename 176 outputPrototypeDatabaseFilename = args.databaseFilename
178 else: 177 else:
179 outputPrototypeDatabaseFilename = args.outputPrototypeDatabaseFilename 178 outputPrototypeDatabaseFilename = args.outputPrototypeDatabaseFilename
180 # TODO maintain mapping from object prototype to db filename + compute nmatchings before
181 clusterSizes = ml.computeClusterSizes(labels, prototypeIndices, -1) 179 clusterSizes = ml.computeClusterSizes(labels, prototypeIndices, -1)
182 storage.savePrototypesToSqlite(str(parentPath/site.getPath()/outputPrototypeDatabaseFilename), [moving.Prototype(object2VideoSequences[trainingObjects[i]].getDatabaseFilename(False), trainingObjects[i].getNum(), prototypeType, clusterSizes[i]) for i in prototypeIndices]) 180 storage.savePrototypesToSqlite(str(parentPath/site.getPath()/outputPrototypeDatabaseFilename), [moving.Prototype(object2VideoSequences[trainingObjects[i]].getDatabaseFilename(False), trainingObjects[i].getNum(), prototypeType, clusterSizes[i]) for i in prototypeIndices])
183 181
184 182
185 elif args.process == 'interaction': 183 elif args.process == 'interaction':
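For the prototype branch, the cluster size stored with each prototype is conceptually the number of objects whose label points to that prototype; a rough, illustrative equivalent of the ml.computeClusterSizes call above, not the library's actual implementation:

    def clusterSizes(labels, prototypeIndices):
        'Illustrative only: number of objects assigned to each prototype index'
        return {i: sum(1 for l in labels if l == i) for i in prototypeIndices}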
230 minUserDuration = args.minUserDuration*vs.cameraView.cameraType.frameRate 228 minUserDuration = args.minUserDuration*vs.cameraView.cameraType.frameRate
231 print('Extracting speed from '+vs.getDatabaseFilename()) 229 print('Extracting speed from '+vs.getDatabaseFilename())
232 objects = storage.loadTrajectoriesFromSqlite(str(parentPath/vs.getDatabaseFilename()), 'object', args.nObjects) 230 objects = storage.loadTrajectoriesFromSqlite(str(parentPath/vs.getDatabaseFilename()), 'object', args.nObjects)
233 for o in objects: 231 for o in objects:
234 if o.length() > minUserDuration: 232 if o.length() > minUserDuration:
235 row = [vs.cameraView.siteIdx, d, utils.framesToTime(o.getFirstInstant(), vs.cameraView.cameraType.frameRate, t1), o.getUserType()] 233 row = [vs.cameraView.site.name, d, utils.framesToTime(o.getFirstInstant(), vs.cameraView.cameraType.frameRate, t1), o.getUserType()]
236 tmp = o.getSpeeds() 234 tmp = o.getSpeeds()
237 for method,func in aggFunctions.items(): 235 for method,func in aggFunctions.items():
238 aggSpeeds = vs.cameraView.cameraType.frameRate*3.6*func(tmp) 236 aggSpeeds = vs.cameraView.cameraType.frameRate*3.6*func(tmp)
239 if method == 'centile': 237 if method == 'centile':
240 row += aggSpeeds.tolist() 238 row += aggSpeeds.tolist()
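In the speed extraction above, frameRate*3.6 converts speeds stored per frame into km/h, assuming trajectories are in metres: (m/frame) * (frames/s) gives m/s, and 1 m/s = 3.6 km/h. A small worked example with assumed numbers:

    frameRate = 30.                            # frames per second (assumed)
    speedPerFrame = 0.5                        # m/frame, i.e. 15 m/s
    speedKmh = frameRate*3.6*speedPerFrame     # 54.0 km/h

The same logic applies to args.minUserDuration*frameRate, which converts the minimum duration from seconds to frames before comparing it to o.length().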
250 plt.ylabel(name+' Speeds (km/h)') 248 plt.ylabel(name+' Speeds (km/h)')
251 plt.savefig(name.lower()+'-speeds.png', dpi=dpi) 249 plt.savefig(name.lower()+'-speeds.png', dpi=dpi)
252 plt.close() 250 plt.close()
253 elif args.output == 'event': 251 elif args.output == 'event':
254 data.to_csv('speeds.csv', index = False) 252 data.to_csv('speeds.csv', index = False)
255 if args.analyze == 'interaction': 253
254 if args.analyze == 'interaction': # redo as for object, export in dataframe all interaction data
256 indicatorIds = [2,5,7,10] 255 indicatorIds = [2,5,7,10]
257 conversionFactors = {2: 1., 5: 30.*3.6, 7:1./30, 10:1./30} 256 conversionFactors = {2: 1., 5: 30.*3.6, 7:1./30, 10:1./30}
258 maxIndicatorValue = {2: float('inf'), 5: float('inf'), 7:10., 10:10.} 257 maxIndicatorValue = {2: float('inf'), 5: float('inf'), 7:10., 10:10.}
259 indicators = {} 258 indicators = {}
260 interactions = {} 259 interactions = {}
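The constants in this last hunk appear to assume a 30 fps camera: a factor of 30.*3.6 turns a per-frame speed-type indicator into km/h, 1./30 turns frame counts into seconds, and maxIndicatorValue caps the time-based indicators at 10 s (which specific safety indicators the ids 2, 5, 7 and 10 denote is not stated here). A sketch of how such factors and caps might typically be applied; rawValues and the filtering are placeholders, not the script's elided logic:

    indicatorIds = [2, 5, 7, 10]
    conversionFactors = {2: 1., 5: 30.*3.6, 7: 1./30, 10: 1./30}
    maxIndicatorValue = {2: float('inf'), 5: float('inf'), 7: 10., 10: 10.}
    rawValues = {2: [12.3], 5: [0.4], 7: [45], 10: [400]}   # placeholder raw indicator samples
    converted = {}
    for num in indicatorIds:
        factor = conversionFactors[num]
        # convert, then drop values that exceed this indicator's cap
        converted[num] = [factor*v for v in rawValues[num] if factor*v < maxIndicatorValue[num]]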