Mercurial Hosting > traffic-intelligence
comparison scripts/process.py @ 985:668a85c963c3
work on processing and managing large video datasets
author:   Nicolas Saunier <nicolas.saunier@polymtl.ca>
date:     Tue, 06 Mar 2018 22:44:33 -0500
parents:  a69695d14e59
children: 3be8aaa47651
Comparison of revisions 984:a69695d14e59 and 985:668a85c963c3
(diff legend: equal / deleted / inserted / replaced)
8 | 8 |
9 parser = argparse.ArgumentParser(description='This program manages the processing of several files based on a description of the sites and video data in an SQLite database following the metadata module.') | 9 parser = argparse.ArgumentParser(description='This program manages the processing of several files based on a description of the sites and video data in an SQLite database following the metadata module.') |
10 parser.add_argument('--db', dest = 'metadataFilename', help = 'name of the metadata file', required = True) | 10 parser.add_argument('--db', dest = 'metadataFilename', help = 'name of the metadata file', required = True) |
11 parser.add_argument('--videos', dest = 'videoIds', help = 'indices of the video sequences', nargs = '*', type = int) | 11 parser.add_argument('--videos', dest = 'videoIds', help = 'indices of the video sequences', nargs = '*', type = int) |
12 parser.add_argument('--pet', dest = 'computePET', help = 'computes PET', action = 'store_true') | 12 parser.add_argument('--pet', dest = 'computePET', help = 'computes PET', action = 'store_true') |
13 parser.add_argument('--delete', dest = 'delete', help = 'data to delete', choices = ['feature', 'object', 'classification', 'interaction']) | |
14 parser.add_argument('--process', dest = 'process', help = 'data to process', choices = ['feature', 'object', 'classification', 'interaction']) | |
15 parser.add_argument('--analyze', dest = 'analyze', help = 'data to analyze (results)', choices = ['feature', 'object', 'classification', 'interaction']) | |
13 | 16 |
14 # need way of selecting sites as similar as possible to sql alchemy syntax | 17 # need way of selecting sites as similar as possible to sql alchemy syntax |
15 # override tracking.cfg from db | 18 # override tracking.cfg from db |
16 # manage cfg files, overwrite them (or a subset of parameters) | 19 # manage cfg files, overwrite them (or a subset of parameters) |
17 # delete sqlite files | 20 # delete sqlite files |
19 parser.add_argument('--nthreads', dest = 'nProcesses', help = 'number of processes to run in parallel', type = int, default = 1) | 22 parser.add_argument('--nthreads', dest = 'nProcesses', help = 'number of processes to run in parallel', type = int, default = 1) |
20 | 23 |
21 args = parser.parse_args() | 24 args = parser.parse_args() |
22 # files are relative to metadata location | 25 # files are relative to metadata location |
23 | 26 |
24 session = createDatabase(args.metadataFilename) | 27 session = connectDatabase(args.metadataFilename) |
25 parentDir = Path(args.metadataFilename).parent | 28 parentDir = Path(args.metadataFilename).parent |
26 | 29 |
27 # todo change prediction parameters | 30 if args.delete is not None: |
28 predictionParameters = prediction.CVExactPredictionParameters() | 31 if args.delete in ['object', 'interaction']: |
32 #parser.add_argument('-t', dest = 'dataType', help = 'type of the data to remove', required = True, choices = ['object','interaction', 'bb', 'pois', 'prototype']) | |
33 for videoId in args.videoIds: | |
34 vs = session.query(VideoSequence).get(videoId) | |
35 storage.deleteFromSqlite(str(parentDir/vs.getDatabaseFilename()), args.delete) | |
29 | 36 |
30 for videoId in args.videoIds: | 37 if args.process == 'interaction': |
31 vs = session.query(VideoSequence).get(videoId) | 38 # safety analysis TODO make function in safety analysis script |
32 print(vs.getDatabaseFilename()) | 39 predictionParameters = prediction.CVExactPredictionParameters() |
33 objects = storage.loadTrajectoriesFromSqlite(str(parentDir/vs.getDatabaseFilename()), 'object')#, args.nObjects, withFeatures = (params.useFeaturesForPrediction or predictionMethod == 'ps' or predictionMethod == 'mp')) | 40 for videoId in args.videoIds: |
34 interactions = events.createInteractions(objects) | 41 vs = session.query(VideoSequence).get(videoId) |
35 #if args.nProcesses == 1: | 42 print('Processing '+vs.getDatabaseFilename()) |
36 params = storage.ProcessParameters(str(parentDir/vs.cameraView.getTrackingConfigurationFilename())) | 43 objects = storage.loadTrajectoriesFromSqlite(str(parentDir/vs.getDatabaseFilename()), 'object')#, args.nObjects, withFeatures = (params.useFeaturesForPrediction or predictionMethod == 'ps' or predictionMethod == 'mp')) |
37 #print(interactions, True, args.computePET, predictionParameters, params.collisionDistance, params.predictionTimeHorizon, params.crossingZones) | 44 interactions = events.createInteractions(objects) |
38 processed = events.computeIndicators(interactions, True, args.computePET, predictionParameters, params.collisionDistance, params.predictionTimeHorizon, params.crossingZones, False, None) | 45 #if args.nProcesses == 1: |
39 storage.saveIndicatorsToSqlite(str(parentDir/vs.getDatabaseFilename()), processed) | 46 #print(str(parentDir/vs.cameraView.getTrackingConfigurationFilename())) |
40 | 47 params = storage.ProcessParameters(str(parentDir/vs.cameraView.getTrackingConfigurationFilename())) |
41 # else: | 48 #print(len(interactions), args.computePET, predictionParameters, params.collisionDistance, params.predictionTimeHorizon, params.crossingZones) |
42 # pool = Pool(processes = args.nProcesses) | 49 processed = events.computeIndicators(interactions, True, args.computePET, predictionParameters, params.collisionDistance, params.predictionTimeHorizon, params.crossingZones, False, None) |
43 # nInteractionPerProcess = int(np.ceil(len(interactions)/float(args.nProcesses))) | 50 storage.saveIndicatorsToSqlite(str(parentDir/vs.getDatabaseFilename()), processed) |
44 # jobs = [pool.apply_async(events.computeIndicators, args = (interactions[i*nInteractionPerProcess:(i+1)*nInteractionPerProcess], not args.noMotionPrediction, args.computePET, predictionParameters, params.collisionDistance, params.predictionTimeHorizon, params.crossingZones, False, None)) for i in range(args.nProcesses)] | 51 # else: |
45 # processed = [] | 52 # pool = Pool(processes = args.nProcesses) |
46 # for job in jobs: | 53 # nInteractionPerProcess = int(np.ceil(len(interactions)/float(args.nProcesses))) |
47 # processed += job.get() | 54 # jobs = [pool.apply_async(events.computeIndicators, args = (interactions[i*nInteractionPerProcess:(i+1)*nInteractionPerProcess], not args.noMotionPrediction, args.computePET, predictionParameters, params.collisionDistance, params.predictionTimeHorizon, params.crossingZones, False, None)) for i in range(args.nProcesses)] |
48 # pool.close() | 55 # processed = [] |
56 # for job in jobs: | |
57 # processed += job.get() | |
58 # pool.close() | |
59 | |
60 if args.analyze == 'interaction': | |
61 indicatorIds = [2,5,7,10] | |
62 indicators = {} | |
63 interactions = {} | |
64 for videoId in args.videoIds: | |
65 vs = session.query(VideoSequence).get(videoId) | |
66 if not vs.cameraView.siteIdx in interactions: | |
67 interactions[vs.cameraView.siteIdx] = [] | |
68 for i in indicatorIds: | |
69 indicators[events.Interaction.indicatorNames[i]][vs.cameraView.siteIdx] = [] | |
70 interactions[vs.cameraView.siteIdx].append(storage.loadInteractionsFromSqlite(str(parentDir/vs.getDatabaseFilename()))) | |
71 print(vs.getDatabaseFilename(), len(interactions)) | |
72 for inter in interactions[videoId]: | |
73 for i in indicatorIds: | |
74 indic = inter.getIndicator(events.Interaction.indicatorNames[i]) | |
75 if indic is not None: | |
76 indicators[events.Interaction.indicatorNames[i]][vs.cameraView.siteIdx].append(indic.getMostSevereValue()) |