Mercurial Hosting > traffic-intelligence
comparison scripts/process.py @ 1043:b735895c8815
work in progress on process (learn motion patterns)
author | Nicolas Saunier <nicolas.saunier@polymtl.ca> |
---|---|
date | Wed, 04 Jul 2018 17:39:39 -0400 |
parents | 5621e4ad2428 |
children | 75a6ad604cc5 |
comparison
equal
deleted
inserted
replaced
1042:b1ba6d44fcb9 | 1043:b735895c8815 |
---|---|
26 parser.add_argument('--analyze', dest = 'analyze', help = 'data to analyze (results)', choices = ['feature', 'object', 'classification', 'interaction']) | 26 parser.add_argument('--analyze', dest = 'analyze', help = 'data to analyze (results)', choices = ['feature', 'object', 'classification', 'interaction']) |
27 | 27 |
28 # common options | 28 # common options |
29 parser.add_argument('--cfg', dest = 'configFilename', help = 'name of the configuration file') | 29 parser.add_argument('--cfg', dest = 'configFilename', help = 'name of the configuration file') |
30 parser.add_argument('-n', dest = 'nObjects', help = 'number of objects/interactions to process', type = int) | 30 parser.add_argument('-n', dest = 'nObjects', help = 'number of objects/interactions to process', type = int) |
31 parser.add_argument('-t', dest = 'trajectoryType', help = 'type of trajectories', choices = ['objectfeature', 'feature', 'object'], default = 'objectfeature') | |
31 parser.add_argument('--dry', dest = 'dryRun', help = 'dry run of processing', action = 'store_true') | 32 parser.add_argument('--dry', dest = 'dryRun', help = 'dry run of processing', action = 'store_true') |
32 parser.add_argument('--nthreads', dest = 'nProcesses', help = 'number of processes to run in parallel', type = int, default = 1) | 33 parser.add_argument('--nthreads', dest = 'nProcesses', help = 'number of processes to run in parallel', type = int, default = 1) |
34 parser.add_argument('--subsample', dest = 'positionSubsamplingRate', help = 'rate of position subsampling (1 every n positions)', type = int) | |
35 parser.add_argument('--display', dest = 'display', help = 'display trajectories', action = 'store_true') | |
36 | |
37 ### process options | |
38 # motion pattern learning and assignment | |
39 parser.add_argument('--prototype-filename', dest = 'outputPrototypeDatabaseFilename', help = 'name of the Sqlite database file to save prototypes') | |
40 #parser.add_argument('-i', dest = 'inputPrototypeDatabaseFilename', help = 'name of the Sqlite database file for prototypes to start the algorithm with') | |
41 parser.add_argument('--max-nobjectfeatures', dest = 'maxNObjectFeatures', help = 'maximum number of features per object to load', type = int, default = 1) | |
42 parser.add_argument('--maxdist', dest = 'epsilon', help = 'distance for the similarity of trajectory points', type = float, required = True) | |
43 parser.add_argument('--metric', dest = 'metric', help = 'metric for the similarity of trajectory points', default = 'cityblock') # default is manhattan distance | |
44 parser.add_argument('-minsimil', dest = 'minSimilarity', help = 'minimum similarity to put a trajectory in a cluster', type = float, required = True) | |
45 parser.add_argument('-min-cluster-size', dest = 'minClusterSize', help = 'minimum cluster size', type = int, default = 0) | |
46 parser.add_argument('--learn', dest = 'learn', help = 'learn', action = 'store_true') | |
47 parser.add_argument('--optimize', dest = 'optimizeCentroid', help = 'recompute centroid at each assignment', action = 'store_true') | |
48 parser.add_argument('--random', dest = 'randomInitialization', help = 'random initialization of clustering algorithm', action = 'store_true') | |
49 #parser.add_argument('--similarities-filename', dest = 'similaritiesFilename', help = 'filename of the similarities') | |
50 parser.add_argument('--save-similarities', dest = 'saveSimilarities', help = 'save computed similarities (in addition to prototypes)', action = 'store_true') | |
51 parser.add_argument('--save-assignments', dest = 'saveAssignments', help = 'saves the assignments of the objects to the prototypes', action = 'store_true') | |
52 parser.add_argument('--assign', dest = 'assign', help = 'assigns the objects to the prototypes and saves the assignments', action = 'store_true') | |
53 | |
54 # safety analysis | |
55 parser.add_argument('--prediction-method', dest = 'predictionMethod', help = 'prediction method (constant velocity (cvd: vector computation (approximate); cve: equation solving; cv: discrete time (approximate)), normal adaptation, point set prediction)', choices = ['cvd', 'cve', 'cv', 'na', 'ps', 'mp']) | |
56 parser.add_argument('--pet', dest = 'computePET', help = 'computes PET', action = 'store_true') | |
57 # override other tracking config, erase sqlite? | |
58 | |
33 | 59 |
34 # analysis options | 60 # analysis options |
35 parser.add_argument('--output', dest = 'output', help = 'kind of output to produce (interval means)', choices = ['figure', 'interval', 'event']) | 61 parser.add_argument('--output', dest = 'output', help = 'kind of output to produce (interval means)', choices = ['figure', 'interval', 'event']) |
36 parser.add_argument('--min-user-duration', dest = 'minUserDuration', help = 'minimum duration we have to see the user to take into account in the analysis (s)', type = float, default = 0.1) | 62 parser.add_argument('--min-user-duration', dest = 'minUserDuration', help = 'minimum duration we have to see the user to take into account in the analysis (s)', type = float, default = 0.1) |
37 parser.add_argument('--interval-duration', dest = 'intervalDuration', help = 'length of time interval to aggregate data (min)', type = float, default = 15.) | 63 parser.add_argument('--interval-duration', dest = 'intervalDuration', help = 'length of time interval to aggregate data (min)', type = float, default = 15.) |
38 parser.add_argument('--aggregation', dest = 'aggMethod', help = 'aggregation method per user/event and per interval', choices = ['mean', 'median', 'centile'], nargs = '*', default = ['median']) | 64 parser.add_argument('--aggregation', dest = 'aggMethod', help = 'aggregation method per user/event and per interval', choices = ['mean', 'median', 'centile'], nargs = '*', default = ['median']) |
39 parser.add_argument('--aggregation-centile', dest = 'aggCentiles', help = 'centile(s) to compute from the observations', nargs = '*', type = int) | 65 parser.add_argument('--aggregation-centile', dest = 'aggCentiles', help = 'centile(s) to compute from the observations', nargs = '*', type = int) |
40 dpi = 150 | 66 dpi = 150 |
41 # unit of analysis: site or video sequence? | 67 # unit of analysis: site or video sequence? |
42 | 68 |
43 # safety analysis | |
44 parser.add_argument('--prediction-method', dest = 'predictionMethod', help = 'prediction method (constant velocity (cvd: vector computation (approximate); cve: equation solving; cv: discrete time (approximate)), normal adaptation, point set prediction)', choices = ['cvd', 'cve', 'cv', 'na', 'ps', 'mp']) | |
45 parser.add_argument('--pet', dest = 'computePET', help = 'computes PET', action = 'store_true') | |
46 # override other tracking config, erase sqlite? | |
47 | |
48 # need way of selecting sites as similar as possible to sql alchemy syntax | 69 # need way of selecting sites as similar as possible to sql alchemy syntax |
49 # override tracking.cfg from db | 70 # override tracking.cfg from db |
50 # manage cfg files, overwrite them (or a subset of parameters) | 71 # manage cfg files, overwrite them (or a subset of parameters) |
51 # delete sqlite files | 72 # delete sqlite files |
52 # info of metadata | 73 # info of metadata |
59 session = connectDatabase(args.metadataFilename) | 80 session = connectDatabase(args.metadataFilename) |
60 parentPath = Path(args.metadataFilename).parent # files are relative to metadata location | 81 parentPath = Path(args.metadataFilename).parent # files are relative to metadata location |
61 videoSequences = [] | 82 videoSequences = [] |
62 if args.videoIds is not None: | 83 if args.videoIds is not None: |
63 videoSequences = [session.query(VideoSequence).get(videoId) for videoId in args.videoIds] | 84 videoSequences = [session.query(VideoSequence).get(videoId) for videoId in args.videoIds] |
85 siteIds = set([vs.cameraView.siteIdx for vs in videoSequences]) | |
64 elif args.siteIds is not None: | 86 elif args.siteIds is not None: |
65 for siteId in args.siteIds: | 87 siteIds = set(args.siteIds) |
88 for siteId in siteIds: | |
66 for site in getSite(session, siteId): | 89 for site in getSite(session, siteId): |
67 for cv in site.cameraViews: | 90 for cv in site.cameraViews: |
68 videoSequences += cv.videoSequences | 91 videoSequences += cv.videoSequences |
69 else: | 92 else: |
70 print('No video/site to process') | 93 print('No video/site to process') |
119 print('SQLite already exists: {}'.format(parentPath/vs.getDatabaseFilename())) | 142 print('SQLite already exists: {}'.format(parentPath/vs.getDatabaseFilename())) |
120 pool.close() | 143 pool.close() |
121 pool.join() | 144 pool.join() |
122 | 145 |
123 elif args.process == 'prototype': # motion pattern learning | 146 elif args.process == 'prototype': # motion pattern learning |
124 pass | 147 # learn by site by default -> group videos by site (or by camera view? TODO add cameraviews) |
148 # by default, load all objects, learn and then assign | |
149 objects = {siteId: [] for siteId in siteIds} | |
150 for vs in videoSequences: | |
151 print('Loading '+vs.getDatabaseFilename()) | |
152 objects[vs.cameraView.siteIdx] += storage.loadTrajectoriesFromSqlite(str(parentPath/vs.getDatabaseFilename()), args.trajectoryType, args.nTrajectories, timeStep = args.positionSubsamplingRate) | |
153 | |
125 | 154 |
126 elif args.process == 'interaction': | 155 elif args.process == 'interaction': |
127 # safety analysis TODO make function in safety analysis script | 156 # safety analysis TODO make function in safety analysis script |
128 if args.predictionMethod == 'cvd': | 157 if args.predictionMethod == 'cvd': |
129 predictionParameters = prediction.CVDirectPredictionParameters() | 158 predictionParameters = prediction.CVDirectPredictionParameters() |
181 row += aggSpeeds.tolist() | 210 row += aggSpeeds.tolist() |
182 else: | 211 else: |
183 row.append(aggSpeeds) | 212 row.append(aggSpeeds) |
184 data.append(row) | 213 data.append(row) |
185 data = DataFrame(data, columns = headers) | 214 data = DataFrame(data, columns = headers) |
186 if args.siteIds is None: | |
187 siteIds = set([vs.cameraView.siteIdx for vs in videoSequences]) | |
188 else: | |
189 siteIds = set(args.siteIds) | |
190 if args.output == 'figure': | 215 if args.output == 'figure': |
191 for name in headers[4:]: | 216 for name in headers[4:]: |
192 plt.ioff() | 217 plt.ioff() |
193 plt.figure() | 218 plt.figure() |
194 plt.boxplot([data.loc[data['sites']==siteId, name] for siteId in siteIds], labels = [session.query(Site).get(siteId).name for siteId in siteIds]) | 219 plt.boxplot([data.loc[data['sites']==siteId, name] for siteId in siteIds], labels = [session.query(Site).get(siteId).name for siteId in siteIds]) |