changeset 1058:16575ca4537d

work in progress: add an 'event' analysis option to scripts/process.py, refactor the aggregation method setup into utils.aggregationMethods and always record the configuration filename in storage.py
author Nicolas Saunier <nicolas.saunier@polymtl.ca>
date Tue, 10 Jul 2018 17:16:38 -0400
parents a7ada64b8214
children a87b3072bd26
files scripts/process.py trafficintelligence/storage.py trafficintelligence/utils.py
diffstat 3 files changed, 28 insertions(+), 14 deletions(-)
--- a/scripts/process.py	Mon Jul 09 16:21:03 2018 -0400
+++ b/scripts/process.py	Tue Jul 10 17:16:38 2018 -0400
@@ -23,7 +23,7 @@
 parser.add_argument('--delete', dest = 'delete', help = 'data to delete', choices = ['feature', 'object', 'classification', 'interaction'])
 parser.add_argument('--process', dest = 'process', help = 'data to process', choices = ['feature', 'object', 'classification', 'prototype', 'interaction'])
 parser.add_argument('--display', dest = 'display', help = 'data to display (replay over video)', choices = ['feature', 'object', 'classification', 'interaction'])
-parser.add_argument('--analyze', dest = 'analyze', help = 'data to analyze (results)', choices = ['feature', 'object', 'classification', 'interaction'])
+parser.add_argument('--analyze', dest = 'analyze', help = 'data to analyze (results)', choices = ['feature', 'object', 'classification', 'interaction', 'event'])
 
 # common options
 parser.add_argument('--cfg', dest = 'configFilename', help = 'name of the configuration file')
@@ -61,8 +61,9 @@
 parser.add_argument('--output', dest = 'output', help = 'kind of output to produce (interval means)', choices = ['figure', 'interval', 'event'])
 parser.add_argument('--min-user-duration', dest = 'minUserDuration', help = 'mininum duration we have to see the user to take into account in the analysis (s)', type = float, default = 0.1)
 parser.add_argument('--interval-duration', dest = 'intervalDuration', help = 'length of time interval to aggregate data (min)', type = float, default = 15.)
-parser.add_argument('--aggregation', dest = 'aggMethod', help = 'aggregation method per user/interaction and per interval', choices = ['mean', 'median', 'centile'], nargs = '*', default = ['median'])
+parser.add_argument('--aggregation', dest = 'aggMethods', help = 'aggregation method per user/interaction and per interval', choices = ['mean', 'median', 'centile'], nargs = '*', default = ['median'])
 parser.add_argument('--aggregation-centile', dest = 'aggCentiles', help = 'centile(s) to compute from the observations', nargs = '*', type = int)
+parser.add_argument('--event-filename', dest = 'eventFilename', help = 'filename of the event data')
 dpi = 150
 # unit of analysis: site - camera-view
 
@@ -213,15 +214,8 @@
     # aggregation per site
     data = [] # list of observation per site-user with time
     headers = ['sites', 'date', 'time', 'user_type']
-    aggFunctions = {}
-    for method in args.aggMethod:
-        if method == 'centile':
-            aggFunctions[method] = utils.aggregationFunction(method, args.aggCentiles)
-            for c in args.aggCentiles:
-                headers.append('{}{}'.format(method,c))
-        else:
-            aggFunctions[method] = utils.aggregationFunction(method)
-            headers.append(method)
+    aggFunctions, tmpheaders = utils.aggregationMethods(args.aggMethods, args.aggCentiles)
+    headers.extend(tmpheaders)
     for vs in videoSequences:
         d = vs.startTime.date()
         t1 = vs.startTime.time()
@@ -281,3 +275,11 @@
         plt.ylabel(events.Interaction.indicatorNames[i]+' ('+events.Interaction.indicatorUnits[i]+')')
         plt.savefig(events.Interaction.indicatorNames[i]+'.png', dpi=150)
         plt.close()
+
+if args.analyze == 'event': # aggregate event data by 15 min interval (args.intervalDuration)
+    data = pd.read_csv(args.eventFilename)
+    # create time for end of each 15 min, then group by, using the agg method for each data column
+    headers = ['sites', 'date', 'intervalend15']
+    # add n road users (by type?)
+    aggFunctions, tmpheaders = utils.aggregationMethods(args.aggMethods, args.aggCentiles)
+    headers.extend(tmpheaders)
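
The new 'event' block above stops after building the headers and aggregation functions. As a side note, a minimal sketch of the remaining grouping step could look like the following; the column names 'time', 'site' and 'speed' and the use of the mean and median are assumptions for illustration, not taken from the changeset, and the final implementation may differ.

import numpy as np
import pandas as pd

def aggregateEvents(eventFilename, intervalDuration = 15.):
    '''Sketch: computes the end of the time interval containing each event,
    then aggregates an (assumed) numeric column per site and interval'''
    data = pd.read_csv(eventFilename, parse_dates = ['time']) # assumed column name 'time'
    # end of the interval containing each event, eg 10:07 -> 10:15 for 15 min intervals
    data['intervalend15'] = data['time'].dt.ceil('{}min'.format(int(intervalDuration)))
    # scalar aggregation functions only: a centile function returning several
    # values would have to be split into one function per centile
    return (data.groupby(['site', 'intervalend15'])['speed'] # assumed columns 'site' and 'speed'
                .agg([np.mean, np.median])
                .reset_index())
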
--- a/trafficintelligence/storage.py	Mon Jul 09 16:21:03 2018 -0400
+++ b/trafficintelligence/storage.py	Tue Jul 10 17:16:38 2018 -0400
@@ -1333,6 +1333,7 @@
         self.stdVehicleSpeed = config.getfloat(self.sectionHeader, 'std-veh-speed')
 
     def __init__(self, filename = None):
+        self.configFilename = filename
         if filename is not None and Path(filename).exists():
             self.loadConfigFile(filename)
         else:
@@ -1501,15 +1502,13 @@
         self.maxLcssDistance = config.getfloat(self.sectionHeader, 'max-lcss-distance')
         self.lcssMetric = config.get(self.sectionHeader, 'lcss-metric')
         self.minLcssSimilarity = config.getfloat(self.sectionHeader, 'min-lcss-similarity')
-        
-        self.configFilename = filename
 
     def __init__(self, filename = None):
+        self.configFilename = filename
         if filename is not None and Path(filename).exists():
             self.loadConfigFile(filename)
         else:
             print('Configuration filename {} could not be loaded.'.format(filename))
-            self.configFilename = filename
 
 def processVideoArguments(args):
     '''Loads information from configuration file
--- a/trafficintelligence/utils.py	Mon Jul 09 16:21:03 2018 -0400
+++ b/trafficintelligence/utils.py	Tue Jul 10 17:16:38 2018 -0400
@@ -591,6 +591,19 @@
         print('Unknown aggregation method: {}'.format(funcStr))
         return None
 
+def aggregationMethods(methods, centiles = None):
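+    '''Returns the aggregation functions corresponding to the methods
+    (mean, median, centile) and the list of column headers for the aggregated values'''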
+    aggFunctions = {}
+    headers = []
+    for method in methods:
+        if method == 'centile':
+            aggFunctions[method] = aggregationFunction(method, centiles)
+            for c in centiles:
+                headers.append('{}{}'.format(method,c))
+        else:
+            aggFunctions[method] = aggregationFunction(method)
+            headers.append(method)
+    return aggFunctions, headers
+
 #########################
 # regression analysis using statsmodels (and pandas)
 #########################
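
Finally, an illustrative call of the new utils.aggregationMethods helper, mirroring the call sites in scripts/process.py above; the centile values 15 and 85 are arbitrary examples, not values from the changeset.

from trafficintelligence import utils

aggFunctions, headers = utils.aggregationMethods(['median', 'centile'], [15, 85])
print(headers)              # ['median', 'centile15', 'centile85']
print(sorted(aggFunctions)) # ['centile', 'median']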