changeset 1084:1a7e0b2c858b

remove debugging
author Nicolas Saunier <nicolas.saunier@polymtl.ca>
date Tue, 24 Jul 2018 01:24:42 -0400
parents 5b597b021aed
children 7853106677b7
files scripts/process.py
diffstat 1 files changed, 2 insertions(+), 2 deletions(-)
line diff
--- a/scripts/process.py	Mon Jul 23 20:17:27 2018 -0400
+++ b/scripts/process.py	Tue Jul 24 01:24:42 2018 -0400
@@ -336,7 +336,7 @@
         data.to_csv(args.eventFilename, index = False)
 
 if args.analyze == 'event-speed': # aggregate event data by 15 min interval (args.intervalDuration), count events with thresholds
-    data = pd.read_csv(args.eventFilename, parse_dates = [2], nrows = 10000)
+    data = pd.read_csv(args.eventFilename, parse_dates = [2])
     #data = pd.read_csv('./speeds.csv', converters = {'time': lambda s: datetime.datetime.strptime(s, "%H:%M:%S").time()}, nrows = 5000)
     # create time for end of each 15 min, then group by, using the agg method for each data column
     headers = ['site', 'date', 'intervalend15', 'duration', 'count']
@@ -373,7 +373,7 @@
     pd.DataFrame(outputData, columns = headers).to_csv(utils.removeExtension(args.eventFilename)+'-aggregated.csv', index = False)
 
 elif args.analyze == 'event-interaction': # aggregate event data by 15 min interval (args.intervalDuration), count events with thresholds
-    data = pd.read_csv(args.eventFilename, parse_dates = [2], nrows = 20000)
+    data = pd.read_csv(args.eventFilename, parse_dates = [2])
     headers = ['site', 'date', 'intervalend15', 'duration', 'count']
     aggFunctions, tmpheaders = utils.aggregationMethods(args.aggMethods, args.aggCentiles)
     dataColumns = list(data.columns[3:])
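The dropped nrows arguments were debugging limits that capped reads at the first 10000 and 20000 rows; with them removed, both the event-speed and event-interaction branches load the full event file before aggregating. As an illustration of the 15-minute interval aggregation both branches perform, here is a minimal pandas sketch, not taken from process.py: the 'speed' column and the simple mean/85th-centile aggregations are assumptions for the example, whereas the real script builds its aggregation functions from utils.aggregationMethods(args.aggMethods, args.aggCentiles).

import pandas as pd

def aggregate_by_interval(filename, interval_minutes=15):
    # parse the third column as datetimes, mirroring parse_dates=[2] in process.py
    data = pd.read_csv(filename, parse_dates=[2])
    timecol = data.columns[2]
    # label each row with the end of its 15-minute interval
    data['intervalend15'] = data[timecol].dt.ceil('{}min'.format(interval_minutes))
    # hypothetical aggregation: event count plus mean and 85th-centile of a 'speed' column
    return data.groupby('intervalend15').agg(
        count=(timecol, 'size'),
        meanSpeed=('speed', 'mean'),
        speed85=('speed', lambda s: s.quantile(0.85)))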