Mercurial Hosting > traffic-intelligence
changeset 1061:671426ce0f3e
minor
author | Nicolas Saunier <nicolas.saunier@polymtl.ca> |
---|---|
date | Wed, 11 Jul 2018 17:31:26 -0400 |
parents | c04550f957ab |
children | a2e20aba0740 |
files | scripts/compute-homography.py scripts/process.py |
diffstat | 2 files changed, 2 insertions(+), 2 deletions(-) [+] |
line wrap: on
line diff
--- a/scripts/compute-homography.py	Wed Jul 11 15:29:44 2018 -0400
+++ b/scripts/compute-homography.py	Wed Jul 11 17:31:26 2018 -0400
@@ -30,7 +30,7 @@
 parser.add_argument('--display', dest = 'displayPoints', help = 'display original and projected points on both images', action = 'store_true')
 parser.add_argument('--intrinsic', dest = 'intrinsicCameraMatrixFilename', help = 'name of the intrinsic camera file')
 parser.add_argument('--distortion-coefficients', dest = 'distortionCoefficients', help = 'distortion coefficients', nargs = '*', type = float)
-parser.add_argument('--undistorted-multiplication', dest = 'undistortedImageMultiplication', help = 'undistorted image multiplication', type = float)
+parser.add_argument('--undistorted-multiplication', dest = 'undistortedImageMultiplication', help = 'undistorted image multiplication', type = float, default = 1.)
 parser.add_argument('--undistort', dest = 'undistort', help = 'undistort the video (because features have been extracted that way', action = 'store_true')
 parser.add_argument('--save', dest = 'saveImages', help = 'save the undistorted video frame (display option must be chosen)', action = 'store_true')
--- a/scripts/process.py	Wed Jul 11 15:29:44 2018 -0400
+++ b/scripts/process.py	Wed Jul 11 17:31:26 2018 -0400
@@ -280,7 +280,7 @@
         data = pd.read_csv(args.eventFilename, parse_dates = [2])
         #data = pd.read_csv('./speeds.csv', converters = {'time': lambda s: datetime.datetime.strptime(s, "%H:%M:%S").time()}, nrows = 5000)
         # create time for end of each 15 min, then group by, using the agg method for each data column
-        headers = ['sites', 'date', 'intervalend15', 'duration', 'count']
+        headers = ['site', 'date', 'intervalend15', 'duration', 'count']
         aggFunctions, tmpheaders = utils.aggregationMethods(args.aggMethods, args.aggCentiles)
         dataColumns = list(data.columns[4:])
         for h in dataColumns: