changeset 1029:c6cf75a2ed08

reorganization of imports: group module-level imports as standard library, third-party, then trafficintelligence; hoist function-level imports to module scope; drop trafficintelligence/requirements.txt
author Nicolas Saunier <nicolas.saunier@polymtl.ca>
date Mon, 18 Jun 2018 22:50:59 -0400
parents cc5cb04b04b0
children aafbc0bab925
files trafficintelligence/cvutils.py trafficintelligence/indicators.py trafficintelligence/moving.py trafficintelligence/pavement.py trafficintelligence/prediction.py trafficintelligence/processing.py trafficintelligence/requirements.txt trafficintelligence/traffic_engineering.py trafficintelligence/utils.py
diffstat 9 files changed, 38 insertions(+), 65 deletions(-)
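Every hunk below applies the same layout: standard-library imports first, then third-party packages, then trafficintelligence modules, with function-level imports hoisted from inside functions to the top of the file. A minimal sketch of the resulting module header, modelled on the new prediction.py (the section comments are added here for illustration):

#! /usr/bin/env python
'''Library for motion prediction methods'''

# standard library
import math, random
from copy import copy

# third-party packages
import numpy as np

# project modules, always imported through the package name
from trafficintelligence import moving
from trafficintelligence.utils import LCSS
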
--- a/trafficintelligence/cvutils.py	Fri Jun 15 11:19:10 2018 -0400
+++ b/trafficintelligence/cvutils.py	Mon Jun 18 22:50:59 2018 -0400
@@ -1,7 +1,16 @@
 #! /usr/bin/env python
 '''Image/Video utilities'''
 
-from trafficintelligence import utils, moving
+from sys import stdout
+from os import listdir
+from subprocess import run
+from math import floor, log10, ceil
+from time import time
+
+from numpy import dot, array, append, float32, loadtxt, savetxt, append, zeros, ones, identity, abs as npabs, logical_and, unravel_index, sum as npsum, isnan, mgrid, median, floor as npfloor, ceil as npceil
+from numpy.linalg import inv
+from matplotlib.mlab import find
+from matplotlib.pyplot import imread, imsave, imshow, figure, subplot
 
 try:
     import cv2
@@ -16,15 +25,7 @@
     print('Scikit-image library could not be loaded (HoG-based classification methods will not be available)')
     skimageAvailable = False
     
-from sys import stdout
-from os import listdir
-from subprocess import run
-from math import floor, log10, ceil
-
-from numpy import dot, array, append, float32, loadtxt, savetxt, append, zeros, ones, identity, abs as npabs, logical_and, unravel_index, sum as npsum, isnan, mgrid, median, floor as npfloor, ceil as npceil
-from numpy.linalg import inv
-from matplotlib.mlab import find
-from matplotlib.pyplot import imread, imsave
+from trafficintelligence import utils, moving
 
 videoFilenameExtensions = ['mov', 'avi', 'mp4', 'MOV', 'AVI', 'MP4']
 trackerExe = 'feature-based-tracking'
@@ -103,10 +104,9 @@
 
     def cvImshow(windowName, img, rescale = 1.0):
         'Rescales the image (in particular if too large)'
-        from cv2 import resize
         if rescale != 1.:
             size = (int(round(img.shape[1]*rescale)), int(round(img.shape[0]*rescale)))
-            resizedImg = resize(img, size)
+            resizedImg = cv2.resize(img, size)
             cv2.imshow(windowName, resizedImg)
         else:
             cv2.imshow(windowName, img)
@@ -303,7 +303,6 @@
         if undistort:
             cmd += ['--undistort', 'true']
             if intrinsicCameraMatrix is not None: # we currently have to save a file
-                from time import time
                 intrinsicCameraFilename = '/tmp/intrinsic-{}.txt'.format(time())
                 savetxt(intrinsicCameraFilename, intrinsicCameraMatrix)
                 cmd += ['--intrinsic-camera-filename', intrinsicCameraFilename]
@@ -628,7 +627,6 @@
         inputImg = transform.resize(bwImg, rescaleSize)
         features = hog(inputImg, orientations, pixelsPerCell, cellsPerBlock, blockNorm, visualize, transformSqrt, True)
         if visualize:
-            from matplotlib.pyplot import imshow, figure, subplot
             hogViz = features[1]
             features = features[0]
             figure()
--- a/trafficintelligence/indicators.py	Fri Jun 15 11:19:10 2018 -0400
+++ b/trafficintelligence/indicators.py	Mon Jun 18 22:50:59 2018 -0400
@@ -1,13 +1,14 @@
 #! /usr/bin/env python
 '''Class for indicators, temporal indicators, and safety indicators'''
 
-from trafficintelligence import moving
-#import matplotlib.nxutils as nx
 from matplotlib.pyplot import plot, ylim
 from matplotlib.pylab import find
 from numpy import array, arange, mean, floor, mean
 from scipy import percentile
 
+from trafficintelligence import moving
+from trafficintelligence.utils import LCSS as utilsLCSS
+
 def multivariateName(indicatorNames):
     return '_'.join(indicatorNames)
 
@@ -122,8 +123,6 @@
             n += 1
     return n >= nDimensions*proportionMatching
 
-from utils import LCSS as utilsLCSS
-
 class LCSS(utilsLCSS):
     '''Adapted LCSS class for indicators, same pattern'''
     def __init__(self, similarityFunc, delta = float('inf'), minLength = 0, aligned = False, lengthFunc = min):
--- a/trafficintelligence/moving.py	Fri Jun 15 11:19:10 2018 -0400
+++ b/trafficintelligence/moving.py	Mon Jun 18 22:50:59 2018 -0400
@@ -1,16 +1,17 @@
 #! /usr/bin/env python
 '''Libraries for moving objects, trajectories...'''
 
-from trafficintelligence import utils, cvutils
-from trafficintelligence.base import VideoFilenameAddable
+import copy
+from math import sqrt, atan2, cos, sin
 
-from math import sqrt, atan2, cos, sin
 from numpy import median, mean, array, arange, zeros, ones, hypot, NaN, std, floor, float32, argwhere, minimum
 from matplotlib.pyplot import plot, text
 from scipy.stats import scoreatpercentile
 from scipy.spatial.distance import cdist
 from scipy.signal import savgol_filter
-import copy
+
+from trafficintelligence import utils, cvutils
+from trafficintelligence.base import VideoFilenameAddable
 
 try:
     from shapely.geometry import Polygon, Point as shapelyPoint
--- a/trafficintelligence/pavement.py	Fri Jun 15 11:19:10 2018 -0400
+++ b/trafficintelligence/pavement.py	Mon Jun 18 22:50:59 2018 -0400
@@ -1,10 +1,11 @@
 #! /usr/bin/env python
 '''Tools for processing and analyzing pavement marking data'''
 
-from trafficintelligence import utils
+import datetime
 
 import numpy as np
 
+from trafficintelligence import utils
 
 paintTypes = {0: "Non-existant",
               1: "Eau",
@@ -104,7 +105,6 @@
 def winterMaintenanceIndicators(data, startDate, endDate, circuitReference, snowThreshold):
     '''Computes several winter maintenance indicators
     data = entretien_hivernal = pylab.csv2rec('C:\\Users\Alexandre\Documents\Cours\Poly\Projet\mesures_entretien_hivernal\mesures_deneigement.txt', delimiter = ',')'''
-    import datetime
     somme_eau, somme_neige, somme_abrasif, somme_sel, somme_lc, somme_lrg, somme_lrd, compteur_premiere_neige, compteur_somme_abrasif = 0,0,0,0,0,0,0,0,0
 
     if circuitReference in data['ref_circuit']:
--- a/trafficintelligence/prediction.py	Fri Jun 15 11:19:10 2018 -0400
+++ b/trafficintelligence/prediction.py	Mon Jun 18 22:50:59 2018 -0400
@@ -1,14 +1,13 @@
 #! /usr/bin/env python
 '''Library for motion prediction methods'''
 
-import moving
-from utils import LCSS
-
 import math, random
 from copy import copy
+
 import numpy as np
-#from multiprocessing import Pool
 
+from trafficintelligence import moving
+from trafficintelligence.utils import LCSS
 
 class PredictedTrajectory(object):
     '''Class for predicted trajectories with lazy evaluation
--- a/trafficintelligence/processing.py	Fri Jun 15 11:19:10 2018 -0400
+++ b/trafficintelligence/processing.py	Mon Jun 18 22:50:59 2018 -0400
@@ -1,10 +1,9 @@
 #! /usr/bin/env python
 '''Algorithms to process trajectories and moving objects'''
 
-import moving
-
 import numpy as np
 
+from trafficintelligence import moving
 
 def extractSpeeds(objects, zone):
     speeds = {}
--- a/trafficintelligence/requirements.txt	Fri Jun 15 11:19:10 2018 -0400
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,10 +0,0 @@
-matplotlib
-numpy
-
-The following libraries are optional. They are necessary for (sometimes very) specific classes/functions.
-
-Computer Vision (cvutils.py): opencv, scikit-image
-Statistics and machine learning (ml.py): scipy, scikit-learn
-Moving object geometry (currently commented) (moving.py) and plotting shapely polygons (utils.py): shapely
-Tabular data loading/processing (storage.py): pandas
-Optimization: munkres
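The file removed above documented optional dependencies that only specific functions need; the modules gate those imports at load time rather than requiring them, as cvutils.py does above for OpenCV and scikit-image. A minimal sketch of that guard, applied here to shapely as imported by moving.py; the shapelyAvailable flag name and the message text are illustrative, not taken from the repository:

# optional-dependency guard, mirroring the scikit-image guard shown in cvutils.py
try:
    from shapely.geometry import Polygon, Point as shapelyPoint
    shapelyAvailable = True  # illustrative flag name
except ImportError:
    print('Shapely library could not be loaded (geometry helpers will not be available)')
    shapelyAvailable = False
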
--- a/trafficintelligence/traffic_engineering.py	Fri Jun 15 11:19:10 2018 -0400
+++ b/trafficintelligence/traffic_engineering.py	Mon Jun 18 22:50:59 2018 -0400
@@ -1,10 +1,9 @@
 #! /usr/bin/env python
 ''' Traffic Engineering Tools and Examples'''
 
-from trafficintelligence import prediction
-
 from math import ceil
 
+from trafficintelligence import prediction
 
 #########################
 # Simulation
--- a/trafficintelligence/utils.py	Fri Jun 15 11:19:10 2018 -0400
+++ b/trafficintelligence/utils.py	Mon Jun 18 22:50:59 2018 -0400
@@ -1,17 +1,21 @@
 #! /usr/bin/env python
-# -*- coding: utf-8 -*-
 ''' Generic utilities.'''
 
-import matplotlib.pyplot as plt
 from datetime import time, datetime
 from argparse import ArgumentTypeError
 from pathlib import Path
 from math import sqrt, ceil, floor
-from scipy.stats import rv_continuous, kruskal, shapiro, lognorm
+from copy import deepcopy, copy
+
+from scipy.stats import rv_continuous, kruskal, shapiro, lognorm, norm, t
 from scipy.spatial import distance
 from scipy.sparse import dok_matrix
-from numpy import zeros, array, exp, sum as npsum, int as npint, arange, cumsum, mean, median, percentile, isnan, ones, convolve,  dtype, isnan, NaN, ma, isinf, savez, load as npload, log
+from numpy import zeros, array, exp, sum as npsum, int as npint, arange, cumsum, mean, median, percentile, isnan, ones, convolve,  dtype, isnan, NaN, ma, isinf, savez, load as npload, log, polyfit
+from numpy.random import permutation as nppermutation
+from pandas import DataFrame, concat
+import matplotlib.pyplot as plt
 
+from trafficintelligence.storage import openCheck
 
 datetimeFormat = "%Y-%m-%d %H:%M:%S"
 
@@ -67,7 +71,6 @@
     return log(scale), shape
 
 def sampleSize(stdev, tolerance, percentConfidence, nRoundingDigits = None, printLatex = False):
-    from scipy.stats.distributions import norm
     if nRoundingDigits is None:
         k = round(norm.ppf(0.5+percentConfidence/200., 0, 1), 2) # 1.-(100-percentConfidence)/200.
     else:
@@ -84,7 +87,6 @@
     Use otherwise t.interval or norm.interval for the boundaries
     ex: norm.interval(0.95)
     t.interval(0.95, nSamples-1)'''
-    from scipy.stats.distributions import norm, t
     if trueStd:
         k = round(norm.ppf(0.5+percentConfidence/200., 0, 1), 2)
     else: # use Student
@@ -344,7 +346,6 @@
 def cat_mvgavg(cat_list, halfWidth):
     ''' Return a list of categories/values smoothed according to a window. 
         halfWidth is the search radius on either side'''
-    from copy import deepcopy
     smoothed = deepcopy(cat_list)
     for point in range(len(cat_list)):
         lower_bound_check = max(0,point-halfWidth)
@@ -366,8 +367,6 @@
 def linearRegression(x, y, deg = 1, plotData = False):
     '''returns the least square estimation of the linear regression of y = ax+b
     as well as the plot'''
-    from numpy.lib.polynomial import polyfit
-    from numpy.core.multiarray import arange
     coef = polyfit(x, y, deg)
     if plotData:
         def poly(x):
@@ -437,7 +436,6 @@
     independentVariableValues = sorted(tmp[independentVariable].unique().tolist())
     if len(independentVariableValues) >= 2:
         if saveLatex:
-            from storage import openCheck
             out = openCheck(filenamePrefix+'-{}-{}.tex'.format(dependentVariable, independentVariable), 'w')
         for x in independentVariableValues:
             print('Shapiro-Wilk normality test for {} when {}={}: {} obs'.format(dependentVariable,independentVariable, x, len(tmp.loc[tmp[independentVariable] == x, dependentVariable])))
@@ -476,8 +474,6 @@
     text is the template to display for the two types of printout (see default): 3 elements if no saving to latex file, 8 otherwise
 
     TODO: pass the dummies for nominal variables and remove if all dummies are correlated, or none is correlated with the dependentvariable'''    
-    from copy import copy
-    from pandas import DataFrame
     result = copy(independentVariables)
     table1 = ''
     table2 = {}
@@ -516,7 +512,6 @@
                 table2['Valeurs p'].append(p)
 
     if saveFiles:
-        from storage import openCheck
         out = openCheck(filenamePrefix+'-removed-variables.tex', 'w')
         out.write(latexHeader)
         out.write(table1)
@@ -598,7 +593,6 @@
 
 def generateExperiments(independentVariables):
     '''Generates all possible models for including or not each independent variable'''
-    from pandas import DataFrame
     experiments = {}
     nIndependentVariables = len(independentVariables)
     if nIndependentVariables != len(set(independentVariables)):
@@ -620,8 +614,6 @@
     '''Generates all possible model with the independentVariables
     and runs them, saving the results in experiments
     with multiprocess option'''
-    from pandas import concat
-    from multiprocessing import Pool
     experiments = generateExperiments(independentVariables)
     nModels = len(experiments)
     print("Running {} models with {} processes".format(nModels, nProcesses))
@@ -642,7 +634,6 @@
     The results are added to experiments if provided as argument
     Storing in experiment relies on the index being the number equal 
     to the binary code derived from the independent variables'''
-    from numpy.random import permutation as nppermutation
     if experiments is None:
         experiments = generateExperiments(independentVariables)
     nIndependentVariables = len(independentVariables)
@@ -712,7 +703,6 @@
             plt.ylabel(text['qqplot.ylabel'])
         plt.tight_layout()
         if filenamePrefix is not None:
-            from storage import openCheck
             out = openCheck(filenamePrefix+'-coefficients.html', 'w')
             out.write(results.summary().as_html())
             plt.savefig(filenamePrefix+'-model-results.'+figureFileType)
@@ -885,9 +875,8 @@
 
 def plotPolygon(poly, options = '', **kwargs):
     'Plots shapely polygon poly'
-    from matplotlib.pyplot import plot
     x,y = poly.exterior.xy
-    plot(x, y, options, **kwargs)
+    plt.plot(x, y, options, **kwargs)
 
 def stepPlot(X, firstX, lastX, initialCount = 0, increment = 1):
     '''for each value in X, increment by increment the initial count
@@ -927,7 +916,6 @@
     plt.rc('axes', prop_cycle=monochrome)
 
 def plotIndicatorMap(indicatorMap, squareSize, masked = True, defaultValue=-1):
-    from matplotlib.pyplot import pcolor
     coords = array(list(indicatorMap.keys()))
     minX = min(coords[:,0])
     minY = min(coords[:,1])
@@ -937,9 +925,9 @@
     for k,v in indicatorMap.items():
         C[k[1]-minY,k[0]-minX] = v
     if masked:
-        pcolor(X, Y, ma.masked_where(C==defaultValue,C))
+        plt.pcolor(X, Y, ma.masked_where(C==defaultValue,C))
     else:
-        pcolor(X, Y, C)
+        plt.pcolor(X, Y, C)
 
 #########################
 # Data download