changeset 1030:aafbc0bab925

moved method around to avoid cross-dependencies
author Nicolas Saunier <nicolas.saunier@polymtl.ca>
date Tue, 19 Jun 2018 10:04:52 -0400
parents c6cf75a2ed08
children 045cb04ad7b8
files trafficintelligence/moving.py trafficintelligence/poly-utils.py trafficintelligence/poly_utils.py trafficintelligence/storage.py trafficintelligence/tests/cvutils.txt trafficintelligence/tests/events.txt trafficintelligence/tests/indicators.txt trafficintelligence/tests/ml.txt trafficintelligence/tests/moving.txt trafficintelligence/tests/prediction.txt trafficintelligence/tests/storage.txt trafficintelligence/tests/utils.txt trafficintelligence/ubc_utils.py trafficintelligence/utils.py
diffstat 14 files changed, 216 insertions(+), 212 deletions(-) [+]
line wrap: on
line diff
--- a/trafficintelligence/moving.py	Mon Jun 18 22:50:59 2018 -0400
+++ b/trafficintelligence/moving.py	Tue Jun 19 10:04:52 2018 -0400
@@ -10,9 +10,6 @@
 from scipy.spatial.distance import cdist
 from scipy.signal import savgol_filter
 
-from trafficintelligence import utils, cvutils
-from trafficintelligence.base import VideoFilenameAddable
-
 try:
     from shapely.geometry import Polygon, Point as shapelyPoint
     from shapely.prepared import prep, PreparedGeometry
@@ -21,6 +18,9 @@
     print('Shapely library could not be loaded')
     shapelyAvailable = False
 
+from trafficintelligence import utils, cvutils
+from trafficintelligence.base import VideoFilenameAddable
+
 
 class Interval(object):
     '''Generic interval: a subset of real numbers (not iterable)'''
--- a/trafficintelligence/poly-utils.py	Mon Jun 18 22:50:59 2018 -0400
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,125 +0,0 @@
-#! /usr/bin/env python
-'''Various utilities to load data saved by the POLY new output(s)'''
-
-from moving import  TimeInterval
-from indicators import SeverityIndicator
-
-import sys, utils
-import numpy as np
-
-
-def loadNewInteractions(videoFilename,interactionType,dirname, extension, indicatorsNames, roaduserNum1,roaduserNum2, selectedIndicators=[]):
-    '''Loads interactions from the POLY traffic event format'''
-    from events import Interaction 
-    filename= dirname + videoFilename + extension
-    #filename= dirname + interactionType+ '-' + videoFilename + extension # case of min distance todo: change the saving format to be matched with all outputs
-    file = utils.openCheck(filename)
-    if (not file):
-        return []
-    #interactions = []
-    interactionNum = 0
-    data= np.loadtxt(filename)
-    indicatorFrameNums= data[:,0]
-    inter = Interaction(interactionNum, TimeInterval(indicatorFrameNums[0],indicatorFrameNums[-1]), roaduserNum1, roaduserNum2) 
-    inter.addVideoFilename(videoFilename)
-    inter.addInteractionType(interactionType)
-    for key in indicatorsNames:
-        values= {}
-        for i,t in enumerate(indicatorFrameNums):
-            values[t] = data[i,key]
-        inter.addIndicator(SeverityIndicator(indicatorsNames[key], values))
-    if selectedIndicators !=[]:
-        values= {}
-        for i,t in enumerate(indicatorFrameNums):
-            values[t] = [data[i,index] for index in selectedIndicators]
-        inter.addIndicator(SeverityIndicator('selectedIndicators', values))    
-        
-    #interactions.append(inter)
-    file.close()
-    #return interactions
-    return inter
-
-# Plotting results
-
-frameRate = 15.
-
-# To run in directory that contains the directories that contain the results (Miss-xx and Incident-xx)
-#dirname = '/home/nicolas/Research/Data/kentucky-db/'
-
-interactingRoadUsers = {'Miss/0404052336': [(0,3)] # 0,2 and 1 vs 3
-                        #,
-                        #'Incident/0306022035': [(1,3)]
-                        #,
-                        #'Miss/0208030956': [(4,5),(5,7)]
-                        }
-
-
-def getIndicatorName(filename, withUnit = False):
-    if withUnit:
-        unit = ' (s)'
-    else:
-        unit = ''
-    if 'collision-point' in filename:
-        return 'TTC'+unit
-    elif 'crossing' in filename:
-        return 'pPET'+unit
-    elif 'probability' in filename:
-        return 'P(UEA)'
-
-def getMethodName(fileprefix):
-    if fileprefix == 'constant-velocity':
-        return 'Con. Vel.'
-    elif fileprefix == 'normal-adaptation':
-        return 'Norm. Ad.'
-    elif fileprefix == 'point-set':
-        return 'Pos. Set'
-    elif fileprefix == 'evasive-action':
-        return 'Ev. Act.'
-    elif fileprefix == 'point-set-evasive-action':
-        return 'Pos. Set'
-
-indicator2TimeIdx = {'TTC':2,'pPET':2, 'P(UEA)':3}
-
-def getDataAtInstant(data, i):
-    return data[data[:,2] == i]
-
-def getPointsAtInstant(data, i):
-    return getDataAtInstant(i)[3:5]
-
-def getIndicator(data, roadUserNumbers, indicatorName):
-    if data.ndim ==1:
-        data.shape = (1,data.shape[0])
-
-    # find the order for the roadUserNumbers
-    uniqueObj1 = np.unique(data[:,0])
-    uniqueObj2 = np.unique(data[:,1])
-    found = False
-    if roadUserNumbers[0] in uniqueObj1 and roadUserNumbers[1] in uniqueObj2:
-        objNum1 = roadUserNumbers[0]
-        objNum2 = roadUserNumbers[1]
-        found = True
-    if roadUserNumbers[1] in uniqueObj1 and roadUserNumbers[0] in uniqueObj2:
-        objNum1 = roadUserNumbers[1]
-        objNum2 = roadUserNumbers[0]
-        found = True
-
-    # get subset of data for road user numbers
-    if found:
-        roadUserData = data[np.logical_and(data[:,0] == objNum1, data[:,1] == objNum2),:]
-        if roadUserData.size > 0:
-            time = np.unique(roadUserData[:,indicator2TimeIdx[indicatorName]])
-            values = {}
-            if indicatorName == 'P(UEA)':
-                tmp = roadUserData[:,4]
-                for k,v in zip(time, tmp):
-                    values[k]=v
-                return SeverityIndicator(indicatorName, values, mostSevereIsMax = False, maxValue = 1.), roadUserData
-            else:
-                for i in range(time[0],time[-1]+1):
-                    try:
-                        tmp = getDataAtInstant(roadUserData, i)
-                        values[i] = np.sum(tmp[:,5]*tmp[:,6])/np.sum(tmp[:,5])/frameRate
-                    except IOError:
-                        values[i] = np.inf
-                return SeverityIndicator(indicatorName, values, mostSevereIsMax = False), roadUserData
-    return None, None
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trafficintelligence/poly_utils.py	Tue Jun 19 10:04:52 2018 -0400
@@ -0,0 +1,125 @@
+#! /usr/bin/env python
+'''Various utilities to load data saved by the POLY new output(s)'''
+
+from trafficintelligence.moving import TimeInterval
+from trafficintelligence.indicators import SeverityIndicator
+
+from trafficintelligence import utils
+import numpy as np
+
+
+def loadNewInteractions(videoFilename,interactionType,dirname, extension, indicatorsNames, roaduserNum1,roaduserNum2, selectedIndicators=[]):
+    '''Loads interactions from the POLY traffic event format'''
+    from trafficintelligence.events import Interaction 
+    filename= dirname + videoFilename + extension
+    #filename= dirname + interactionType+ '-' + videoFilename + extension # case of min distance todo: change the saving format to be matched with all outputs
+    file = utils.openCheck(filename)
+    if (not file):
+        return []
+    #interactions = []
+    interactionNum = 0
+    data= np.loadtxt(filename)
+    indicatorFrameNums= data[:,0]
+    inter = Interaction(interactionNum, TimeInterval(indicatorFrameNums[0],indicatorFrameNums[-1]), roaduserNum1, roaduserNum2) 
+    inter.addVideoFilename(videoFilename)
+    inter.addInteractionType(interactionType)
+    for key in indicatorsNames:
+        values= {}
+        for i,t in enumerate(indicatorFrameNums):
+            values[t] = data[i,key]
+        inter.addIndicator(SeverityIndicator(indicatorsNames[key], values))
+    if selectedIndicators !=[]:
+        values= {}
+        for i,t in enumerate(indicatorFrameNums):
+            values[t] = [data[i,index] for index in selectedIndicators]
+        inter.addIndicator(SeverityIndicator('selectedIndicators', values))    
+        
+    #interactions.append(inter)
+    file.close()
+    #return interactions
+    return inter
+
+# Plotting results
+
+frameRate = 15.
+
+# To run in directory that contains the directories that contain the results (Miss-xx and Incident-xx)
+#dirname = '/home/nicolas/Research/Data/kentucky-db/'
+
+interactingRoadUsers = {'Miss/0404052336': [(0,3)] # 0,2 and 1 vs 3
+                        #,
+                        #'Incident/0306022035': [(1,3)]
+                        #,
+                        #'Miss/0208030956': [(4,5),(5,7)]
+                        }
+
+
+def getIndicatorName(filename, withUnit = False):
+    if withUnit:
+        unit = ' (s)'
+    else:
+        unit = ''
+    if 'collision-point' in filename:
+        return 'TTC'+unit
+    elif 'crossing' in filename:
+        return 'pPET'+unit
+    elif 'probability' in filename:
+        return 'P(UEA)'
+
+def getMethodName(fileprefix):
+    if fileprefix == 'constant-velocity':
+        return 'Con. Vel.'
+    elif fileprefix == 'normal-adaptation':
+        return 'Norm. Ad.'
+    elif fileprefix == 'point-set':
+        return 'Pos. Set'
+    elif fileprefix == 'evasive-action':
+        return 'Ev. Act.'
+    elif fileprefix == 'point-set-evasive-action':
+        return 'Pos. Set'
+
+indicator2TimeIdx = {'TTC':2,'pPET':2, 'P(UEA)':3}
+
+def getDataAtInstant(data, i):
+    return data[data[:,2] == i]
+
+def getPointsAtInstant(data, i):
+    return getDataAtInstant(data, i)[3:5]
+
+def getIndicator(data, roadUserNumbers, indicatorName):
+    if data.ndim ==1:
+        data.shape = (1,data.shape[0])
+
+    # find the order for the roadUserNumbers
+    uniqueObj1 = np.unique(data[:,0])
+    uniqueObj2 = np.unique(data[:,1])
+    found = False
+    if roadUserNumbers[0] in uniqueObj1 and roadUserNumbers[1] in uniqueObj2:
+        objNum1 = roadUserNumbers[0]
+        objNum2 = roadUserNumbers[1]
+        found = True
+    if roadUserNumbers[1] in uniqueObj1 and roadUserNumbers[0] in uniqueObj2:
+        objNum1 = roadUserNumbers[1]
+        objNum2 = roadUserNumbers[0]
+        found = True
+
+    # get subset of data for road user numbers
+    if found:
+        roadUserData = data[np.logical_and(data[:,0] == objNum1, data[:,1] == objNum2),:]
+        if roadUserData.size > 0:
+            time = np.unique(roadUserData[:,indicator2TimeIdx[indicatorName]])
+            values = {}
+            if indicatorName == 'P(UEA)':
+                tmp = roadUserData[:,4]
+                for k,v in zip(time, tmp):
+                    values[k]=v
+                return SeverityIndicator(indicatorName, values, mostSevereIsMax = False, maxValue = 1.), roadUserData
+            else:
+                for i in range(time[0],time[-1]+1):
+                    try:
+                        tmp = getDataAtInstant(roadUserData, i)
+                        values[i] = np.sum(tmp[:,5]*tmp[:,6])/np.sum(tmp[:,5])/frameRate
+                    except IOError:
+                        values[i] = np.inf
+                return SeverityIndicator(indicatorName, values, mostSevereIsMax = False), roadUserData
+    return None, None
--- a/trafficintelligence/storage.py	Mon Jun 18 22:50:59 2018 -0400
+++ b/trafficintelligence/storage.py	Tue Jun 19 10:04:52 2018 -0400
@@ -2,20 +2,17 @@
 # -*- coding: utf-8 -*-
 '''Various utilities to save and load data'''
 
-from trafficintelligence import utils, moving, events, indicators
-from trafficintelligence.base import VideoFilenameAddable
-
 from pathlib import Path
 import shutil
 from copy import copy
 import sqlite3, logging
+
 from numpy import log, min as npmin, max as npmax, round as npround, array, sum as npsum, loadtxt, floor as npfloor, ceil as npceil, linalg
 from pandas import read_csv, merge
 
+from trafficintelligence import utils, moving, events, indicators
+from trafficintelligence.base import VideoFilenameAddable
 
-commentChar = '#'
-
-delimiterChar = '%';
 
 ngsimUserTypes = {'twowheels':1,
                   'car':2,
@@ -881,48 +878,19 @@
 # txt files
 #########################
 
-def openCheck(filename, option = 'r', quitting = False):
-    '''Open file filename in read mode by default
-    and checks it is open'''
-    try:
-        return open(filename, option)
-    except IOError:
-        print('File {} could not be opened.'.format(filename))
-        if quitting:
-            from sys import exit
-            exit()
-        return None
-
-def readline(f, commentCharacters = commentChar):
-    '''Modified readline function to skip comments
-    Can take a list of characters or a string (in will work in both)'''
-    s = f.readline()
-    while (len(s) > 0) and s[0] in commentCharacters:
-        s = f.readline()
-    return s.strip()
-
-def getLines(f, delimiterChar = delimiterChar, commentCharacters = commentChar):
-    '''Gets a complete entry (all the lines) in between delimiterChar.'''
-    dataStrings = []
-    s = readline(f, commentCharacters)
-    while len(s) > 0 and s[0] != delimiterChar:
-        dataStrings += [s.strip()]
-        s = readline(f, commentCharacters)
-    return dataStrings
-
 def saveList(filename, l):
-    f = openCheck(filename, 'w')
+    f = utils.openCheck(filename, 'w')
     for x in l:
         f.write('{}\n'.format(x))
     f.close()
 
-def loadListStrings(filename, commentCharacters = commentChar):
-    f = openCheck(filename, 'r')
-    result = getLines(f, commentCharacters)
+def loadListStrings(filename, commentCharacters = utils.commentChar):
+    f = utils.openCheck(filename, 'r')
+    result = utils.getLines(f, commentCharacters)
     f.close()
     return result
 
-def getValuesFromINIFile(filename, option, delimiterChar = '=', commentCharacters = commentChar):
+def getValuesFromINIFile(filename, option, delimiterChar = '=', commentCharacters = utils.commentChar):
     values = []
     for l in loadListStrings(filename, commentCharacters):
         if l.startswith(option):
@@ -942,7 +910,7 @@
 def loadPemsTraffic(filename):
     '''Loads traffic data downloaded from the http://pems.dot.ca.gov clearinghouse 
     into pandas dataframe'''
-    f = openCheck(filename)
+    f = utils.openCheck(filename)
     l = f.readline().strip()
     items = l.split(',')
     headers = ['time', 'station', 'district', 'route', 'direction', 'lanetype', 'length', 'nsamples', 'pctobserved', 'flow', 'occupancy', 'speed', 'delay35', 'delay40', 'delay45', 'delay50', 'delay55', 'delay60']
@@ -960,7 +928,7 @@
     sqlite3 [file.sqlite] < import_fzp.sql'''
     sqlScriptFilename = "import_fzp.sql"
     # create sql file
-    out = openCheck(sqlScriptFilename, "w")
+    out = utils.openCheck(sqlScriptFilename, "w")
     out.write(".separator \";\"\n"+
               "CREATE TABLE IF NOT EXISTS curvilinear_positions (t REAL, trajectory_id INTEGER, link_id INTEGER, lane_id INTEGER, s_coordinate REAL, y_coordinate REAL, speed REAL, PRIMARY KEY (t, trajectory_id));\n"+
               ".import "+filename+" curvilinear_positions\n"+
@@ -968,7 +936,7 @@
     out.close()
     # system call
     from subprocess import run
-    out = openCheck("err.log", "w")
+    out = utils.openCheck("err.log", "w")
     run("sqlite3 "+utils.removeExtension(filename)+".sqlite < "+sqlScriptFilename, stderr = out)
     out.close()
     shutil.os.remove(sqlScriptFilename)
@@ -1027,8 +995,8 @@
                 return list(objects.values())
     else:
         if filename.endswith(".fzp"):
-            inputfile = openCheck(filename, quitting = True)
-            line = readline(inputfile, '*$')
+            inputfile = utils.openCheck(filename, quitting = True)
+            line = utils.readline(inputfile, '*$')
             while len(line) > 0:#for line in inputfile:
                 data = line.strip().split(';')
                 objNum = int(data[1])
@@ -1044,7 +1012,7 @@
                 if (warmUpLastInstant is None or instant >= warmUpLastInstant) and objNum in objects:
                     objects[objNum].timeInterval.last = instant
                     objects[objNum].curvilinearPositions.addPositionSYL(s, y, lane)
-                line = readline(inputfile, '*$')
+                line = utils.readline(inputfile, '*$')
         elif filename.endswith(".sqlite"):
             with sqlite3.connect(filename) as connection:
                 cursor = connection.cursor()
@@ -1156,7 +1124,7 @@
     and returns the list of Feature objects'''
     objects = []
 
-    inputfile = openCheck(filename, quitting = True)
+    inputfile = utils.openCheck(filename, quitting = True)
 
     def createObject(numbers):
         firstFrameNum = int(numbers[1])
@@ -1180,7 +1148,7 @@
         obj.size = [float(numbers[8]), float(numbers[9])] # 8 lengh, 9 width # TODO: temporary, should use a geometry object
         return obj
 
-    numbers = readline(inputfile).strip().split()
+    numbers = utils.readline(inputfile).strip().split()
     if (len(numbers) > 0):
         obj = createObject(numbers)
 
@@ -1218,9 +1186,9 @@
     '''Reads data from the trajectory data provided by NGSIM project
     and converts to our current format.'''
     if append:
-        out = openCheck(outputfile,'a')
+        out = utils.openCheck(outputfile,'a')
     else:
-        out = openCheck(outputfile,'w')
+        out = utils.openCheck(outputfile,'w')
     nObjectsPerType = [0,0,0]
 
     features = loadNgsimFile(inputfile, sequenceNum)
@@ -1237,8 +1205,8 @@
     (pinhole camera model, http://docs.opencv.org/2.4/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html)
     and returns a dictionary'''
     if tanalystFormat:
-        f = openCheck(filename, quitting = True)
-        content = getLines(f)
+        f = utils.openCheck(filename, quitting = True)
+        content = utils.getLines(f)
         cameraData = {}
         for l in content:
             tmp = l.split(':')
@@ -1261,7 +1229,7 @@
         f.write(s+'\n')
 
 def saveTrajectoriesToCsv(filename, objects):
-    f = openCheck(filename, 'w')
+    f = utils.openCheck(filename, 'w')
     for i,obj in enumerate(objects):
         savePositionsToCsv(f, obj)
     f.close()
@@ -1277,7 +1245,7 @@
         from configparser import ConfigParser
 
         config = ConfigParser()
-        config.read_file(addSectionHeader(openCheck(filename)))
+        config.read_file(addSectionHeader(utils.openCheck(filename)))
 
         parentPath = Path(filename).parent
         self.sectionHeader = config.sections()[0]
@@ -1346,7 +1314,7 @@
         from configparser import ConfigParser
 
         config = ConfigParser(strict=False)
-        config.read_file(addSectionHeader(openCheck(filename)))
+        config.read_file(addSectionHeader(utils.openCheck(filename)))
 
         parentPath = Path(filename).parent
         self.sectionHeader = config.sections()[0]
@@ -1462,7 +1430,7 @@
     def loadConfigFile(filename):
         from configparser import ConfigParser
         config = ConfigParser()
-        config.readfp(openCheck(filename))
+        config.readfp(utils.openCheck(filename))
         configDict = dict()
         for sectionName in config.sections():
             configDict[sectionName] = SceneParameters(config, sectionName) 
--- a/trafficintelligence/tests/cvutils.txt	Mon Jun 18 22:50:59 2018 -0400
+++ b/trafficintelligence/tests/cvutils.txt	Tue Jun 19 10:04:52 2018 -0400
@@ -1,5 +1,6 @@
->>> import cv2, cvutils
 >>> from numpy import array, round, ones, dot, linalg, absolute
+>>> import cv2
+>>> from trafficintelligence import cvutils
 >>> img = cv2.imread("../samples/val-dor-117-111.png")
 >>> width = img.shape[1]
 >>> height = img.shape[0]
--- a/trafficintelligence/tests/events.txt	Mon Jun 18 22:50:59 2018 -0400
+++ b/trafficintelligence/tests/events.txt	Tue Jun 19 10:04:52 2018 -0400
@@ -1,6 +1,6 @@
->>> from events import *
->>> from moving import MovingObject, TimeInterval, Point
->>> from prediction import ConstantPredictionParameters
+>>> from trafficintelligence.events import *
+>>> from trafficintelligence.moving import MovingObject, TimeInterval, Point
+>>> from trafficintelligence.prediction import ConstantPredictionParameters
 
 >>> objects = [MovingObject(num = i, timeInterval = TimeInterval(0,10)) for i in range(10)]
 >>> interactions = createInteractions(objects)
--- a/trafficintelligence/tests/indicators.txt	Mon Jun 18 22:50:59 2018 -0400
+++ b/trafficintelligence/tests/indicators.txt	Tue Jun 19 10:04:52 2018 -0400
@@ -1,5 +1,5 @@
->>> from indicators import *
->>> from moving import TimeInterval,Trajectory
+>>> from trafficintelligence.indicators import *
+>>> from trafficintelligence.moving import TimeInterval,Trajectory
 
 >>> indic1 = TemporalIndicator('bla', [0,3,-4], TimeInterval(4,6))
 >>> indic1.empty()
--- a/trafficintelligence/tests/ml.txt	Mon Jun 18 22:50:59 2018 -0400
+++ b/trafficintelligence/tests/ml.txt	Tue Jun 19 10:04:52 2018 -0400
@@ -1,6 +1,6 @@
 >>> from math import fabs
 >>> from numpy import ones
->>> from ml import prototypeCluster
+>>> from trafficintelligence.ml import prototypeCluster
 
 >>> nTrajectories = 7
 >>> similarityFunc = lambda x, y: 1.-fabs(x-y)/(nTrajectories-1)
--- a/trafficintelligence/tests/moving.txt	Mon Jun 18 22:50:59 2018 -0400
+++ b/trafficintelligence/tests/moving.txt	Tue Jun 19 10:04:52 2018 -0400
@@ -1,5 +1,5 @@
->>> from moving import *
->>> import storage
+>>> from trafficintelligence.moving import *
+>>> from trafficintelligence import storage
 >>> import numpy as np
 
 >>> Interval().empty()
--- a/trafficintelligence/tests/prediction.txt	Mon Jun 18 22:50:59 2018 -0400
+++ b/trafficintelligence/tests/prediction.txt	Tue Jun 19 10:04:52 2018 -0400
@@ -1,6 +1,6 @@
->>> from prediction import *
->>> import moving, storage, utils
->>> from numpy import absolute, array
+>>> from trafficintelligence.prediction import *
+>>> from trafficintelligence import moving, storage, utils
+>>> from numpy import absolute, array, max
 
 >>> et = PredictedTrajectoryConstant(moving.Point(0,0), moving.Point(1,0))
 >>> et.predictPosition(4) # doctest:+ELLIPSIS
@@ -21,7 +21,6 @@
 >>> steering = lambda: random.uniform(-0.1,0.1)
 >>> et = PredictedTrajectoryRandomControl(moving.Point(0,0),moving.Point(1,1), acceleration, steering, maxSpeed = 2)
 >>> p = et.predictPosition(500)
->>> from numpy import max
 >>> max(et.getPredictedSpeeds()) <= 2.
 True
 
--- a/trafficintelligence/tests/storage.txt	Mon Jun 18 22:50:59 2018 -0400
+++ b/trafficintelligence/tests/storage.txt	Tue Jun 19 10:04:52 2018 -0400
@@ -1,6 +1,7 @@
->>> from storage import *
 >>> from io import StringIO
->>> from moving import MovingObject, Point, TimeInterval, Trajectory, prepareSplines
+>>> from trafficintelligence.storage import *
+>>> from trafficintelligence.utils import openCheck, readline
+>>> from trafficintelligence.moving import MovingObject, Point, TimeInterval, Trajectory, prepareSplines
 
 >>> f = openCheck('non_existant_file.txt')
 File non_existant_file.txt could not be opened.
--- a/trafficintelligence/tests/utils.txt	Mon Jun 18 22:50:59 2018 -0400
+++ b/trafficintelligence/tests/utils.txt	Tue Jun 19 10:04:52 2018 -0400
@@ -1,5 +1,5 @@
->>> from utils import *
->>> from moving import Point
+>>> from trafficintelligence.utils import *
+>>> from trafficintelligence.moving import Point
 
 >>> upperCaseFirstLetter('mmmm... donuts')
 'Mmmm... Donuts'
--- a/trafficintelligence/ubc_utils.py	Mon Jun 18 22:50:59 2018 -0400
+++ b/trafficintelligence/ubc_utils.py	Tue Jun 19 10:04:52 2018 -0400
@@ -57,8 +57,8 @@
     by just copying the corresponding trajectory and velocity data
     from the inFilename, and saving the characteristics in objects (first line)
     into outFilename'''
-    infile = storage.openCheck(inFilename)
-    outfile = storage.openCheck(outFilename,'w')
+    infile = utils.openCheck(inFilename)
+    outfile = utils.openCheck(outFilename,'w')
 
     if (inFilename.find('features') >= 0) or (not infile) or (not outfile):
         return
@@ -86,8 +86,8 @@
 
 def modifyTrajectoryFile(modifyLines, filenameIn, filenameOut):
     '''Reads filenameIn, replaces the lines with the result of modifyLines and writes the result in filenameOut'''
-    fileIn = storage.openCheck(filenameIn, 'r', True)
-    fileOut = storage.openCheck(filenameOut, "w", True)
+    fileIn = utils.openCheck(filenameIn, 'r', True)
+    fileOut = utils.openCheck(filenameOut, "w", True)
 
     lines = storage.getLines(fileIn)
     trajNum = 0
@@ -106,8 +106,8 @@
 def copyTrajectoryFile(keepTrajectory, filenameIn, filenameOut):
     '''Reads filenameIn, keeps the trajectories for which the function keepTrajectory(trajNum, lines) is True
     and writes the result in filenameOut'''
-    fileIn = storage.openCheck(filenameIn, 'r', True)
-    fileOut = storage.openCheck(filenameOut, "w", True)
+    fileIn = utils.openCheck(filenameIn, 'r', True)
+    fileOut = utils.openCheck(filenameOut, "w", True)
 
     lines = storage.getLines(fileIn)
     trajNum = 0
@@ -125,7 +125,7 @@
 def loadTrajectories(filename, nObjects = -1):
     '''Loads trajectories'''
 
-    file = storage.openCheck(filename)
+    file = utils.openCheck(filename)
     if (not file):
         return []
 
@@ -177,7 +177,7 @@
     'Loads interactions from the old UBC traffic event format'
     from events import Interaction 
     from indicators import SeverityIndicator
-    file = storage.openCheck(filename)
+    file = utils.openCheck(filename)
     if (not file):
         return []
 
@@ -206,7 +206,7 @@
 def loadCollisionPoints(filename, nPoints = -1):
     '''Loads collision points and returns a dict
     with keys as a pair of the numbers of the two interacting objects'''
-    file = storage.openCheck(filename)
+    file = utils.openCheck(filename)
     if (not file):
         return []
 
--- a/trafficintelligence/utils.py	Mon Jun 18 22:50:59 2018 -0400
+++ b/trafficintelligence/utils.py	Tue Jun 19 10:04:52 2018 -0400
@@ -15,13 +15,48 @@
 from pandas import DataFrame, concat
 import matplotlib.pyplot as plt
 
-from trafficintelligence.storage import openCheck
-
 datetimeFormat = "%Y-%m-%d %H:%M:%S"
 
 sjcamDatetimeFormat = "%Y_%m%d_%H%M%S"#2017_0626_143720
 
 #########################
+# txt files
+#########################
+
+commentChar = '#'
+
+delimiterChar = '%'
+
+def openCheck(filename, option = 'r', quitting = False):
+    '''Open file filename in read mode by default
+    and checks it is open'''
+    try:
+        return open(filename, option)
+    except IOError:
+        print('File {} could not be opened.'.format(filename))
+        if quitting:
+            from sys import exit
+            exit()
+        return None
+
+def readline(f, commentCharacters = commentChar):
+    '''Modified readline function to skip comments
+    Can take a list of characters or a string (in will work in both)'''
+    s = f.readline()
+    while (len(s) > 0) and s[0] in commentCharacters:
+        s = f.readline()
+    return s.strip()
+
+def getLines(f, delimiterChar = delimiterChar, commentCharacters = commentChar):
+    '''Gets a complete entry (all the lines) in between delimiterChar.'''
+    dataStrings = []
+    s = readline(f, commentCharacters)
+    while len(s) > 0 and s[0] != delimiterChar:
+        dataStrings += [s.strip()]
+        s = readline(f, commentCharacters)
+    return dataStrings
+
+#########################
 # Strings
 #########################