changeset 1050:9d4a06f49cb8

work in progress
author Nicolas Saunier <nicolas.saunier@polymtl.ca>
date Fri, 06 Jul 2018 18:12:15 -0400
parents c9c03c97ed9f
children c3c3a90c08f8
files scripts/process.py trafficintelligence/metadata.py trafficintelligence/storage.py
diffstat 3 files changed, 191 insertions(+), 182 deletions(-)
--- a/scripts/process.py	Fri Jul 06 15:58:45 2018 -0400
+++ b/scripts/process.py	Fri Jul 06 18:12:15 2018 -0400
@@ -61,10 +61,10 @@
 parser.add_argument('--output', dest = 'output', help = 'kind of output to produce (interval means)', choices = ['figure', 'interval', 'event'])
 parser.add_argument('--min-user-duration', dest = 'minUserDuration', help = 'minimum duration we have to see the user to take into account in the analysis (s)', type = float, default = 0.1)
 parser.add_argument('--interval-duration', dest = 'intervalDuration', help = 'length of time interval to aggregate data (min)', type = float, default = 15.)
-parser.add_argument('--aggregation', dest = 'aggMethod', help = 'aggregation method per user/event and per interval', choices = ['mean', 'median', 'centile'], nargs = '*', default = ['median'])
+parser.add_argument('--aggregation', dest = 'aggMethod', help = 'aggregation method per user/interaction and per interval', choices = ['mean', 'median', 'centile'], nargs = '*', default = ['median'])
 parser.add_argument('--aggregation-centile', dest = 'aggCentiles', help = 'centile(s) to compute from the observations', nargs = '*', type = int)
 dpi = 150
-# unit of analysis: site or video sequence?
+# unit of analysis: site (or camera view)
 
 # need way of selecting sites as similar as possible to sql alchemy syntax
 # override tracking.cfg from db
@@ -90,8 +90,7 @@
         tmpsites = getSite(session, siteId)
         sites.extend(tmpsites)
         for site in tmpsites:
-            for cv in site.cameraViews:
-                videoSequences.extend(cv.videoSequences)
+            videoSequences.extend(getSiteVideoSequences(site))
 else:
     print('No video/site to process')
 
@@ -147,7 +146,7 @@
         pool.join()
 
 elif args.process == 'prototype': # motion pattern learning
-    # learn by site by default -> group videos by site (or by camera view? TODO add cameraviews)
+    # learn by site by default -> TODO: group videos by camera view
     # by default, load all objects, learn and then assign (BUT not save the assignments)
     for site in sites:
         print('Learning motion patterns for site {} ({})'.format(site.idx, site.name))
@@ -177,7 +176,6 @@
             outputPrototypeDatabaseFilename = args.databaseFilename
         else:
             outputPrototypeDatabaseFilename = args.outputPrototypeDatabaseFilename
-        # TODO maintain mapping from object prototype to db filename + compute nmatchings before
         clusterSizes = ml.computeClusterSizes(labels, prototypeIndices, -1)
         storage.savePrototypesToSqlite(str(parentPath/site.getPath()/outputPrototypeDatabaseFilename), [moving.Prototype(object2VideoSequences[trainingObjects[i]].getDatabaseFilename(False), trainingObjects[i].getNum(), prototypeType, clusterSizes[i]) for i in prototypeIndices])
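Note: clusterSizes maps each prototype index to the number of training objects assigned to it, which becomes the prototype's nMatchings in the database. A hedged sketch of what ml.computeClusterSizes computes (the body below is an illustration, not the library code):

    def computeClusterSizes(labels, prototypeIndices, outlierIndex = -1):
        'Number of training instances assigned to each prototype (outliers carry outlierIndex)'
        return {i: sum(1 for label in labels if label == i) for i in prototypeIndices}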
 
@@ -232,7 +230,7 @@
         objects = storage.loadTrajectoriesFromSqlite(str(parentPath/vs.getDatabaseFilename()), 'object', args.nObjects)
         for o in objects:
             if o.length() > minUserDuration:
-                row = [vs.cameraView.siteIdx, d, utils.framesToTime(o.getFirstInstant(), vs.cameraView.cameraType.frameRate, t1), o.getUserType()]
+                row = [vs.cameraView.site.name, d, utils.framesToTime(o.getFirstInstant(), vs.cameraView.cameraType.frameRate, t1), o.getUserType()]
                 tmp = o.getSpeeds()
                 for method,func in aggFunctions.items():
                     aggSpeeds = vs.cameraView.cameraType.frameRate*3.6*func(tmp)
@@ -252,7 +250,8 @@
             plt.close()
     elif args.output == 'event':
         data.to_csv('speeds.csv', index = False)
-if args.analyze == 'interaction':
+
+if args.analyze == 'interaction': # TODO redo as for objects: export all interaction data in a dataframe
     indicatorIds = [2,5,7,10]
     conversionFactors = {2: 1., 5: 30.*3.6, 7:1./30, 10:1./30}
     maxIndicatorValue = {2: float('inf'), 5: float('inf'), 7:10., 10:10.}
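Note on the aggregation options in the first hunk above: each name in --aggregation becomes an entry of aggFunctions, applied per user and per interval (see the speed hunk, where func(tmp) aggregates the speed observations). A minimal sketch of one plausible construction, assuming numpy; makeAggFunctions and the 'centileN' key format are hypothetical, not taken from process.py:

    # hypothetical helper turning --aggregation/--aggregation-centile into callables
    from functools import partial
    import numpy as np

    def makeAggFunctions(aggMethods, aggCentiles = None):
        'Maps each aggregation method name to a function of an array of observations'
        aggFunctions = {}
        for method in aggMethods:
            if method == 'centile' and aggCentiles is not None:
                for c in aggCentiles:
                    aggFunctions['centile{}'.format(c)] = partial(np.percentile, q = c)
            else: # 'mean' or 'median'
                aggFunctions[method] = getattr(np, method)
        return aggFunctions

    # usage, matching the loop in the speed hunk:
    # for method, func in makeAggFunctions(args.aggMethod, args.aggCentiles).items():
    #     aggSpeeds = frameRate*3.6*func(speeds)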
--- a/trafficintelligence/metadata.py	Fri Jul 06 15:58:45 2018 -0400
+++ b/trafficintelligence/metadata.py	Fri Jul 06 18:12:15 2018 -0400
@@ -366,6 +366,9 @@
     'Returns the camera view matching the index'
     return session.query(CameraView).filter(CameraView.idx == int(viewId)).first()
 
+def getSiteVideoSequences(site):
+    'Returns all the video sequences of the site, from all its camera views'
+    return [vs for cv in site.cameraViews for vs in cv.videoSequences]
+
 def initializeSites(session, directoryName, nViewsPerSite = 1):
     '''Initializes default site objects and n camera views per site
     
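The new getSiteVideoSequences helper flattens the video sequences of all camera views of a site, replacing the explicit double loop removed from scripts/process.py. A hedged usage sketch (session and siteId obtained as in process.py):

    # equivalent to the removed nested loop over site.cameraViews
    videoSequences = []
    for site in getSite(session, siteId):
        videoSequences.extend(getSiteVideoSequences(site))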
--- a/trafficintelligence/storage.py	Fri Jul 06 15:58:45 2018 -0400
+++ b/trafficintelligence/storage.py	Fri Jul 06 18:12:15 2018 -0400
@@ -259,60 +259,61 @@
     The number loaded is either the first objectNumbers objects,
     or the indices in objectNumbers from the database'''
     objects = []
-    with sqlite3.connect(filename) as connection:
-        objects = loadTrajectoriesFromTable(connection, 'positions', trajectoryType, objectNumbers, timeStep)
-        objectVelocities = loadTrajectoriesFromTable(connection, 'velocities', trajectoryType, objectNumbers, timeStep)
+    if Path(filename).exists():
+        with sqlite3.connect(filename) as connection:
+            objects = loadTrajectoriesFromTable(connection, 'positions', trajectoryType, objectNumbers, timeStep)
+            objectVelocities = loadTrajectoriesFromTable(connection, 'velocities', trajectoryType, objectNumbers, timeStep)
 
-        if len(objectVelocities) > 0:
-            for o,v in zip(objects, objectVelocities):
-                if o.getNum() == v.getNum():
-                    o.velocities = v.positions
-                    o.velocities.duplicateLastPosition() # avoid having velocity shorter by one position than positions
-                else:
-                    print('Could not match positions {0} with velocities {1}'.format(o.getNum(), v.getNum()))
+            if len(objectVelocities) > 0:
+                for o,v in zip(objects, objectVelocities):
+                    if o.getNum() == v.getNum():
+                        o.velocities = v.positions
+                        o.velocities.duplicateLastPosition() # avoid having velocity shorter by one position than positions
+                    else:
+                        print('Could not match positions {0} with velocities {1}'.format(o.getNum(), v.getNum()))
 
-        if trajectoryType == 'object':
-            cursor = connection.cursor()
-            try:
-                # attribute feature numbers to objects
-                queryStatement = 'SELECT trajectory_id, object_id FROM objects_features'
-                if objectNumbers is not None:
-                    queryStatement += ' WHERE object_id '+getObjectCriteria(objectNumbers)
-                queryStatement += ' ORDER BY object_id' # order is important to group all features per object
-                logging.debug(queryStatement)
-                cursor.execute(queryStatement)
+            if trajectoryType == 'object':
+                cursor = connection.cursor()
+                try:
+                    # attribute feature numbers to objects
+                    queryStatement = 'SELECT trajectory_id, object_id FROM objects_features'
+                    if objectNumbers is not None:
+                        queryStatement += ' WHERE object_id '+getObjectCriteria(objectNumbers)
+                    queryStatement += ' ORDER BY object_id' # order is important to group all features per object
+                    logging.debug(queryStatement)
+                    cursor.execute(queryStatement)
 
-                featureNumbers = {}
-                for row in cursor:
-                    objId = row[1]
-                    if objId not in featureNumbers:
-                        featureNumbers[objId] = [row[0]]
-                    else:
-                        featureNumbers[objId].append(row[0])
+                    featureNumbers = {}
+                    for row in cursor:
+                        objId = row[1]
+                        if objId not in featureNumbers:
+                            featureNumbers[objId] = [row[0]]
+                        else:
+                            featureNumbers[objId].append(row[0])
 
-                for obj in objects:
-                    obj.featureNumbers = featureNumbers[obj.getNum()]
+                    for obj in objects:
+                        obj.featureNumbers = featureNumbers[obj.getNum()]
 
-                # load userType
-                attributes = loadObjectAttributesFromTable(cursor, objectNumbers, True)
-                for obj in objects:
-                    userType, nObjects = attributes[obj.getNum()]
-                    obj.setUserType(userType)
-                    obj.setNObjects(nObjects)
-
-                # add features
-                if withFeatures:
+                    # load userType
+                    attributes = loadObjectAttributesFromTable(cursor, objectNumbers, True)
                     for obj in objects:
-                        obj.features = loadTrajectoriesFromSqlite(filename, 'feature', obj.featureNumbers, timeStep = timeStep)
-                elif nLongestFeaturesPerObject is not None:
-                    for obj in objects:
-                        queryStatement = 'SELECT trajectory_id, max(frame_number)-min(frame_number) AS length FROM positions WHERE trajectory_id '+getObjectCriteria(obj.featureNumbers)+' GROUP BY trajectory_id ORDER BY length DESC'
-                        logging.debug(queryStatement)
-                        cursor.execute(queryStatement)
-                        obj.features = loadTrajectoriesFromSqlite(filename, 'feature', [row[0] for i,row in enumerate(cursor) if i<nLongestFeaturesPerObject], timeStep = timeStep)
+                        userType, nObjects = attributes[obj.getNum()]
+                        obj.setUserType(userType)
+                        obj.setNObjects(nObjects)
 
-            except sqlite3.OperationalError as error:
-                printDBError(error)
+                    # add features
+                    if withFeatures:
+                        for obj in objects:
+                            obj.features = loadTrajectoriesFromSqlite(filename, 'feature', obj.featureNumbers, timeStep = timeStep)
+                    elif nLongestFeaturesPerObject is not None:
+                        for obj in objects:
+                            queryStatement = 'SELECT trajectory_id, max(frame_number)-min(frame_number) AS length FROM positions WHERE trajectory_id '+getObjectCriteria(obj.featureNumbers)+' GROUP BY trajectory_id ORDER BY length DESC'
+                            logging.debug(queryStatement)
+                            cursor.execute(queryStatement)
+                            obj.features = loadTrajectoriesFromSqlite(filename, 'feature', [row[0] for i,row in enumerate(cursor) if i<nLongestFeaturesPerObject], timeStep = timeStep)
+
+                except sqlite3.OperationalError as error:
+                    printDBError(error)
     return objects
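All the storage.py hunks in this changeset apply the same guard as above: sqlite3.connect() silently creates an empty database file when given a missing path, so each loader now checks that the file exists first and otherwise returns its empty default. A minimal sketch of the pattern (loadSomething and the positions query are illustrative; Path is assumed imported from pathlib at module level):

    from pathlib import Path
    import sqlite3

    def loadSomething(filename):
        results = [] # default returned when the database file is absent
        if Path(filename).exists():
            with sqlite3.connect(filename) as connection:
                cursor = connection.cursor()
                try:
                    cursor.execute('SELECT * FROM positions')
                    results = [row for row in cursor]
                except sqlite3.OperationalError as error:
                    printDBError(error) # helper defined in storage.py
        return results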
 
 def loadObjectFeatureFrameNumbers(filename, objectNumbers = None):
@@ -451,19 +452,20 @@
 
     Load descriptions?'''
     objects = []
-    with sqlite3.connect(filename) as connection:
-        if objectType == 'bb':
-            topCorners = loadTrajectoriesFromTable(connection, 'bounding_boxes', 'bbtop', objectNumbers, timeStep)
-            bottomCorners = loadTrajectoriesFromTable(connection, 'bounding_boxes', 'bbbottom', objectNumbers, timeStep)
-            userTypes = loadObjectAttributesFromTable(connection.cursor(), objectNumbers) # string format is same as object
+    if Path(filename).exists():
+        with sqlite3.connect(filename) as connection:
+            if objectType == 'bb':
+                topCorners = loadTrajectoriesFromTable(connection, 'bounding_boxes', 'bbtop', objectNumbers, timeStep)
+                bottomCorners = loadTrajectoriesFromTable(connection, 'bounding_boxes', 'bbbottom', objectNumbers, timeStep)
+                userTypes = loadObjectAttributesFromTable(connection.cursor(), objectNumbers) # string format is same as object
 
-            for t, b in zip(topCorners, bottomCorners):
-                num = t.getNum()
-                if t.getNum() == b.getNum():
-                    annotation = moving.BBMovingObject(num, t.getTimeInterval(), t, b, userTypes[num])
-                    objects.append(annotation)
-        else:
-            print ('Unknown type of bounding box {}'.format(objectType))
+                for t, b in zip(topCorners, bottomCorners):
+                    num = t.getNum()
+                    if t.getNum() == b.getNum():
+                        annotation = moving.BBMovingObject(num, t.getTimeInterval(), t, b, userTypes[num])
+                        objects.append(annotation)
+            else:
+                print('Unknown type of bounding box {}'.format(objectType))
     return objects
 
 def saveInteraction(cursor, interaction):
@@ -509,29 +511,30 @@
     
     TODO choose the interactions to load'''
     interactions = []
-    with sqlite3.connect(filename) as connection:
-        cursor = connection.cursor()
-        try:
-            cursor.execute('SELECT INT.id, INT.object_id1, INT.object_id2, INT.first_frame_number, INT.last_frame_number, IND.indicator_type, IND.frame_number, IND.value from interactions INT, indicators IND WHERE INT.id = IND.interaction_id ORDER BY INT.id, IND.indicator_type, IND.frame_number')
-            interactionNum = -1
-            indicatorTypeNum = -1
-            tmpIndicators = {}
-            for row in cursor:
-                if row[0] != interactionNum:
-                    interactionNum = row[0]
-                    interactions.append(events.Interaction(interactionNum, moving.TimeInterval(row[3],row[4]), row[1], row[2]))
-                    interactions[-1].indicators = {}
-                if indicatorTypeNum != row[5] or row[0] != interactionNum:
-                    indicatorTypeNum = row[5]
-                    indicatorName = events.Interaction.indicatorNames[indicatorTypeNum]
-                    indicatorValues = {row[6]:row[7]}
-                    interactions[-1].indicators[indicatorName] = indicators.SeverityIndicator(indicatorName, indicatorValues, mostSevereIsMax = not indicatorName in events.Interaction.timeIndicators)
-                else:
-                    indicatorValues[row[6]] = row[7]
-                    interactions[-1].indicators[indicatorName].timeInterval.last = row[6]
-        except sqlite3.OperationalError as error:
-            printDBError(error)
-            return []
+    if Path(filename).exists():
+        with sqlite3.connect(filename) as connection:
+            cursor = connection.cursor()
+            try:
+                cursor.execute('SELECT INT.id, INT.object_id1, INT.object_id2, INT.first_frame_number, INT.last_frame_number, IND.indicator_type, IND.frame_number, IND.value from interactions INT, indicators IND WHERE INT.id = IND.interaction_id ORDER BY INT.id, IND.indicator_type, IND.frame_number')
+                interactionNum = -1
+                indicatorTypeNum = -1
+                tmpIndicators = {}
+                for row in cursor:
+                    if row[0] != interactionNum:
+                        interactionNum = row[0]
+                        interactions.append(events.Interaction(interactionNum, moving.TimeInterval(row[3],row[4]), row[1], row[2]))
+                        interactions[-1].indicators = {}
+                    if indicatorTypeNum != row[5] or row[0] != interactionNum:
+                        indicatorTypeNum = row[5]
+                        indicatorName = events.Interaction.indicatorNames[indicatorTypeNum]
+                        indicatorValues = {row[6]:row[7]}
+                        interactions[-1].indicators[indicatorName] = indicators.SeverityIndicator(indicatorName, indicatorValues, mostSevereIsMax = not indicatorName in events.Interaction.timeIndicators)
+                    else:
+                        indicatorValues[row[6]] = row[7]
+                        interactions[-1].indicators[indicatorName].timeInterval.last = row[6]
+            except sqlite3.OperationalError as error:
+                printDBError(error)
+                return []
     return interactions
 # load first and last object instants
 # CREATE TEMP TABLE IF NOT EXISTS object_instants AS SELECT OF.object_id, min(frame_number) as first_instant, max(frame_number) as last_instant from positions P, objects_features OF WHERE P.trajectory_id = OF.trajectory_id group by OF.object_id order by OF.object_id
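With the guard in place, loadInteractionsFromSqlite returns an empty list both for a missing file and after a database error. A hedged usage sketch ('interactions.sqlite' and getMostSevereValue are assumptions, not taken from this changeset):

    for inter in loadInteractionsFromSqlite('interactions.sqlite'): # empty if the file is missing
        for name, indicator in inter.indicators.items():
            print(name, indicator.getMostSevereValue()) # SeverityIndicator accessor (assumed)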
@@ -554,18 +557,18 @@
 def loadBoundingBoxTableForDisplay(filename):
     '''Loads bounding boxes from bounding_boxes table for display over trajectories'''
     boundingBoxes = {} # list of bounding boxes for each instant
-    with sqlite3.connect(filename) as connection:
-        cursor = connection.cursor()
-        try:
-            cursor.execute('SELECT name FROM sqlite_master WHERE type=\'table\' AND name=\'bounding_boxes\'')
-            result = cursor.fetchall()
-            if len(result) > 0:
-                cursor.execute('SELECT * FROM bounding_boxes')
-                for row in cursor:
-                    boundingBoxes.setdefault(row[1], []).append([moving.Point(row[2], row[3]), moving.Point(row[4], row[5])])
-        except sqlite3.OperationalError as error:
-            printDBError(error)
-            return boundingBoxes
+    if Path(filename).exists():
+        with sqlite3.connect(filename) as connection:
+            cursor = connection.cursor()
+            try:
+                cursor.execute('SELECT name FROM sqlite_master WHERE type=\'table\' AND name=\'bounding_boxes\'')
+                result = cursor.fetchall()
+                if len(result) > 0:
+                    cursor.execute('SELECT * FROM bounding_boxes')
+                    for row in cursor:
+                        boundingBoxes.setdefault(row[1], []).append([moving.Point(row[2], row[3]), moving.Point(row[4], row[5])])
+            except sqlite3.OperationalError as error:
+                printDBError(error)
     return boundingBoxes
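This loader keeps its extra check: even when the file exists, it queries sqlite_master for the bounding_boxes table before selecting from it. A minimal sketch of that check as a reusable helper (tableExists is hypothetical):

    def tableExists(connection, tableName):
        'True if tableName exists in the connected SQLite database'
        cursor = connection.cursor()
        cursor.execute('SELECT name FROM sqlite_master WHERE type=\'table\' AND name = ?', (tableName,))
        return len(cursor.fetchall()) > 0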
 
 #########################
@@ -624,49 +627,52 @@
         connection.commit()
 
 def loadPrototypeAssignmentsFromSqlite(filename, objectType):
-    with sqlite3.connect(filename) as connection:
-        cursor = connection.cursor()
-        try:
-            tableName, objectIdColumnName = prototypeAssignmentNames(objectType)
-            cursor.execute('SELECT * FROM '+tableName)
-            prototypeAssignments = {}
-            for row in cursor:
-                p = moving.Prototype(row[1], row[2], row[3])
-                if p in prototypeAssignments:
-                    prototypeAssignments[p].append(row[0])
-                else:
-                    prototypeAssignments[p] = [row[0]]
-            return prototypeAssignments
-        except sqlite3.OperationalError as error:
-            printDBError(error)   
-        
+    prototypeAssignments = {}
+    if Path(filename).exists():
+        with sqlite3.connect(filename) as connection:
+            cursor = connection.cursor()
+            try:
+                tableName, objectIdColumnName = prototypeAssignmentNames(objectType)
+                cursor.execute('SELECT * FROM '+tableName)
+                for row in cursor:
+                    p = moving.Prototype(row[1], row[2], row[3])
+                    if p in prototypeAssignments:
+                        prototypeAssignments[p].append(row[0])
+                    else:
+                        prototypeAssignments[p] = [row[0]]
+                return prototypeAssignments
+            except sqlite3.OperationalError as error:
+                printDBError(error)
+    return prototypeAssignments
+
 def loadPrototypesFromSqlite(filename, withTrajectories = True):
     'Loads prototype ids and matchings (if stored)'
     prototypes = []
-    with sqlite3.connect(filename) as connection:
-        cursor = connection.cursor()
-        objects = []
-        try:
-            cursor.execute('SELECT * FROM prototypes')
-            for row in cursor:
-                prototypes.append(moving.Prototype(row[0], row[1], row[2], row[3]))
-            if withTrajectories:
-                for p in prototypes:
-                    p.setMovingObject(loadTrajectoriesFromSqlite(p.getFilename(), p.getTrajectoryType(), [p.getNum()])[0])
-                # loadingInformation = {} # complicated slightly optimized
-                # for p in prototypes:
-                #     dbfn = p.getFilename()
-                #     trajType = p.getTrajectoryType()
-                #     if (dbfn, trajType) in loadingInformation:
-                #         loadingInformation[(dbfn, trajType)].append(p)
-                #     else:
-                #         loadingInformation[(dbfn, trajType)] = [p]
-                # for k, v in loadingInformation.iteritems():
-                #     objects += loadTrajectoriesFromSqlite(k[0], k[1], [p.getNum() for p in v])
-        except sqlite3.OperationalError as error:
-            printDBError(error)
-    if len(set([p.getTrajectoryType() for p in prototypes])) > 1:
-        print('Different types of prototypes in database ({}).'.format(set([p.getTrajectoryType() for p in prototypes])))
+    if Path(filename).exists():
+        with sqlite3.connect(filename) as connection:
+            cursor = connection.cursor()
+            objects = []
+            try:
+                cursor.execute('SELECT * FROM prototypes')
+                for row in cursor:
+                    prototypes.append(moving.Prototype(row[0], row[1], row[2], row[3]))
+                if withTrajectories:
+                    for p in prototypes:
+                        p.setMovingObject(loadTrajectoriesFromSqlite(p.getFilename(), p.getTrajectoryType(), [p.getNum()])[0])
+                    # loadingInformation = {} # complicated slightly optimized
+                    # for p in prototypes:
+                    #     dbfn = p.getFilename()
+                    #     trajType = p.getTrajectoryType()
+                    #     if (dbfn, trajType) in loadingInformation:
+                    #         loadingInformation[(dbfn, trajType)].append(p)
+                    #     else:
+                    #         loadingInformation[(dbfn, trajType)] = [p]
+                    # for k, v in loadingInformation.iteritems():
+                    #     objects += loadTrajectoriesFromSqlite(k[0], k[1], [p.getNum() for p in v])
+            except sqlite3.OperationalError as error:
+                printDBError(error)
+        if len(set([p.getTrajectoryType() for p in prototypes])) > 1:
+            print('Different types of prototypes in database ({}).'.format(set([p.getTrajectoryType() for p in prototypes])))
     return prototypes
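loadPrototypeAssignmentsFromSqlite now initializes prototypeAssignments up front and returns the (possibly empty) dict on every path, where the old version returned None after a database error or when the file was missing. A hedged usage sketch ('prototypes.sqlite' and the 'object' objectType are assumptions):

    assignments = loadPrototypeAssignmentsFromSqlite('prototypes.sqlite', 'object')
    for prototype, objectIds in assignments.items(): # safe: always a dict
        print(prototype.getNum(), len(objectIds))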
 
 def savePOIsToSqlite(filename, gmm, gmmType, gmmId):
@@ -703,46 +709,47 @@
     from sklearn import mixture # TODO if not available, load data in duck-typed class with same fields
     from ast import literal_eval
     pois = []
-    with sqlite3.connect(filename) as connection:
-        cursor = connection.cursor()
-        try:
-            cursor.execute('SELECT * from gaussians2d')
-            gmmId = None
-            gmm = []
-            for row in cursor:
-                if gmmId is None or row[0] != gmmId:
-                    if len(gmm) > 0:
-                        tmp = mixture.GaussianMixture(len(gmm), covarianceType)
-                        tmp.means_ = array([gaussian['mean'] for gaussian in gmm])
-                        tmp.covariances_ = array([gaussian['covar'] for gaussian in gmm])
-                        tmp.weights_ = array([gaussian['weight'] for gaussian in gmm])
-                        tmp.gmmTypes = [gaussian['type'] for gaussian in gmm]
-                        tmp.precisions_cholesky_ = array([gaussian['precisions'] for gaussian in gmm])
-                        pois.append(tmp)
-                    gaussian = {'type': row[2],
-                                'mean': row[3:5],
-                                'covar': array(literal_eval(row[5])),
-                                'weight': row[7],
-                                'precisions': array(literal_eval(row[8]))}
-                    gmm = [gaussian]
-                    covarianceType = row[6]
-                    gmmId = row[0]
-                else:
-                    gmm.append({'type': row[2],
-                                'mean': row[3:5],
-                                'covar': array(literal_eval(row[5])),
-                                'weight': row[7],
-                                'precisions': array(literal_eval(row[8]))})
-            if len(gmm) > 0:
-                tmp = mixture.GaussianMixture(len(gmm), covarianceType)
-                tmp.means_ = array([gaussian['mean'] for gaussian in gmm])
-                tmp.covariances_ = array([gaussian['covar'] for gaussian in gmm])
-                tmp.weights_ = array([gaussian['weight'] for gaussian in gmm])
-                tmp.gmmTypes = [gaussian['type'] for gaussian in gmm]
-                tmp.precisions_cholesky_ = array([gaussian['precisions'] for gaussian in gmm])
-                pois.append(tmp)
-        except sqlite3.OperationalError as error:
-            printDBError(error)
+    if Path(filename).exists():
+        with sqlite3.connect(filename) as connection:
+            cursor = connection.cursor()
+            try:
+                cursor.execute('SELECT * from gaussians2d')
+                gmmId = None
+                gmm = []
+                for row in cursor:
+                    if gmmId is None or row[0] != gmmId:
+                        if len(gmm) > 0:
+                            tmp = mixture.GaussianMixture(len(gmm), covarianceType)
+                            tmp.means_ = array([gaussian['mean'] for gaussian in gmm])
+                            tmp.covariances_ = array([gaussian['covar'] for gaussian in gmm])
+                            tmp.weights_ = array([gaussian['weight'] for gaussian in gmm])
+                            tmp.gmmTypes = [gaussian['type'] for gaussian in gmm]
+                            tmp.precisions_cholesky_ = array([gaussian['precisions'] for gaussian in gmm])
+                            pois.append(tmp)
+                        gaussian = {'type': row[2],
+                                    'mean': row[3:5],
+                                    'covar': array(literal_eval(row[5])),
+                                    'weight': row[7],
+                                    'precisions': array(literal_eval(row[8]))}
+                        gmm = [gaussian]
+                        covarianceType = row[6]
+                        gmmId = row[0]
+                    else:
+                        gmm.append({'type': row[2],
+                                    'mean': row[3:5],
+                                    'covar': array(literal_eval(row[5])),
+                                    'weight': row[7],
+                                    'precisions': array(literal_eval(row[8]))})
+                if len(gmm) > 0:
+                    tmp = mixture.GaussianMixture(len(gmm), covarianceType)
+                    tmp.means_ = array([gaussian['mean'] for gaussian in gmm])
+                    tmp.covariances_ = array([gaussian['covar'] for gaussian in gmm])
+                    tmp.weights_ = array([gaussian['weight'] for gaussian in gmm])
+                    tmp.gmmTypes = [gaussian['type'] for gaussian in gmm]
+                    tmp.precisions_cholesky_ = array([gaussian['precisions'] for gaussian in gmm])
+                    pois.append(tmp)
+            except sqlite3.OperationalError as error:
+                printDBError(error)
     return pois
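Once rebuilt, each GaussianMixture in pois carries the stored means_, covariances_, weights_ and precisions_cholesky_, so it can score new points without refitting. A hedged sketch (the enclosing loader's name, filename and sample coordinates are assumptions; predict is the standard scikit-learn API):

    import numpy as np
    pois = loadPOIs('pois.sqlite') # loader name assumed; its def line is outside this hunk
    if len(pois) > 0:
        points = np.array([[0., 0.], [1., 2.]]) # illustrative world coordinates
        labels = pois[0].predict(points) # index of the most likely component per point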
     
 #########################