changeset 917:89cc05867c4c

reorg and work in progress
author Nicolas Saunier <nicolas.saunier@polymtl.ca>
date Tue, 04 Jul 2017 18:00:01 -0400
parents 7345f0d51faa
children 3a06007a4bb7
files python/ml.py python/storage.py scripts/learn-motion-patterns.py
diffstat 3 files changed, 78 insertions(+), 73 deletions(-)
--- a/python/ml.py	Tue Jul 04 17:36:24 2017 -0400
+++ b/python/ml.py	Tue Jul 04 18:00:01 2017 -0400
@@ -266,7 +266,7 @@
             print('Mean overall similarity: {}'.format((similarities[cluster][:,cluster].sum()+n)/(n*(n-1))))
 
 # Gaussian Mixture Models
-def plotGMM(mean, covariance, num, fig, color, alpha = 0.3):
+def plotGMM(mean, covariance, gmmId, fig, color, alpha = 0.3):
     v, w = np.linalg.eigh(covariance)
     angle = 180*np.arctan2(w[0][1], w[0][0])/np.pi
     v *= 4
@@ -275,7 +275,7 @@
     ell.set_alpha(alpha)
     fig.axes[0].add_artist(ell)
     plt.plot([mean[0]], [mean[1]], 'x'+color)
-    plt.annotate(str(num), xy=(mean[0]+1, mean[1]+1))
+    plt.annotate(str(gmmId), xy=(mean[0]+1, mean[1]+1))
 
 def plotGMMClusters(model, labels = None, dataset = None, fig = None, colors = utils.colors, nUnitsPerPixel = 1., alpha = 0.3):
     '''plot the ellipse corresponding to the Gaussians
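
The renamed helper can be exercised on its own; below is a minimal sketch with hypothetical mean and covariance values, assuming the python/ directory is on the path (the figure needs an existing axes, since plotGMM draws on fig.axes[0]):

    import numpy as np
    import matplotlib.pyplot as plt
    import ml

    mean = np.array([10., 5.])
    covariance = np.array([[4., 1.],
                           [1., 2.]])
    fig, ax = plt.subplots()    # provides fig.axes[0] for the ellipse
    ax.set_xlim(0., 20.)
    ax.set_ylim(0., 10.)
    ml.plotGMM(mean, covariance, 0, fig, 'b')   # gmmId = 0 is annotated next to the mean
    plt.show()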
--- a/python/storage.py	Tue Jul 04 17:36:24 2017 -0400
+++ b/python/storage.py	Tue Jul 04 18:00:01 2017 -0400
@@ -40,6 +40,26 @@
     except sqlite3.OperationalError as error:
         printDBError(error)
 
+def deleteFromSqlite(filename, dataType):
+    'Deletes (drops) the tables in the database filename corresponding to the given type of data'
+    if path.isfile(filename):
+        connection = sqlite3.connect(filename)
+        if dataType == 'object':
+            dropTables(connection, ['objects', 'objects_features'])
+        elif dataType == 'interaction':
+            dropTables(connection, ['interactions', 'indicators'])
+        elif dataType == 'bb':
+            dropTables(connection, ['bounding_boxes'])
+        elif dataType == 'pois':
+            dropTables(connection, ['gaussians2d', 'objects_pois'])
+        elif dataType == 'prototype':
+            dropTables(connection, ['prototypes'])
+        else:
+            print('Unknown data type {} to delete from database'.format(dataType))
+        connection.close()
+    else:
+        print('{} does not exist'.format(filename))
+
 def tableExists(filename, tableName):
     'indicates if the table exists in the database'
     try:
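
For context, here is a minimal usage sketch of the relocated deleteFromSqlite (the database filename is hypothetical; assumes the python/ directory is on the path):

    import storage

    # drop the prototypes table before relearning motion patterns
    storage.deleteFromSqlite('scene.sqlite', 'prototype')
    # an unrecognized data type is only reported, nothing is dropped
    storage.deleteFromSqlite('scene.sqlite', 'trajectories')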
@@ -355,7 +375,6 @@
     if len(missingObjectNumbers) > 0:
         print('List of missing objects to attach corresponding curvilinear trajectories: {}'.format(missingObjectNumbers))
 
-
 def saveTrajectoriesToSqlite(outputFilename, objects, trajectoryType, withFeatures = False):
     '''Writes features, i.e. the trajectory positions (and velocities if they exist)
     with their instants to a specified sqlite file
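
As a reminder of the function this hunk touches, a minimal sketch of saving objects from one database into another (use case 4 of learn-motion-patterns.py); the filenames are hypothetical and the loader call is assumed to be the existing storage.loadTrajectoriesFromSqlite:

    import storage

    objects = storage.loadTrajectoriesFromSqlite('scene1.sqlite', 'object')
    storage.saveTrajectoriesToSqlite('merged.sqlite', objects, 'object')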
@@ -422,55 +441,6 @@
         printDBError(error)
     connection.close()
 
-def savePrototypesToSqlite(filename, prototypeIndices, trajectoryType, nMatchings = None, dbFilenames = None):
-    '''save the prototype indices
-    nMatchings, if not None, is a list of the number of matches
-    dbFilenames, if not None, is a list of the DB filenames'''
-    connection = sqlite3.connect(filename)
-    cursor = connection.cursor()
-    try:
-        cursor.execute('CREATE TABLE IF NOT EXISTS prototypes (id INTEGER, dbfilename VARCHAR, trajectory_type VARCHAR CHECK (trajectory_type IN (\"feature\", \"object\")), nMatchings INTEGER, PRIMARY KEY (id, dbfilename))')
-        for i, protoId in enumerate(prototypeIndices):
-            if nMatchings is not None:
-                n = nMatchings[i]
-            else:
-                n = 'NULL'
-            if dbFilenames is not None:
-                dbfn = dbFilenames[i]
-            else:
-                dbfn = filename
-            cursor.execute('INSERT INTO prototypes (id, dbfilename, trajectory_type, nMatchings) VALUES ({},\"{}\",\"{}\",{})'.format(protoId, dbfn, trajectoryType, n))
-        cursor.execute('SELECT * from sqlite_master WHERE type = \"table\" and name = \"{}\"'.format(tableNames[trajectoryType]))
-        if len(cursor.fetchall()) == 0:
-            pass # save prototype trajectory data
-    except sqlite3.OperationalError as error:
-        printDBError(error)
-    connection.commit()
-    connection.close()
-
-def loadPrototypesFromSqlite(filename):
-    'Loads prototype ids and matchings (if stored)'
-    connection = sqlite3.connect(filename)
-    cursor = connection.cursor()
-    prototypeIndices = []
-    dbFilenames = []
-    trajectoryTypes = []
-    nMatchings = []
-    try:
-        cursor.execute('SELECT * FROM prototypes')
-        for row in cursor:
-            prototypeIndices.append(row[0])
-            dbFilenames.append(row[1])
-            trajectoryTypes.append(row[2])
-            if row[3] is not None:
-                nMatchings.append(row[3])
-    except sqlite3.OperationalError as error:
-        printDBError(error)
-    connection.close()
-    if len(set(trajectoryTypes)) > 1:
-        print('Different types of prototypes in database ({}).'.format(set(trajectoryTypes)))
-    return prototypeIndices, dbFilenames, trajectoryTypes, nMatchings
-
 def loadBBMovingObjectsFromSqlite(filename, objectType = 'bb', objectNumbers = None, timeStep = None):
     '''Loads bounding box moving object from an SQLite
     (format of SQLite output by the ground truth annotation tool
@@ -496,26 +466,6 @@
     connection.close()
     return objects
 
-def deleteFromSqlite(filename, dataType):
-    'Deletes (drops) some tables in the filename depending on type of data'
-    if path.isfile(filename):
-        connection = sqlite3.connect(filename)
-        if dataType == 'object':
-            dropTables(connection, ['objects', 'objects_features'])
-        elif dataType == 'interaction':
-            dropTables(connection, ['interactions', 'indicators'])
-        elif dataType == 'bb':
-            dropTables(connection, ['bounding_boxes'])
-        elif dataType == 'pois':
-            dropTables(connection, ['gaussians2d', 'objects_pois'])
-        elif dataType == 'prototype':
-            dropTables(connection, ['prototypes'])
-        else:
-            print('Unknown data type {} to delete from database'.format(dataType))
-        connection.close()
-    else:
-        print('{} does not exist'.format(filename))
-
 def saveInteraction(cursor, interaction):
     roadUserNumbers = list(interaction.getRoadUserNumbers())
     cursor.execute('INSERT INTO interactions VALUES({}, {}, {}, {}, {})'.format(interaction.getNum(), roadUserNumbers[0], roadUserNumbers[1], interaction.getFirstInstant(), interaction.getLastInstant()))
@@ -624,9 +574,63 @@
     return boundingBoxes
 
 #########################
-# saving and loading for scene interpretation
+# saving and loading for scene interpretation: POIs and Prototypes
 #########################
 
+def savePrototypesToSqlite(filename, prototypeIndices, trajectoryType, objects = None, nMatchings = None, dbFilenames = None):
+    '''Saves the prototype indices
+    if objects is not None, the trajectories are also saved in the prototype_positions and prototype_velocities tables
+    (the prototypeIndices must be valid indices into objects)
+    nMatchings, if not None, is a list of the numbers of matches
+    dbFilenames, if not None, is a list of the DB filenames'''
+    connection = sqlite3.connect(filename)
+    cursor = connection.cursor()
+    try:
+        cursor.execute('CREATE TABLE IF NOT EXISTS prototypes (id INTEGER, dbfilename VARCHAR, trajectory_type VARCHAR CHECK (trajectory_type IN (\"feature\", \"object\")), nmatchings INTEGER, positions_id INTEGER, PRIMARY KEY (id, dbfilename))')
+        for i, protoId in enumerate(prototypeIndices):
+            if nMatchings is not None:
+                n = nMatchings[i]
+            else:
+                n = 'NULL'
+            if dbFilenames is not None:
+                dbfn = dbFilenames[i]
+            else:
+                dbfn = filename
+            cursor.execute('INSERT INTO prototypes (id, dbfilename, trajectory_type, nmatchings, positions_id) VALUES ({},\"{}\",\"{}\",{}, {})'.format(protoId, dbfn, trajectoryType, n, i))
+        #cursor.execute('SELECT * from sqlite_master WHERE type = \"table\" and name = \"{}\"'.format(tableNames[trajectoryType]))
+        if objects is not None:
+            pass # save prototype trajectory data
+    except sqlite3.OperationalError as error:
+        printDBError(error)
+    connection.commit()
+    connection.close()
+
+def savePrototypeAssignments(filename, objects):
+    pass # TODO save the prototype assigned to each object
+
+def loadPrototypesFromSqlite(filename):
+    'Loads prototype ids and matchings (if stored)'
+    connection = sqlite3.connect(filename)
+    cursor = connection.cursor()
+    prototypeIndices = []
+    dbFilenames = []
+    trajectoryTypes = []
+    nMatchings = []
+    try:
+        cursor.execute('SELECT * FROM prototypes')
+        for row in cursor:
+            prototypeIndices.append(row[0])
+            dbFilenames.append(row[1])
+            trajectoryTypes.append(row[2])
+            if row[3] is not None:
+                nMatchings.append(row[3])
+    except sqlite3.OperationalError as error:
+        printDBError(error)
+    connection.close()
+    if len(set(trajectoryTypes)) > 1:
+        print('Different types of prototypes in database ({}).'.format(set(trajectoryTypes)))
+    return prototypeIndices, dbFilenames, trajectoryTypes, nMatchings
+
 def savePOIs(filename, gmm, gmmType, gmmId):
     '''Saves a Gaussian mixture model (of class sklearn.mixture.GaussianMixture)
     gmmType is a type of GaussianMixture, learnt either from beginnings or ends of trajectories'''
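
The moved prototype functions can be checked as a round trip; below is a minimal sketch with hypothetical indices, matching counts and filenames (the branch saving the prototype trajectories themselves, when objects is not None, is still a stub above):

    import storage

    prototypeIndices = [3, 17, 42]    # indices of the prototypes in their original databases
    nMatchings = [12, 5, 9]           # number of trajectories matched to each prototype
    dbFilenames = ['scene1.sqlite', 'scene1.sqlite', 'scene2.sqlite']

    storage.savePrototypesToSqlite('prototypes.sqlite', prototypeIndices, 'feature',
                                   nMatchings = nMatchings, dbFilenames = dbFilenames)
    indices, filenames, types, matchings = storage.loadPrototypesFromSqlite('prototypes.sqlite')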
--- a/scripts/learn-motion-patterns.py	Tue Jul 04 17:36:24 2017 -0400
+++ b/scripts/learn-motion-patterns.py	Tue Jul 04 18:00:01 2017 -0400
@@ -31,6 +31,7 @@
 # 2. load proto, load objects, update proto, save proto
 # 3. assign objects from one db to proto
 # 4. load objects from several files, save in another
+# 5. keep prototypes, with positions/velocities, in separate db (keep link to original data through filename, type and index)
 
 # TODO add possibility to cluster with velocities
 # TODO add possibility to start with saved prototypes so that one can incrementally learn from several databases
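
A minimal sketch of use case 5, keeping the prototypes in a separate database and going back to the original data through the stored filename, trajectory type and index (filenames are hypothetical; the trajectory loader name and signature are assumed from the storage module):

    import storage

    indices, filenames, types, _ = storage.loadPrototypesFromSqlite('prototypes.sqlite')
    for protoId, dbfn, trajectoryType in zip(indices, filenames, types):
        # fetch the prototype trajectory from the database it was learnt from
        objects = storage.loadTrajectoriesFromSqlite(dbfn, trajectoryType, [protoId])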