view scripts/process.py @ 984:a69695d14e59

work on script for large datasets
author Nicolas Saunier <nicolas.saunier@polymtl.ca>
date Tue, 06 Mar 2018 08:26:13 -0500
parents 7463c9bc846b
children 668a85c963c3
line wrap: on
line source

#! /usr/bin/env python

import sys, argparse
from pathlib2 import Path

import storage, events, prediction
from metadata import *

# Command-line interface: select the metadata database and which video
# sequences to process, plus processing options.
parser = argparse.ArgumentParser(description='This program manages the processing of several files based on a description of the sites and video data in an SQLite database following the metadata module.')
parser.add_argument('--db', dest = 'metadataFilename', help = 'name of the metadata file', required = True)
# default = [] so that iterating over args.videoIds is safe when --videos
# is omitted (nargs = '*' alone leaves the attribute as None, which would
# raise TypeError in the processing loop below)
parser.add_argument('--videos', dest = 'videoIds', help = 'indices of the video sequences', nargs = '*', type = int, default = [])
parser.add_argument('--pet', dest = 'computePET', help = 'computes PET', action = 'store_true')

# TODO need way of selecting sites as similar as possible to sql alchemy syntax
# TODO override tracking.cfg from db
# TODO manage cfg files, overwrite them (or a subset of parameters)
# TODO delete sqlite files

parser.add_argument('--nthreads', dest = 'nProcesses', help = 'number of processes to run in parallel', type = int, default = 1)

args = parser.parse_args()
# files are relative to metadata location

# Open the metadata SQLite database; createDatabase comes from the metadata
# module (imported with *) and returns a SQLAlchemy-style session used for
# the VideoSequence queries below.
session = createDatabase(args.metadataFilename)
# All data filenames stored in the metadata are relative to the location of
# the metadata file itself, so resolve them against its parent directory.
parentDir = Path(args.metadataFilename).parent

# TODO change prediction parameters (currently hard-coded; CVExact is
# presumably constant-velocity exact prediction — confirm in the prediction module)
predictionParameters = prediction.CVExactPredictionParameters()

# For each requested video sequence: load its road user trajectories,
# build the pairwise interactions, compute the safety indicators and save
# them back into the same trajectory database.
for videoId in args.videoIds or []: # 'or []' tolerates videoIds being None when --videos is omitted
    vs = session.query(VideoSequence).get(videoId)
    if vs is None: # unknown id: report and skip instead of crashing on attribute access
        print('no video sequence with id {} in the metadata database'.format(videoId))
        continue
    print(vs.getDatabaseFilename())
    # trajectory database path is stored relative to the metadata file location
    objects = storage.loadTrajectoriesFromSqlite(str(parentDir/vs.getDatabaseFilename()), 'object')#, args.nObjects, withFeatures = (params.useFeaturesForPrediction or predictionMethod == 'ps' or predictionMethod == 'mp'))
    interactions = events.createInteractions(objects)
    #if args.nProcesses == 1:
    # analysis parameters come from the tracking configuration file of the camera view
    params = storage.ProcessParameters(str(parentDir/vs.cameraView.getTrackingConfigurationFilename()))
    processed = events.computeIndicators(interactions, True, args.computePET, predictionParameters, params.collisionDistance, params.predictionTimeHorizon, params.crossingZones, False, None)
    # indicators are saved into the same sqlite file the trajectories came from
    storage.saveIndicatorsToSqlite(str(parentDir/vs.getDatabaseFilename()), processed)
    
# else:
#     pool = Pool(processes = args.nProcesses)
#     nInteractionPerProcess = int(np.ceil(len(interactions)/float(args.nProcesses)))
#     jobs = [pool.apply_async(events.computeIndicators, args = (interactions[i*nInteractionPerProcess:(i+1)*nInteractionPerProcess], not args.noMotionPrediction, args.computePET, predictionParameters, params.collisionDistance, params.predictionTimeHorizon, params.crossingZones, False, None)) for i in range(args.nProcesses)]
#     processed = []
#     for job in jobs:
#         processed += job.get()
#     pool.close()