changeset 1234:dd969637381e

work on tracker interface
author Nicolas Saunier <nicolas.saunier@polymtl.ca>
date Thu, 14 Sep 2023 16:18:36 -0400
parents d5695e0b59d9
children 855abc69fa99
files scripts/dltrack.py
diffstat 1 files changed, 25 insertions(+), 10 deletions(-) [+]
--- a/scripts/dltrack.py	Fri Sep 08 17:09:12 2023 -0400
+++ b/scripts/dltrack.py	Thu Sep 14 16:18:36 2023 -0400
@@ -8,11 +8,13 @@
 from trafficintelligence import cvutils, moving, storage, utils
 
 parser = argparse.ArgumentParser(description='The program tracks objects following the ultralytics yolo executable.')#, epilog = 'Either the configuration filename or the other parameters (at least video and database filenames) need to be provided.')
-parser.add_argument('-i', dest = 'videoFilename', help = 'name of the video file (overrides the configuration file)')
-# detect model
-# tracker model
+parser.add_argument('-i', dest = 'videoFilename', help = 'name of the video file', required = True)
+parser.add_argument('-d', dest = 'databaseFilename', help = 'name of the SQLite database file', required = True)
+parser.add_argument('-m', dest = 'detectorFilename', help = 'name of the detection model file', required = True)
+parser.add_argument('-t', dest = 'trackerFilename', help = 'name of the tracker file', required = True)
 parser.add_argument('--display', dest = 'display', help = 'show the results (careful with long videos, risk of running out of memory)', action = 'store_true')
-#parser.add_argument('-f', dest = 'firstFrameNum', help = 'show the results (careful with long videos, risk of running out of memory)', action = 'store_true')
+parser.add_argument('-f', dest = 'firstFrameNum', help = 'number of the first frame to process', type = int, default = 0)
+parser.add_argument('-l', dest = 'lastFrameNum', help = 'number of the last frame to process', type = int, default = float('Inf'))
 args = parser.parse_args()
 
 # required functionality?
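# Not part of the changeset: a hypothetical invocation using the options added above
# (all filenames below are placeholders, not files from the repository):
#   python scripts/dltrack.py -i video.mp4 -d tracking.sqlite -m yolov8x.pt -t bytetrack.yaml -f 0 -l 1000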
@@ -59,7 +61,7 @@
 # check if one can go to specific frame https://docs.ultralytics.com/modes/track/#persisting-tracks-loop
 
 # Load a model
-model = YOLO('/home/nicolas/Research/Data/classification-models/yolov8x.pt') # seg yolov8x-seg.pt
+model = YOLO('/home/nicolas/Research/Data/classification-models/yolov8x.pt') # seg yolov8x-seg.pt
 # seg could be used on cropped image... if can be loaded and kept in memory
 # model = YOLO('/home/nicolas/Research/Data/classification-models/yolo_nas_l.pt ') # AttributeError: 'YoloNAS_L' object has no attribute 'get'
 
@@ -68,13 +70,23 @@
 if args.display:
     windowName = 'frame'
     cv2.namedWindow(windowName, cv2.WINDOW_NORMAL)
-    
-results = model.track(source=args.videoFilename, tracker="/home/nicolas/Research/Data/classification-models/bytetrack.yaml", classes=list(moving.cocoTypeNames.keys()), stream=True)
+
+capture = cv2.VideoCapture(args.videoFilename)
+#results = model.track(source=args.videoFilename, tracker="/home/nicolas/Research/Data/classification-models/bytetrack.yaml", classes=list(moving.cocoTypeNames.keys()), stream=True)
 objects = []
 currentObjects = {}
 featureNum = 0
+
+frameNum = args.firstFrameNum
+capture.set(cv2.CAP_PROP_POS_FRAMES, frameNum)
+lastFrameNum = args.lastFrameNum
+
+success, frame = capture.read()
+results = model.track(frame, tracker="/home/nicolas/Research/Data/classification-models/bytetrack.yaml", classes=list(moving.cocoTypeNames.keys()), persist=True)
 # create object with user type and list of 3 features (bottom ones and middle) + projection
-for frameNum, result in enumerate(results):
+while capture.isOpened() and success and frameNum <= lastFrameNum:
+#for frameNum, result in enumerate(results):
+    result = results[0]
     print(frameNum, len(result.boxes))
     for box in result.boxes:
         #print(box.cls, box.id, box.xyxy)
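# Sketch, not in this changeset, of turning one ultralytics box into plain Python values
# (assuming the usual Results/Boxes attributes; box.id can be None when no track id is assigned):
#   if box.id is not None:
#       trackId = int(box.id.item())
#       classId = int(box.cls.item())
#       x1, y1, x2, y2 = box.xyxy[0].tolist()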
@@ -102,8 +114,11 @@
         key = cv2.waitKey()
         if cvutils.quitKey(key):
             break
+    frameNum += 1
+    success, frame = capture.read()
+    results = model.track(frame, persist=True)
 
-# interpolate before saving
+# interpolate and generate velocity before saving
 for num, obj in currentObjects.items():
     obj.setUserType(utils.mostCommon(obj.userTypes))
     obj.features[0].timeInterval = copy(obj.getTimeInterval())
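# Sketch, not in this changeset: the velocity generation mentioned above is not implemented yet;
# assuming per-frame (x, y) positions in a plain list, frame-to-frame differencing would look roughly like
#   velocities = [(x2 - x1, y2 - y1) for (x1, y1), (x2, y2) in zip(positions[:-1], positions[1:])]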
@@ -115,7 +130,7 @@
         obj.features[0].positions = moving.Trajectory.fromPointList(list(obj.features[0].tmpPositions.values()))
         obj.features[1].positions = moving.Trajectory.fromPointList(list(obj.features[1].tmpPositions.values()))
         
-storage.saveTrajectoriesToSqlite('test.sqlite', list(currentObjects.values()), 'object')
+storage.saveTrajectoriesToSqlite(args.databaseFilename, list(currentObjects.values()), 'object')
 
 # todo save bbox and mask to study localization / representation
 # apply quality checks deviation and acceleration bounds?
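# Sketch, not in this changeset, of the acceleration-bound quality check mentioned above,
# assuming per-frame speeds in a plain list and a hypothetical threshold maxAcceleration:
#   accelerations = [s2 - s1 for s1, s2 in zip(speeds[:-1], speeds[1:])]
#   keepObject = all(abs(a) <= maxAcceleration for a in accelerations)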