comparison trafficintelligence/storage.py @ 1212:af329f3330ba

work in progress on 3D safety analysis
author Nicolas Saunier <nicolas.saunier@polymtl.ca>
date Tue, 02 May 2023 17:11:24 -0400
parents 36f0d18e1fad
children 3f2214125164
1211:a095d4fbb2ea 1212:af329f3330ba
5 5 from pathlib import Path
6 6 import shutil
7 7 from copy import copy
8 8 import sqlite3, logging
9 9
10 from numpy import log, min as npmin, max as npmax, round as npround, array, sum as npsum, loadtxt, floor as npfloor, ceil as npceil, linalg, int32, int64, reshape, dot, vstack, transpose, ones, zeros_like
10 from numpy import log, min as npmin, max as npmax, round as npround, array, sum as npsum, loadtxt, floor as npfloor, ceil as npceil, linalg, int32, int64, reshape, dot, vstack, transpose, ones, zeros_like, pi
11 11 from pandas import read_csv, merge
12 12
13 13 from trafficintelligence import utils, moving, events, indicators, cvutils
14 14 from trafficintelligence.base import VideoFilenameAddable
15 15
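The new line 1393 below calls roty(pi/2), which is why pi is added to the numpy imports above. The roty helper itself is not shown in this hunk (it is defined elsewhere in the module or imported); for reference, a minimal sketch of a standard rotation-about-the-y-axis matrix, in the convention used by the KITTI devkit, which this helper is assumed to correspond to:

from numpy import array, cos, sin

def roty(t):
    'Rotation matrix about the y axis by angle t (radians)'
    c, s = cos(t), sin(t)
    return array([[ c, 0., s],
                  [0., 1., 0.],
                  [-s, 0., c]])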
1383 1383 # check https://docs.opencv.org/3.4/d9/d0c/group__calib3d.html#ga1019495a2c8d1743ed5cc23fa0daff8c
1384 1384
1385 1385 newObj = moving.MovingObject(num = objNum, timeInterval = moving.TimeInterval(int(tmp.frame.min()), int(tmp.frame.max())), positions = t, velocities = t.differentiate(True), userType = tmp.iloc[0].usertype, features = [moving.MovingObject(num = featureNum+i, timeInterval = moving.TimeInterval(int(tmp.frame.min()), int(tmp.frame.max())), positions = featureTrajectories[i], velocities = featureTrajectories[i].differentiate(True)) for i in range(4)])
1386 1386 objects.append(newObj)
1387 1387 featureNum += 4
1388
1389 # ego vehicle: reconstruct its trajectory from the per-frame IMU-to-world poses (oxts)
1390 t = moving.Trajectory()
1391 featureTrajectories = [moving.Trajectory() for i in range(4)]
1392 interval = moving.TimeInterval(int(data.frame.min()), int(data.frame.max()))
1393 R = roty(pi/2) # rotation about the camera y (vertical) axis
1394 for frame in interval:
1395 _, Tr_imu_to_world = oxts[frame] # pose of the IMU in the world frame at this frame, from the KITTI oxts (GPS/IMU) data
1396
1397 l = 4.5 # m (assumed ego vehicle length)
1398 w = 1.8 # m (assumed ego vehicle width)
1399 h = 0. # m (height unused: only the x,y footprint of the box is kept below)
1400 # 3d bounding box corners
1401 x_corners = [l / 2, l / 2, -l / 2, -l / 2, l / 2, l / 2, -l / 2, -l / 2]
1402 y_corners = [0, 0, 0, 0, -h, -h, -h, -h]
1403 z_corners = [w / 2, -w / 2, -w / 2, w / 2, w / 2, -w / 2, -w / 2, w / 2]
1404 # rotate the 3d bounding box (the translation of the box centre is not applied yet)
1405 corners3d = dot(R, vstack([x_corners, y_corners, z_corners]))
1406 corners3d[0, :] = corners3d[0, :] # placeholders for the centre translation (currently no-ops)
1407 corners3d[1, :] = corners3d[1, :]
1408 corners3d[2, :] = corners3d[2, :]
1409 refCorners = transpose(dot(invR0, corners3d)) # 8x3; equivalent to pts_3d_ref = self.project_rect_to_ref(pts_3d_rect) in the KITTI devkit, avoiding the double transpose of np.transpose(pts_3d_rect)
1410 homRefCorners = cvutils.cartesian2Homogeneous(refCorners)
1411 veloCorners = dot(homRefCorners, transCam2Velo)
1412 homVeloCorners = cvutils.cartesian2Homogeneous(veloCorners)
1413 imuCorners = dot(kittiCalibration['Tr_velo_imu'], homVeloCorners.T).T # 8x3
1414
1415 homImuCorners = cvutils.cartesian2Homogeneous(imuCorners)
1416 worldCorners = dot(Tr_imu_to_world, homImuCorners.T).T # 8x3
1417
1418 # take the x,y coordinates of the first 4 corners (ground face) # x0, y0, _ = boxes3d[0]
1419 xCoords = worldCorners[:4,0]
1420 yCoords = worldCorners[:4,1]
1421 t.addPositionXY(xCoords.mean(), yCoords.mean())
1422 for i in range(4):
1423 featureTrajectories[i].addPositionXY(xCoords[i], yCoords[i])
1424 # check https://docs.opencv.org/3.4/d9/d0c/group__calib3d.html#ga1019495a2c8d1743ed5cc23fa0daff8c
1425
1426 newObj = moving.MovingObject(num = objNum+1, timeInterval = interval, positions = t, velocities = t.differentiate(True), userType = 'Car', features = [moving.MovingObject(num = featureNum+i, timeInterval = interval, positions = featureTrajectories[i], velocities = featureTrajectories[i].differentiate(True)) for i in range(4)])
1427 objects.append(newObj)
1428
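For readers unfamiliar with the corner layout in the hunk above, here is a minimal, self-contained sketch (plain numpy, not part of the changeset) of how the rotated box corners and the footprint centroid are obtained for an l x w box; the actual code additionally maps the corners through the camera/LiDAR/IMU calibration chain before keeping the world x,y coordinates. Names such as boxCorners are illustrative only.

from numpy import array, cos, sin, dot, vstack, pi

def boxCorners(l = 4.5, w = 1.8, h = 0., yaw = pi/2):
    'Corners of an l x w x h box in KITTI camera coordinates (y down), rotated about y'
    c, s = cos(yaw), sin(yaw)
    R = array([[ c, 0., s],
               [0., 1., 0.],
               [-s, 0., c]])
    xCorners = [ l/2,  l/2, -l/2, -l/2,  l/2,  l/2, -l/2, -l/2]
    yCorners = [ 0.,   0.,   0.,   0.,   -h,   -h,   -h,   -h]
    zCorners = [ w/2, -w/2, -w/2,  w/2,  w/2, -w/2, -w/2,  w/2]
    return dot(R, vstack([xCorners, yCorners, zCorners])) # 3x8

corners3d = boxCorners()
footprint = corners3d[[0, 2], :4] # x and z of the first 4 (ground) corners, 2x4
center = footprint.mean(axis = 1) # centroid, analogous to t.addPositionXY(xCoords.mean(), yCoords.mean())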
1388 1429 return objects
1389 1430
1390 1431 def convertNgsimFile(inputfile, outputfile, append = False, nObjects = -1, sequenceNum = 0):
1391 1432 '''Reads data from the trajectory data provided by NGSIM project
1392 1433 and converts to our current format.'''
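The succession of cvutils.cartesian2Homogeneous calls and dot products in the new code chains the KITTI calibration transforms (rectified camera -> reference camera -> LiDAR -> IMU) with the per-frame IMU-to-world pose. Below is a minimal sketch of that pattern with identity placeholder transforms; toHomogeneous is a hypothetical stand-in for cvutils.cartesian2Homogeneous, the matrix names are illustrative, and the real 3x4 matrices come from the KITTI calibration and oxts files (the actual code also applies one transform from the right, on non-transposed data).

from numpy import dot, hstack, ones, eye, array

def toHomogeneous(pts):
    'Append a column of ones: n x 3 -> n x 4 (stand-in for cvutils.cartesian2Homogeneous)'
    return hstack((pts, ones((pts.shape[0], 1))))

# placeholder 3x4 rigid transforms [R|t]: identity rotation, zero translation
Tr_cam_to_velo = eye(3, 4)
Tr_velo_to_imu = eye(3, 4)
Tr_imu_to_world = eye(3, 4)

refCorners = array([[ 2.25, 0.,  0.9],
                    [ 2.25, 0., -0.9],
                    [-2.25, 0., -0.9],
                    [-2.25, 0.,  0.9]]) # 4x3 toy footprint corners in the reference camera frame
veloCorners  = dot(Tr_cam_to_velo,  toHomogeneous(refCorners).T).T   # 4x3, LiDAR frame
imuCorners   = dot(Tr_velo_to_imu,  toHomogeneous(veloCorners).T).T  # 4x3, IMU frame
worldCorners = dot(Tr_imu_to_world, toHomogeneous(imuCorners).T).T   # 4x3, world frame
xyFootprint  = worldCorners[:, :2]  # x,y coordinates used to build the Trajectory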