Source code for utility_functions

import math
from std_msgs.msg import Header
from geometry_msgs.msg import Quaternion, Vector3
from feb_msgs.msg import ConesCartesian
import numpy as np

def compute_timediff(slam, header: Header) -> float:
    """
    Computes the time difference between this message header and the last state msg.

    Input:
        Header (std_msgs/Header)
            - uint32 seq
            - time stamp
            - string frame_id
    Output:
        timediff: float
    """
    newtime = header.stamp.sec + 1e-9 * header.stamp.nanosec
    timediff = newtime - slam.statetimestamp
    slam.statetimestamp = newtime
    return timediff
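# Illustrative usage sketch (not part of the original module). It assumes only
# that `slam` is some object carrying a float `statetimestamp` attribute, since
# compute_timediff reads and then updates that field; the names below are
# made up for the example.
def _example_compute_timediff():
    from types import SimpleNamespace
    from builtin_interfaces.msg import Time  # assumed available in the ROS 2 environment

    slam = SimpleNamespace(statetimestamp=0.0)
    header = Header(stamp=Time(sec=12, nanosec=500_000_000))
    return compute_timediff(slam, header)  # 12.5 relative to the initial timestamp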
def quat_to_euler(quat):
    """
    Converts a quaternion to Euler angles and returns the yaw.

    Input:
        Quat (Quaternion)
            - float x
            - float y
            - float z
            - float w
    Output:
        yaw (roll and pitch are computed but not returned)
    """
    x = quat.x
    y = quat.y
    z = quat.z
    w = quat.w
    roll = math.atan2(2.0 * (w * x + y * z), 1.0 - 2.0 * (x * x + y * y))
    pitch = math.asin(2.0 * (w * y - z * x))
    yaw = math.atan2(2.0 * (w * z + x * y), 1.0 - 2.0 * (y * y + z * z))
    return yaw
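# Illustrative check (not part of the original module): a pure yaw rotation of
# +90 degrees encoded as a quaternion should come back as roughly pi/2.
def _example_quat_to_euler():
    q = Quaternion(x=0.0, y=0.0, z=math.sin(math.pi / 4), w=math.cos(math.pi / 4))
    return quat_to_euler(q)  # approximately 1.5708 (pi/2)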
def cartesian_to_polar(car_state, cone):
    # Vector from the car to the cone in Cartesian coordinates
    p_x = cone[0] - car_state[0]
    p_y = cone[1] - car_state[1]
    # Range and bearing of the cone relative to the car
    r = math.sqrt(p_x**2 + p_y**2)
    angle = math.atan2(p_y, p_x)
    return r, angle
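# Illustrative usage sketch (not part of the original module), with made-up
# numbers: a cone 3 m ahead and 4 m to the side of a car at the origin is
# 5 m away at a bearing of atan2(4, 3) radians.
def _example_cartesian_to_polar():
    car_state = [0.0, 0.0]
    cone = [3.0, 4.0]
    return cartesian_to_polar(car_state, cone)  # (5.0, 0.927...)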
def compareAngle(self, a, b, threshold):
    # Returns True if angles a and b (radians, with a < b) are closer than
    # `threshold` going the short way around the circle.
    # (ex. in degrees): a = 15 and b = 330 are 45 degrees apart (not 315)
    mn = min(b - a, 2 * np.pi - b + a)
    return mn < threshold
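# Illustrative usage sketch (not part of the original module). compareAngle does
# not use its first parameter, so None is passed for `self` here; angles are in
# radians with a < b, as the comment above assumes.
def _example_compareAngle():
    a = np.deg2rad(15.0)
    b = np.deg2rad(330.0)
    return compareAngle(None, a, b, np.deg2rad(60.0))  # True: 45 deg apart, under the threshold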
def compute_delta_velocity(self, acc: Vector3, dt: float, imu_direction: str) -> float:
    """
    Integrates linear acceleration over dt to produce a change in linear velocity.

    Input:
        - acc: linear acceleration from the IMU message
        - dt: time step, calculated at each update
        - imu_direction: which IMU axis points along the car's longitudinal direction
    Output:
        - linear_velocity: change in longitudinal velocity over dt

    NOTE: we assume linear acceleration is in the car's frame, i.e.
        x is the longitudinal direction (positive = forwards)
        y is the lateral direction (positive = to the right)
    Depending on how the IMU provides this data, change accordingly.
    """
    # longitudinal_acc = np.linalg.norm([acc.x, acc.y])
    if imu_direction == 'x':
        longitudinal_acc = acc.x
    elif imu_direction == 'y':
        longitudinal_acc = acc.y
    elif imu_direction == 'z':
        longitudinal_acc = acc.z
    # lateral_acc = acc.y  # May be needed in the future if the
    # straight-line model is not accurate enough
    linear_velocity = longitudinal_acc * dt
    return linear_velocity
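# Illustrative usage sketch (not part of the original module), with made-up
# numbers: a constant 2 m/s^2 longitudinal acceleration over a 0.05 s step.
# The Vector3 field is selected by imu_direction, 'x' here by assumption, and
# None is passed for the unused `self` parameter.
def _example_compute_delta_velocity():
    acc = Vector3(x=2.0, y=0.0, z=9.81)
    return compute_delta_velocity(None, acc, dt=0.05, imu_direction='x')  # 0.1 m/s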
def rotate_and_translate_cones(msg: ConesCartesian, camera):
    pass
def adjust_cone_poses_to_previous_pose(previous_states, cones, timestamp):
    """
    Args:
        previous_states: nx3 array of [timestamp, x, y]
        cones: nx3 array of [x y color] - the cones message we got
        timestamp: float of time the CAMERA received the cones at
    Returns:
        cones: nx3 array of [x y color] of adjusted cones
        idx_to_remove: latest index to remove -- old poses
    """
    if previous_states is None:
        return None
    prev_states_times = previous_states[:, 0]
    # Index of the first stored pose whose timestamp is after the camera timestamp
    idx = np.searchsorted(prev_states_times, timestamp, side='right')
    # random one in a million edge cases that probably won't happen
    if idx == 0 or idx - 10 >= len(previous_states):
        return None
    # This looks gross, but it just averages the 2 closest poses to the timestamp
    # returns [avg_x, avg_y]
    closest_state = np.average(previous_states[idx-1:idx+1, 1:3], axis=0)
    # Difference between current state, and the state when we saw the cones
    delta_pose = previous_states[-1, 1:3] - closest_state
    return [np.hstack([cones[:, 0:2] - delta_pose, cones[:, 2:3]]), idx - 10]
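# Illustrative usage sketch (not part of the original module), with made-up
# numbers: the car drives along +x at 1 m/s, and cones seen by the camera at
# t = 1.05 s are shifted into the frame of the newest stored pose.
def _example_adjust_cone_poses():
    times = np.arange(0.0, 2.0, 0.1)
    previous_states = np.column_stack([times, times, np.zeros_like(times)])  # [t, x, y]
    cones = np.array([[5.0, 1.0, 0.0],
                      [6.0, -1.0, 1.0]])  # [x, y, color]
    # Returns [adjusted cones shifted by the pose change since t = 1.05, idx - 10]
    return adjust_cone_poses_to_previous_pose(previous_states, cones, timestamp=1.05)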
def ccw(A, B, C):
    # True if the points A, B, C are in counter-clockwise order
    return (C[1] - A[1]) * (B[0] - A[0]) > (B[1] - A[1]) * (C[0] - A[0])
def segments_intersect(A, B, C, D):
    # True if segment AB intersects segment CD (collinear edge cases are not handled)
    return ccw(A, C, D) != ccw(B, C, D) and ccw(A, B, C) != ccw(A, B, D)
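# Illustrative check (not part of the original module): the orientation test
# above makes segments_intersect report True for two crossing diagonals and
# False for two parallel segments; the points below are made up for the example.
def _example_segments_intersect():
    crossing = segments_intersect((0.0, 0.0), (1.0, 1.0), (0.0, 1.0), (1.0, 0.0))
    parallel = segments_intersect((0.0, 0.0), (1.0, 0.0), (0.0, 1.0), (1.0, 1.0))
    return crossing, parallel  # (True, False)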