Merge pull request #30 from dora-rs/dora-daemon

Update dora-drives to new dora-daemon

haixuanTao authored Mar 27, 2023
2 parents b3f79d5 + 042c5c8 commit 6ecc5e6
Showing 37 changed files with 795 additions and 117 deletions.
4 changes: 2 additions & 2 deletions Dockerfile
@@ -117,8 +117,8 @@ RUN sudo chown -R dora:dora .
# RUN conda activate dora3.7 && python3 -c "from strong_sort import StrongSORT; import torch; StrongSORT('osnet_x0_25_msmt17.pt', torch.device('cuda'), False)"
# RUN conda activate dora3.7 && python3 -c "import yolov7_tt100k"

RUN sudo wget https://github.com/dora-rs/dora/releases/download/v0.1.3/dora-v0.1.3-x86_64-Linux.zip && sudo unzip dora-v0.1.3-x86_64-Linux.zip -d /bin && sudo mv /bin/iceoryx/iox-roudi /bin
RUN conda activate dora3.7 && python3 -m pip install dora-rs patchelf --upgrade
RUN sudo wget https://github.com/dora-rs/dora/releases/download/v0.2.0/dora-v0.2.0-x86_64-Linux.zip && sudo unzip dora-v0.2.0-x86_64-Linux.zip -d /bin
RUN conda activate dora3.7 && python3 -m pip install dora-rs==0.2.0 patchelf --upgrade

WORKDIR /home/dora/workspace/dora-drives
COPY . .
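
The base image now ships the dora v0.2.0 daemon binaries and pins the matching dora-rs Python package. A minimal sketch of a version sanity check for the built image, assuming setuptools (pkg_resources) is available in the dora3.7 environment; the script name is hypothetical:

# version_check.py -- hypothetical helper, not part of this commit.
# Confirms the pip-installed dora-rs package matches the 0.2.0 daemon
# binaries that the Dockerfile unzips into /bin.
import pkg_resources  # provided by setuptools, assumed present in the env

installed = pkg_resources.get_distribution("dora-rs").version
assert installed == "0.2.0", f"expected dora-rs 0.2.0, found {installed}"
print(f"dora-rs {installed} matches the pinned release")
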
2 changes: 1 addition & 1 deletion carla/_hd_map.py
@@ -29,7 +29,7 @@ def __init__(self, simulator_map, _log_file=None):
self._map = simulator_map
# Setup global planner.
self._grp = GlobalRoutePlanner(
self._map, 0.1
self._map, 1.0
) # Distance between waypoints

# self._grp.setup()
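
The only functional change in this file is the waypoint sampling distance handed to GlobalRoutePlanner, raised from 0.1 m to 1.0 m, so planned routes contain roughly ten times fewer waypoints. A rough sketch of how that spacing parameter is consumed, assuming the CARLA-style GlobalRoutePlanner(map, sampling_resolution) constructor used above and its trace_route method; the import path and the plan_route helper are illustrative:

# Sketch only: shows the effect of the sampling distance, assuming the
# CARLA agents package layout and its trace_route(origin, destination)
# method, which returns (waypoint, road_option) pairs.
from agents.navigation.global_route_planner import GlobalRoutePlanner

def plan_route(carla_map, start_location, goal_location, spacing_m=1.0):
    # spacing_m is the distance in meters between consecutive waypoints;
    # this commit raises it from 0.1 to 1.0, so routes carry ~10x fewer points.
    planner = GlobalRoutePlanner(carla_map, spacing_m)
    route = planner.trace_route(start_location, goal_location)
    return [waypoint for waypoint, _road_option in route]
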
10 changes: 9 additions & 1 deletion carla/carla_control_op.py
@@ -12,7 +12,6 @@
client.set_timeout(30.0)



def radians_to_steer(rad: float, steer_gain: float):
"""Converts radians to steer input.
@@ -35,6 +34,15 @@ class Operator:
def __init__(self):
self.vehicle_id = None

def on_event(
self,
dora_event: dict,
send_output: Callable[[str, bytes], None],
) -> DoraStatus:
if dora_event["type"] == "INPUT":
return self.on_input(dora_event, send_output)
return DoraStatus.CONTINUE

def on_input(
self,
dora_input: dict,
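
With the new dora-daemon, operators receive generic events rather than inputs only, so each operator in this PR gains an on_event entry point that forwards INPUT events to the existing on_input handler and lets everything else pass through. A stripped-down sketch of that dispatch pattern, assuming the DoraStatus import and the (dora_event, send_output) signature used throughout this repository; the operator body is a placeholder:

# Minimal sketch of the on_event -> on_input dispatch added to the operators
# in this PR. DoraStatus and the event dict layout follow the usage in this
# repository; the handlers below are placeholders, not the real operators.
from typing import Callable

from dora import DoraStatus


class Operator:
    def on_event(
        self,
        dora_event: dict,
        send_output: Callable[[str, bytes], None],
    ) -> DoraStatus:
        # The daemon can emit non-INPUT events as well; only INPUT events
        # are routed to the per-input handler, everything else continues.
        if dora_event["type"] == "INPUT":
            return self.on_input(dora_event, send_output)
        return DoraStatus.CONTINUE

    def on_input(
        self,
        dora_input: dict,
        send_output: Callable[[str, bytes], None],
    ) -> DoraStatus:
        # Placeholder: real operators act on the input id and payload here.
        return DoraStatus.CONTINUE
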
17 changes: 13 additions & 4 deletions carla/carla_gps_op.py
@@ -10,7 +10,6 @@
from carla import Map

# Planning general
TARGET_SPEED = 7.0
NUM_WAYPOINTS_AHEAD = 120
GOAL_LOCATION = [234, 59, 39]
OBJECTIVE_MIN_DISTANCE = 0
@@ -37,6 +36,15 @@ def __init__(self):
self.completed_waypoints = 0
self.waypoints_array = np.array([])

def on_event(
self,
dora_event: dict,
send_output: Callable[[str, bytes], None],
) -> DoraStatus:
if dora_event["type"] == "INPUT":
return self.on_input(dora_event, send_output)
return DoraStatus.CONTINUE

def on_input(
self,
dora_input: dict,
@@ -99,7 +107,7 @@ def on_input(
index : index + NUM_WAYPOINTS_AHEAD
]

if len(self.waypoints) < NUM_WAYPOINTS_AHEAD / 2:
if len(self.waypoints) == 0:

[x, y, z, rx, ry, rz, rw] = self.position
[pitch, roll, yaw] = R.from_quat([rx, ry, rz, rw]).as_euler(
@@ -121,8 +129,9 @@
diff_angle = np.arctan2(
np.sin(angle - yaw), np.cos(angle - yaw)
)
if np.abs(diff_angle) > np.pi * 2 / 3:
print("Error in computation of waypoints")
if np.abs(diff_angle) > np.pi / 2:
print("Error in computation of waypoints.")
print("The next target waypoint requires to make a 180 degrees turn.")
print(f"target waypoint: {waypoints[0]}")
print(f"position: {[x, y, z]}")
print(f"goal location: {self._goal_location}")
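
Besides the new on_event dispatch, this operator now replans only when the waypoint list is empty, and the sanity check flags the next waypoint when it lies more than 90 degrees (previously 120 degrees) off the vehicle's yaw. A small worked example of that heading test, using the same arctan2 wrapping as the diff; the helper name and the coordinates are illustrative only:

# Worked example of the heading check above: the bearing to the next
# waypoint is compared against the vehicle yaw, wrapped to (-pi, pi].
import numpy as np

def requires_u_turn(waypoint_xy, position_xy, yaw):
    """Return True when reaching waypoint_xy from position_xy at heading
    `yaw` would need a turn of more than 90 degrees."""
    dx, dy = np.array(waypoint_xy) - np.array(position_xy)
    angle = np.arctan2(dy, dx)                    # bearing to the waypoint
    diff_angle = np.arctan2(np.sin(angle - yaw),  # wrap the difference
                            np.cos(angle - yaw))
    return np.abs(diff_angle) > np.pi / 2

# Vehicle at the origin heading along +x, waypoint directly behind it:
print(requires_u_turn((-5.0, 0.0), (0.0, 0.0), yaw=0.0))  # True
# Waypoint ahead and slightly to the left stays within the 90 degree cone:
print(requires_u_turn((5.0, 1.0), (0.0, 0.0), yaw=0.0))   # False
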
309 changes: 309 additions & 0 deletions carla/human_agent.py
@@ -0,0 +1,309 @@
#!/usr/bin/env python

# This work is licensed under the terms of the MIT license.
# For a copy, see <https://opensource.org/licenses/MIT>.

"""
This module provides a human agent to control the ego vehicle via keyboard
"""

import numpy as np
import json

try:
import pygame
from pygame.locals import K_DOWN
from pygame.locals import K_LEFT
from pygame.locals import K_RIGHT
from pygame.locals import K_SPACE
from pygame.locals import K_UP
from pygame.locals import K_a
from pygame.locals import K_d
from pygame.locals import K_s
from pygame.locals import K_w
from pygame.locals import K_q
except ImportError:
raise RuntimeError('cannot import pygame, make sure pygame package is installed')

import carla

from autoagents.autonomous_agent import AutonomousAgent


def get_entry_point():
return 'HumanAgent'

class HumanInterface(object):

"""
Class to control a vehicle manually for debugging purposes
"""

def __init__(self, width, height, side_scale, left_mirror=False, right_mirror=False):
self._width = width
self._height = height
self._scale = side_scale
self._surface = None

self._left_mirror = left_mirror
self._right_mirror = right_mirror

pygame.init()
pygame.font.init()
self._clock = pygame.time.Clock()
self._display = pygame.display.set_mode((self._width, self._height), pygame.HWSURFACE | pygame.DOUBLEBUF)
pygame.display.set_caption("Human Agent")

def run_interface(self, input_data):
"""
Run the GUI
"""

# Process sensor data
image_center = input_data['camera.center'][1][:, :, -2::-1]
self._surface = pygame.surfarray.make_surface(image_center.swapaxes(0, 1))

# Add the left mirror
if self._left_mirror:
image_left = input_data['Left'][1][:, :, -2::-1]
left_surface = pygame.surfarray.make_surface(image_left.swapaxes(0, 1))
self._surface.blit(left_surface, (0, (1 - self._scale) * self._height))

# Add the right mirror
if self._right_mirror:
image_right = input_data['Right'][1][:, :, -2::-1]
right_surface = pygame.surfarray.make_surface(image_right.swapaxes(0, 1))
self._surface.blit(right_surface, ((1 - self._scale) * self._width, (1 - self._scale) * self._height))

# Display image
if self._surface is not None:
self._display.blit(self._surface, (0, 0))
pygame.display.flip()

def set_black_screen(self):
"""Set the surface to black"""
black_array = np.zeros([self._width, self._height])
self._surface = pygame.surfarray.make_surface(black_array)
if self._surface is not None:
self._display.blit(self._surface, (0, 0))
pygame.display.flip()

def _quit(self):
pygame.quit()


class HumanAgent(AutonomousAgent):

"""
Human agent to control the ego vehicle via keyboard
"""

current_control = None
agent_engaged = False

def setup(self, destination, path_to_conf_file):
"""
Setup the agent parameters
"""

self.agent_engaged = False
self.camera_width = 1280
self.camera_height = 720
self._side_scale = 0.3
self._left_mirror = False
self._right_mirror = False

self._hic = HumanInterface(
self.camera_width,
self.camera_height,
self._side_scale,
self._left_mirror,
self._right_mirror
)
self._controller = KeyboardControl(path_to_conf_file)
self._prev_timestamp = 0

def sensors(self):
"""
Define the sensor suite required by the agent
:return: a list containing the required sensors in the following format:
[
{'type': 'sensor.camera.rgb', 'x': 0.7, 'y': -0.4, 'z': 1.60, 'roll': 0.0, 'pitch': 0.0, 'yaw': 0.0,
'width': 300, 'height': 200, 'fov': 100, 'id': 'Left'},
{'type': 'sensor.camera.rgb', 'x': 0.7, 'y': 0.4, 'z': 1.60, 'roll': 0.0, 'pitch': 0.0, 'yaw': 0.0,
'width': 300, 'height': 200, 'fov': 100, 'id': 'Right'},
{'type': 'sensor.lidar.ray_cast', 'x': 0.7, 'y': 0.0, 'z': 1.60, 'yaw': 0.0, 'pitch': 0.0, 'roll': 0.0,
'id': 'LIDAR'}
]
"""

sensors = [
{'type': 'sensor.camera.rgb', 'x': 0.7, 'y': 0.0, 'z': 1.60, 'roll': 0.0, 'pitch': 0.0, 'yaw': 0.0,
'width': self.camera_width, 'height': self.camera_height, 'fov': 100, 'id': 'camera.center'},
]

if self._left_mirror:
sensors.append(
{'type': 'sensor.camera.rgb', 'x': 0.7, 'y': -1.0, 'z': 1, 'roll': 0.0, 'pitch': 0.0, 'yaw': 210.0,
'width': self.camera_width * self._side_scale, 'height': self.camera_height * self._side_scale,
'fov': 100, 'id': 'Left'})

if self._right_mirror:
sensors.append(
{'type': 'sensor.camera.rgb', 'x': 0.7, 'y': 1.0, 'z': 1, 'roll': 0.0, 'pitch': 0.0, 'yaw': 150.0,
'width': self.camera_width * self._side_scale, 'height': self.camera_height * self._side_scale,
'fov': 100, 'id': 'Right'})

return sensors

def run_step(self, input_data, timestamp):
"""
Execute one step of navigation.
"""
self.agent_engaged = True
self._hic.run_interface(input_data)

control = self._controller.parse_events(timestamp - self._prev_timestamp)
self._prev_timestamp = timestamp

return control

def destroy(self):
"""
Cleanup
"""
self._hic.set_black_screen()
self._hic._quit = True


class KeyboardControl(object):

"""
Keyboard control for the human agent
"""

def __init__(self, path_to_conf_file):
"""
Init
"""
self._control = carla.VehicleControl()
self._steer_cache = 0.0
self._clock = pygame.time.Clock()

# Get the mode
if path_to_conf_file:

with (open(path_to_conf_file, "r")) as f:
lines = f.read().split("\n")
self._mode = lines[0].split(" ")[1]
self._endpoint = lines[1].split(" ")[1]

# Get the needed vars
if self._mode == "log":
self._log_data = {'records': []}

elif self._mode == "playback":
self._index = 0
self._control_list = []

with open(self._endpoint) as fd:
try:
self._records = json.load(fd)
self._json_to_control()
except json.JSONDecodeError:
pass
else:
self._mode = "normal"
self._endpoint = None

def _json_to_control(self):

# transform strs into VehicleControl commands
for entry in self._records['records']:
control = carla.VehicleControl(throttle=entry['control']['throttle'],
steer=entry['control']['steer'],
brake=entry['control']['brake'],
hand_brake=entry['control']['hand_brake'],
reverse=entry['control']['reverse'],
manual_gear_shift=entry['control']['manual_gear_shift'],
gear=entry['control']['gear'])
self._control_list.append(control)

def parse_events(self, timestamp):
"""
Parse the keyboard events and set the vehicle controls accordingly
"""
# Move the vehicle
if self._mode == "playback":
self._parse_json_control()
else:
self._parse_vehicle_keys(pygame.key.get_pressed(), timestamp*1000)

# Record the control
if self._mode == "log":
self._record_control()

return self._control

def _parse_vehicle_keys(self, keys, milliseconds):
"""
Calculate new vehicle controls based on input keys
"""

for event in pygame.event.get():
if event.type == pygame.QUIT:
return
elif event.type == pygame.KEYUP:
if event.key == K_q:
self._control.gear = 1 if self._control.reverse else -1
self._control.reverse = self._control.gear < 0

if keys[K_UP] or keys[K_w]:
self._control.throttle = 0.8
else:
self._control.throttle = 0.0

steer_increment = 3e-4 * milliseconds
if keys[K_LEFT] or keys[K_a]:
self._steer_cache -= steer_increment
elif keys[K_RIGHT] or keys[K_d]:
self._steer_cache += steer_increment
else:
self._steer_cache = 0.0

self._control.steer = round(self._steer_cache, 1)
self._control.brake = 1.0 if keys[K_DOWN] or keys[K_s] else 0.0
self._control.hand_brake = keys[K_SPACE]

def _parse_json_control(self):

if self._index < len(self._control_list):
self._control = self._control_list[self._index]
self._index += 1
else:
print("JSON file has no more entries")

def _record_control(self):
new_record = {
'control': {
'throttle': self._control.throttle,
'steer': self._control.steer,
'brake': self._control.brake,
'hand_brake': self._control.hand_brake,
'reverse': self._control.reverse,
'manual_gear_shift': self._control.manual_gear_shift,
'gear': self._control.gear
}
}

self._log_data['records'].append(new_record)

def __del__(self):
# Get ready to log user commands
if self._mode == "log" and self._log_data:
with open(self._endpoint, 'w') as fd:
json.dump(self._log_data, fd, indent=4, sort_keys=True)
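
human_agent.py is new: it hooks a pygame window and keyboard input into the AutonomousAgent interface so the ego vehicle can be driven by hand, and KeyboardControl can additionally log the resulting controls to JSON or play a recorded file back. A short sketch of the configuration and record formats its __init__ and _json_to_control expect, with hypothetical file names:

# Sketch of the files KeyboardControl consumes; the paths are hypothetical.
# The config file is parsed line by line as "<key> <value>", and the playback
# endpoint is a JSON document with a top-level "records" list.
import json

# human_agent.conf -- first line selects the mode, second the endpoint:
#   mode playback
#   endpoint controls.json

record = {
    "control": {
        "throttle": 0.8,
        "steer": 0.0,
        "brake": 0.0,
        "hand_brake": False,
        "reverse": False,
        "manual_gear_shift": False,
        "gear": 1,
    }
}

with open("controls.json", "w") as fd:
    # One entry per simulation step, replayed in order by _parse_json_control.
    json.dump({"records": [record]}, fd, indent=4, sort_keys=True)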