major python update
admercs committed Feb 15, 2024
1 parent eb72d19 commit 36bb70f
Showing 190 changed files with 5,786 additions and 5,081 deletions.
98 changes: 49 additions & 49 deletions python/PythonClient.pyproj
@@ -25,90 +25,90 @@
<EnableUnmanagedDebugging>false</EnableUnmanagedDebugging>
</PropertyGroup>
<ItemGroup>
<Compile Include="autonomysim\client.py" />
<Compile Include="autonomysim\types.py">
<Compile Include="src\autonomysim\client.py" />
<Compile Include="src\autonomysim\types.py">
<SubType>Code</SubType>
</Compile>
<Compile Include="autonomysim\utils.py">
<Compile Include="src\autonomysim\utils.py">
<SubType>Code</SubType>
</Compile>
<Compile Include="autonomysim\__init__.py">
<Compile Include="src\autonomysim\__init__.py">
<SubType>Code</SubType>
</Compile>
<Compile Include="car\multi_agent_car.py">
<Compile Include="src\car\multi_agent_car.py">
<SubType>Code</SubType>
</Compile>
<Compile Include="car\setup_path.py">
<Compile Include="src\car\setup_path.py">
<SubType>Code</SubType>
</Compile>
<Compile Include="computer_vision\cv_capture.py">
<Compile Include="src\computer_vision\cv_capture.py">
<SubType>Code</SubType>
</Compile>
<Compile Include="computer_vision\cv_navigate.py">
<Compile Include="src\computer_vision\cv_navigate.py">
<SubType>Code</SubType>
</Compile>
<Compile Include="computer_vision\getpos.py" />
<Compile Include="computer_vision\ground_truth.py">
<Compile Include="src\computer_vision\getpos.py" />
<Compile Include="src\computer_vision\ground_truth.py">
<SubType>Code</SubType>
</Compile>
<Compile Include="computer_vision\setup_path.py" />
<Compile Include="multirotor\drone_lidar.py" />
<Compile Include="multirotor\gimbal.py" />
<Compile Include="multirotor\land.py" />
<Compile Include="car\pause_continue_car.py">
<Compile Include="src\computer_vision\setup_path.py" />
<Compile Include="src\multirotor\drone_lidar.py" />
<Compile Include="src\multirotor\gimbal.py" />
<Compile Include="src\multirotor\land.py" />
<Compile Include="src\car\pause_continue_car.py">
<SubType>Code</SubType>
</Compile>
<Compile Include="multirotor\clock_speed.py">
<Compile Include="src\multirotor\clock_speed.py">
<SubType>Code</SubType>
</Compile>
<Compile Include="multirotor\manual_mode_demo.py">
<Compile Include="src\multirotor\manual_mode_demo.py">
<SubType>Code</SubType>
</Compile>
<Compile Include="computer_vision\objects.py">
<Compile Include="src\computer_vision\objects.py">
<SubType>Code</SubType>
</Compile>
<Compile Include="multirotor\multi_agent_drone.py">
<Compile Include="src\multirotor\multi_agent_drone.py">
<SubType>Code</SubType>
</Compile>
<Compile Include="multirotor\navigate.py">
<Compile Include="src\multirotor\navigate.py">
<SubType>Code</SubType>
</Compile>
<Compile Include="autonomysim\pfm.py" />
<Compile Include="car\car_collision.py" />
<Compile Include="car\car_monitor.py" />
<Compile Include="car\car_stress_test.py" />
<Compile Include="car\drive_straight.py" />
<Compile Include="car\hello_car.py" />
<Compile Include="car\reset_test_car.py" />
<Compile Include="multirotor\opencv_show.py" />
<Compile Include="computer_vision\cv_mode.py" />
<Compile Include="multirotor\point_cloud.py" />
<Compile Include="computer_vision\segmentation.py" />
<Compile Include="multirotor\box.py" />
<Compile Include="multirotor\disarm.py" />
<Compile Include="multirotor\drone_stress_test.py" />
<Compile Include="multirotor\hello_drone.py" />
<Compile Include="multirotor\orbit.py" />
<Compile Include="multirotor\path.py" />
<Compile Include="multirotor\pause_continue_drone.py" />
<Compile Include="multirotor\reset_test_drone.py" />
<Compile Include="multirotor\setup_path.py" />
<Compile Include="multirotor\survey.py" />
<Compile Include="multirotor\takeoff.py" />
<Compile Include="ros\car_image_raw.py" />
<Compile Include="src\autonomysim\pfm.py" />
<Compile Include="src\car\car_collision.py" />
<Compile Include="src\car\car_monitor.py" />
<Compile Include="src\car\car_stress_test.py" />
<Compile Include="src\car\drive_straight.py" />
<Compile Include="src\car\hello_car.py" />
<Compile Include="src\car\reset_test_car.py" />
<Compile Include="src\multirotor\opencv_show.py" />
<Compile Include="src\computer_vision\cv_mode.py" />
<Compile Include="src\multirotor\point_cloud.py" />
<Compile Include="src\computer_vision\segmentation.py" />
<Compile Include="src\multirotor\box.py" />
<Compile Include="src\multirotor\disarm.py" />
<Compile Include="src\multirotor\drone_stress_test.py" />
<Compile Include="src\multirotor\hello_drone.py" />
<Compile Include="src\multirotor\orbit.py" />
<Compile Include="src\multirotor\path.py" />
<Compile Include="src\multirotor\pause_continue_drone.py" />
<Compile Include="src\multirotor\reset_test_drone.py" />
<Compile Include="src\multirotor\setup_path.py" />
<Compile Include="src\multirotor\survey.py" />
<Compile Include="src\multirotor\takeoff.py" />
<!-- <Compile Include="ros\car_image_raw.py" />
<Compile Include="ros\car_pose.py" />
<Compile Include="ros\drone_image_raw.py" />
<Compile Include="ros\setup_path.py" />
<Compile Include="ros\setup_path.py" /> -->
<Compile Include="setup.py">
<SubType>Code</SubType>
</Compile>
</ItemGroup>
<ItemGroup>
<Folder Include="autonomysim\" />
<Folder Include="car\" />
<Folder Include="computer_vision\" />
<Folder Include="multirotor\" />
<Folder Include="ros\" />
<Folder Include="src\autonomysim\" />
<Folder Include="src\car\" />
<Folder Include="src\computer_vision\" />
<Folder Include="src\multirotor\" />
<!-- <Folder Include="src\ros\" /> -->
</ItemGroup>
<ItemGroup>
<Content Include="LICENSE" />
9 changes: 6 additions & 3 deletions python/autonomysim/__init__.py
@@ -1,5 +1,8 @@
from .client import *
from .utils import *
from .types import *
# from .client import *
# from .types import *

__version__ = "1.0.0"

description = """
AutonomySim: the simulation engine for autonomous systems
"""
3 changes: 3 additions & 0 deletions python/autonomysim/ai/__init__.py
@@ -0,0 +1,3 @@
description = """
Artificial Intelligence
"""
@@ -23,26 +23,29 @@ Imitation learning uses labeled data as input to a training algorithm so that the algorithm imitates the actions of the people who recorded the data.

This diagram is represented by these files:

**cook_data.py**
**processors.py**
This file is responsible for preparing .h5 dataset files for the training procedure.
The code relies on having two adjacent folders:
'raw_data' - contains folders of data recorded by AutonomySim's recording method.
'cooked_data' - empty folder to store the .h5 files.

The flag "COOK_ALL_DATA" gives the option to include all subfolders or exclude some of them.

**train_model.py**
**trainers.py**
This file is responsible for training a model using the .h5 dataset files.
The code relies on having two adjacent folders:
'cooked_data' - contains the .h5 dataset files.
'models' - empty folder to store the generated models.

The file preprocesses the data, adds augmentations, and creates a neural network model that predicts the next steering angle.

**drive_model.py**
**agents.py**
This file connects to the simulation in order to load a trained model and drive using it.
Using the predicted steering value, the code calculates the related control parameters and maintains driving at a steady velocity.

**generators.py**
...

## Training Tips

We recommend using augmentation and recording techniques.
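
As a rough illustration of the data-cooking step described above (not part of this commit), here is a minimal sketch. It assumes `h5py` and `Pillow` are installed and that each recording folder contains an `images/` directory plus a tab-separated `airsim_rec.txt` log with `ImageFile` and `Steering` columns; those file and column names are assumptions, so adjust them to the actual recording format.

```python
# Hypothetical data-cooking sketch: convert recorded folders into .h5 datasets.
# The log name ("airsim_rec.txt") and column names ("ImageFile", "Steering")
# are assumptions; adapt them to your recording layout.
import glob
import os

import h5py
import numpy as np
from PIL import Image


def cook_folder(raw_folder, out_path):
    log_path = os.path.join(raw_folder, "airsim_rec.txt")
    records = np.genfromtxt(
        log_path, delimiter="\t", names=True, dtype=None, encoding="utf-8"
    )
    images, labels = [], []
    for row in records:
        image_path = os.path.join(raw_folder, "images", str(row["ImageFile"]))
        image = np.asarray(Image.open(image_path).convert("RGB"), dtype=np.uint8)
        images.append(image)
        labels.append(float(row["Steering"]))
    with h5py.File(out_path, "w") as f:
        f.create_dataset("image", data=np.stack(images), compression="gzip")
        f.create_dataset("label", data=np.asarray(labels, dtype=np.float32))


# Cook every recording under raw_data/ into one .h5 file per folder.
for folder in sorted(glob.glob("raw_data/*/")):
    name = os.path.basename(os.path.dirname(folder))
    cook_folder(folder, os.path.join("cooked_data", name + ".h5"))
```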
59 changes: 59 additions & 0 deletions python/autonomysim/ai/imitation/__init__.py
@@ -0,0 +1,59 @@
description = """
Imitation Learning
"""

description_long = """
# Imitation Learning
This section is about training a model to steer our Formula car using imitation learning.
The code in this section is based on the [Autonomous Driving Cookbook](https://github.com/nervosys/AutonomousDrivingCookbook/tree/master/AutonomySimE2EDeepLearning) from AutonomySim; it is highly recommended to read that tutorial first.
## Prerequisites
* Operating system: Windows 10
* GPU: Nvidia GTX 1080 or higher (recommended)
* Software: Unreal Engine 4.24 and Visual Studio 2019 (see [upgrade instructions](../../docs/unreal_upgrade.md))
* Development: CUDA 9.0 and Python 3.5.
* Python libraries: Keras 2.1.2, TensorFlow 1.6.0.
* Note: newer versions of Keras or TensorFlow may work but can cause syntax errors.
## What's inside
![imitation learning](https://github.com/nervosys/AutonomySim/wiki/images/technion/imitation_learning_example.gif)
*Driving in simulation using trained imitation learning model, based on recorded data*
Imitation learning uses labeled data as input to a training algorithm so that the algorithm imitates the actions of the people who recorded the data.
![diagram](https://github.com/nervosys/AutonomySim/wiki/images/technion/imitation_diagram.PNG)
This diagram is represented by these files:
**cook_data.py**
This file is responsible for preparing .h5 dataset files for the training procedure.
The code relies on having two adjacent folders:
'raw_data' - contains folders of data recorded by AutonomySim's recording method.
'cooked_data' - empty folder to store the .h5 files.
The flag "COOK_ALL_DATA" gives the option to include all subfolders or exclude some of them.
**train_model.py**
This file is responsible for training a model using the .h5 dataset files.
The code relies on having two adjacent folders:
'cooked_data' - contains the .h5 dataset files.
'models' - empty folder to store the generated models.
The file preprocesses the data, adds augmentations, and creates a neural network model that predicts the next steering angle.
**drive_model.py**
This file connects to the simulation in order to load a trained model and drive using it.
Using the predicted steering value, the code calculates the related control parameters and maintains driving at a steady velocity.
## Training Tips
We recommend using augmentation and recording techniques.
Here are two example methods:
- [CycleLight](https://github.com/nervosys/AutonomySim/wiki/graphic_features) - Animation of a daylight cycle over a changeable, potentially very short, period of time.
- Shifted images - Altering the camera's position to the right or left of the car so that it records images in extreme conditions. To simulate driving back to the center from those extreme positions, post-process the recorded steering angle accordingly (manually).
"""
98 changes: 98 additions & 0 deletions python/autonomysim/ai/imitation/agents.py
@@ -0,0 +1,98 @@
import os

os.environ["TF_CPP_MIN_LOG_LEVEL"] = "2"

import time
import numpy as np
from keras.models import load_model

from autonomysim.clients import CarClient, CarControls
from autonomysim.types import ImageRequest, ImageType


# Trained model path
MODEL_PATH = "./models/example_model.h5"


class CarAgent:
    model = None

    def __init__(self) -> None:
        pass

    def get_image(self, client):
        """
        Get an image from the AutonomySim client.
        """
        image_response = client.simGetImages(
            [ImageRequest("0", ImageType.Scene, False, False)]
        )[0]
        # Decode the raw RGB bytes (np.frombuffer replaces the deprecated np.fromstring).
        image1d = np.frombuffer(image_response.image_data_uint8, dtype=np.uint8)
        image_rgb = image1d.reshape(image_response.height, image_response.width, 3)
        # Keep all three color channels so the crop matches the (66, 200, 3) model input.
        return image_rgb[78:144, 27:227, 0:3].astype(float)

    def load(self, model_path=MODEL_PATH):
        """
        Load the model.
        """
        self.model = load_model(model_path)

    def run(self):
        """
        Run the model.
        """
        # Connect to AutonomySim
        client = CarClient()
        client.confirmConnection()
        client.enableApiControl(True)
        car_controls = CarControls()

        # Start driving
        car_controls.steering = 0
        car_controls.throttle = 0
        car_controls.brake = 0
        client.setCarControls(car_controls)

        # Initialize image buffer
        image_buf = np.zeros((1, 66, 200, 3))

        while True:
            # Update throttle value according to steering angle
            if abs(car_controls.steering) <= 1.0:
                car_controls.throttle = 0.8 - (0.4 * abs(car_controls.steering))
            else:
                car_controls.throttle = 0.4

            image_buf[0] = self.get_image(client)
            image_buf[0] /= 255  # normalization

            start_time = time.time()

            # Prediction
            model_output = self.model.predict([image_buf])

            end_time = time.time()
            received_output = model_output[0][0]

            # Rescale prediction to [-1,1] and factor by 0.82 for drive smoothness
            car_controls.steering = round(
                (0.82 * (float((model_output[0][0] * 2.0) - 1))), 2
            )

            # Print progress
            print(
                "Sending steering = {0}, throttle = {1}, prediction time = {2}".format(
                    received_output, car_controls.throttle, str(end_time - start_time)
                )
            )

            # Update next car state
            client.setCarControls(car_controls)

            # Wait a bit between iterations
            time.sleep(0.05)

        client.enableApiControl(False)
        return None
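
A possible way to drive with the new CarAgent class, assuming a trained model exists at ./models/example_model.h5 and a car simulation is running; the import path simply mirrors the file location python/autonomysim/ai/imitation/agents.py.

```python
# Minimal usage sketch for the CarAgent class added above.
from autonomysim.ai.imitation.agents import CarAgent

agent = CarAgent()
agent.load()  # loads MODEL_PATH ("./models/example_model.h5") by default
agent.run()   # drives until the process is interrupted
```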