Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Re-use script from osi repo #58

Closed
wants to merge 15 commits into from
Closed
2 changes: 1 addition & 1 deletion open-simulation-interface
Submodule open-simulation-interface updated 64 files
+24 −0 .github/.pyspelling.yml
+390 −0 .github/spelling_custom_words_en_US.txt
+1 −1 .github/workflows/antora-generator.yml
+124 −36 .github/workflows/protobuf.yml
+17 −9 .github/workflows/release.yml
+2 −2 .gitignore
+3 −1 CMakeLists.txt
+1 −0 MANIFEST.in
+1 −1 VERSION
+4 −2 doc/architecture/architecture_overview.adoc
+0 −9 doc/architecture/feature_data.adoc
+0 −22 doc/architecture/formatting_scripts.adoc
+1 −0 doc/architecture/reference_points_coordinate_systems.adoc
+4 −0 doc/architecture/sensor_data.adoc
+6 −5 doc/architecture/trace_file_formats.adoc
+ doc/images/OSI_Planned_Route.png
+ doc/images/OSI_Route_Segment.png
+ doc/images/osi-streaming-principle.png
+0 −2 doc/open-simulation-interface_user_guide.adoc
+0 −244 format/OSITrace.py
+0 −63 format/osi2read.py
+0 −75 format/txt2osi.py
+66 −0 osi3trace/osi2read.py
+168 −0 osi3trace/osi_trace.py
+157 −8 osi_common.proto
+6 −2 osi_detectedobject.proto
+2 −2 osi_environment.proto
+34 −12 osi_featuredata.proto
+60 −2 osi_groundtruth.proto
+8 −3 osi_hostvehicledata.proto
+6 −5 osi_lane.proto
+5 −1 osi_logicaldetectiondata.proto
+140 −6 osi_logicallane.proto
+8 −0 osi_motionrequest.proto
+70 −15 osi_object.proto
+7 −6 osi_referenceline.proto
+5 −1 osi_roadmarking.proto
+138 −0 osi_route.proto
+50 −0 osi_sensordata.proto
+16 −2 osi_sensorview.proto
+9 −1 osi_sensorviewconfiguration.proto
+9 −1 osi_streamingupdate.proto
+26 −14 osi_trafficcommand.proto
+12 −0 osi_trafficcommandupdate.proto
+2 −1 osi_trafficlight.proto
+12 −11 osi_trafficsign.proto
+9 −1 osi_trafficupdate.proto
+35 −0 pyproject.toml
+2 −0 requirements_tests.txt
+2 −1 rules.yml
+111 −78 setup.py
+107 −44 tests/test_comment_type.py
+26 −9 tests/test_doxygen_output.py
+18 −5 tests/test_invalid_comment.py
+76 −27 tests/test_invalid_enum.py
+87 −47 tests/test_invalid_html.py
+168 −59 tests/test_invalid_message.py
+8 −3 tests/test_invalid_punctuation.py
+7 −2 tests/test_invalid_tabs.py
+8 −3 tests/test_newline.py
+23 −4 tests/test_non_ascii.py
+6 −5 tests/test_osi_trace.py
+77 −22 tests/test_rules.py
+39 −10 tests/test_units.py
123 changes: 64 additions & 59 deletions osivalidator/osi_general_validator.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,8 @@
import argparse
from multiprocessing import Pool, Manager
from tqdm import tqdm
import os, sys
import os
import sys

sys.path.append(os.path.join(os.path.dirname(__file__), "."))

Expand All @@ -14,13 +15,24 @@
import osi_rules
import osi_validator_logger
import osi_rules_checker
import osi_trace
import linked_proto_field
from format.OSITrace import OSITrace
except Exception as e:
print(
"Make sure you have installed the requirements with 'pip install -r requirements.txt'!"
"Make sure you have installed the requirements with 'python3 -m pip install -r requirements.txt'!"
)
print(e)

# Global variables
manager_ = Manager()
logs_ = manager_.list()
timestamp_analyzed_ = manager_.list()
logger_ = osi_validator_logger.OSIValidatorLogger()
validation_rules_ = osi_rules.OSIRules()
id_to_ts_ = {}
bar_suffix_ = "%(index)d/%(max)d [%(elapsed_td)s]"
message_cache_ = {}


def check_positive_int(value):
ivalue = int(value)
Expand Down Expand Up @@ -108,7 +120,8 @@ def command_line_arguments():
parser.add_argument(
"--buffer",
"-bu",
help="Set the buffer size to retrieve OSI messages from trace file. Set it to 0 if you do not want to use buffering at all.",
help="Set the buffer size to retrieve OSI messages from trace file."
"Set it to 0 if you do not want to use buffering at all.",
default=1000000,
type=check_positive_int,
required=False,
Expand All @@ -117,16 +130,6 @@ def command_line_arguments():
return parser.parse_args()


MANAGER = Manager()
LOGS = MANAGER.list()
TIMESTAMP_ANALYZED = MANAGER.list()
LOGGER = osi_validator_logger.OSIValidatorLogger()
VALIDATION_RULES = osi_rules.OSIRules()
ID_TO_TS = {}
BAR_SUFFIX = "%(index)d/%(max)d [%(elapsed_td)s]"
MESSAGE_CACHE = {}


def main():
"""Main method"""

Expand All @@ -139,43 +142,43 @@ def main():
if not os.path.exists(directory):
os.makedirs(directory)

LOGGER.init(args.debug, args.verbose, directory)
logger_.init(args.debug, args.verbose, directory)

# Read data
print("Reading data ...")
DATA = osi_trace.OSITrace(buffer_size=args.buffer)
DATA.from_file(path=args.data, type_name=args.type, max_index=args.timesteps)
trace_data = OSITrace(buffer_size=args.buffer)
trace_data.from_file(path=args.data, type_name=args.type, max_index=args.timesteps)

if DATA.timestep_count < args.timesteps:
if trace_data.timestep_count < args.timesteps:
args.timesteps = -1

# Collect Validation Rules
print("Collect validation rules ...")
VALIDATION_RULES.from_yaml_directory(args.rules)
validation_rules_.from_yaml_directory(args.rules)

# Pass all timesteps or the number specified
if args.timesteps != -1:
max_timestep = args.timesteps
LOGGER.info(None, f"Pass the {max_timestep} first timesteps")
logger_.info(None, f"Pass the {max_timestep} first timesteps")
else:
LOGGER.info(None, "Pass all timesteps")
max_timestep = DATA.timestep_count
logger_.info(None, "Pass all timesteps")
max_timestep = trace_data.timestep_count

    # Dividing into several blasts so as not to overload the memory
max_timestep_blast = 0

while max_timestep_blast < max_timestep:
# Clear log queue
LOGS = MANAGER.list()
logs_ = manager_.list()

# Increment the max-timestep to analyze
max_timestep_blast += args.blast
first_of_blast = max_timestep_blast - args.blast
last_of_blast = min(max_timestep_blast, max_timestep)

# Cache messages
DATA.cache_messages_in_index_range(first_of_blast, last_of_blast)
MESSAGE_CACHE.update(DATA.message_cache)
trace_data.cache_messages_in_index_range(first_of_blast, last_of_blast)
message_cache_.update(trace_data.message_cache)

if args.parallel:
# Launch parallel computation
Expand All @@ -202,9 +205,9 @@ def main():
except Exception as e:
print(str(e))

MESSAGE_CACHE.clear()
message_cache_.clear()

DATA.trace_file.close()
trace_data.trace_file.close()
display_results()


Expand All @@ -217,85 +220,87 @@ def close_pool(pool):

def process_timestep(timestep, data_type):
"""Process one timestep"""
message = MESSAGE_CACHE[timestep]
rule_checker = osi_rules_checker.OSIRulesChecker(LOGGER)
message = linked_proto_field.LinkedProtoField(
message_cache_[timestep], name=data_type
)
rule_checker = osi_rules_checker.OSIRulesChecker(logger_)
timestamp = rule_checker.set_timestamp(message.value.timestamp, timestep)
ID_TO_TS[timestep] = timestamp
id_to_ts_[timestep] = timestamp

LOGGER.log_messages[timestep] = []
LOGGER.debug_messages[timestep] = []
LOGGER.info(None, f"Analyze message of timestamp {timestamp}", False)
logger_.log_messages[timestep] = []
logger_.debug_messages[timestep] = []
logger_.info(None, f"Analyze message of timestamp {timestamp}", False)

with MANAGER.Lock():
if timestamp in TIMESTAMP_ANALYZED:
LOGGER.error(timestep, f"Timestamp already exists")
TIMESTAMP_ANALYZED.append(timestamp)
with manager_.Lock():
if timestamp in timestamp_analyzed_:
logger_.error(timestep, f"Timestamp already exists")
timestamp_analyzed_.append(timestamp)

# Check common rules
getattr(rule_checker, "is_valid")(
message, VALIDATION_RULES.get_rules().get_type(data_type)
message, validation_rules_.get_rules().get_type(data_type)
)

LOGS.extend(LOGGER.log_messages[timestep])
logs_.extend(logger_.log_messages[timestep])


def get_message_count(data, data_type="SensorView", from_message=0, to_message=None):
# Wrapper function for external use in combination with process_timestep
timesteps = None
time_steps = None

if from_message != 0:
print("Currently only validation from the first frame (0) is supported!")

if to_message is not None:
timesteps = int(to_message)
time_steps = int(to_message)

# Read data
print("Reading data ...")
DATA = osi_trace.OSITrace(buffer_size=1000000)
DATA.from_file(path=data, type_name=data_type, max_index=timesteps)
trace_data = OSITrace(buffer_size=1000000)
trace_data.from_file(path=data, type_name=data_type, max_index=time_steps)

if DATA.timestep_count < timesteps:
timesteps = -1
if trace_data.timestep_count < time_steps:
time_steps = -1

# Collect Validation Rules
print("Collect validation rules ...")
try:
VALIDATION_RULES.from_yaml_directory("osi-validation/rules/")
validation_rules_.from_yaml_directory("osi-validation/rules/")
except Exception as e:
print("Error collecting validation rules:", e)

# Pass all timesteps or the number specified
if timesteps != -1:
max_timestep = timesteps
LOGGER.info(None, f"Pass the {max_timestep} first timesteps")
# Pass all time_steps or the number specified
if time_steps != -1:
max_timestep = time_steps
logger_.info(None, f"Pass the {max_timestep} first time_steps")
else:
LOGGER.info(None, "Pass all timesteps")
max_timestep = DATA.timestep_count
logger_.info(None, "Pass all time_steps")
max_timestep = trace_data.timestep_count

    # Dividing into several blasts so as not to overload the memory
max_timestep_blast = 0

while max_timestep_blast < max_timestep:
# Clear log queue
LOGS[:] = []
logs_[:] = []

# Increment the max-timestep to analyze
max_timestep_blast += 500
first_of_blast = max_timestep_blast - 500
last_of_blast = min(max_timestep_blast, max_timestep)

# Cache messages
DATA.cache_messages_in_index_range(first_of_blast, last_of_blast)
MESSAGE_CACHE.update(DATA.message_cache)
trace_data.cache_messages_in_index_range(first_of_blast, last_of_blast)
message_cache_.update(trace_data.message_cache)

DATA.trace_file.close()
trace_data.trace_file.close()

return len(MESSAGE_CACHE)
return len(message_cache_)


# Synthetize Logs
# Synthesize Logs
def display_results():
return LOGGER.synthetize_results(LOGS)
return logger_.synthetize_results(logs_)


if __name__ == "__main__":
Expand Down
Loading
Loading