From 02ebd75ce2bdb7652e9ed62b906f0909bf419b4e Mon Sep 17 00:00:00 2001 From: Mihaela Radian Date: Wed, 12 Nov 2025 16:33:37 +0200 Subject: [PATCH 01/24] MX-17308 Added generic classes --- .flake8 | 2 + .pre-commit-config.yaml | 20 +++ README.md | 16 +++ multiversx_cross_shard_analysis/__init__.py | 0 .../gather_data.py | 0 .../header_checker.py | 18 +++ multiversx_logs_parser_tools/__init__.py | 0 .../aho-corasik-checker | 106 ++++++++++++++++ .../archive_handler.py | 74 +++++++++++ multiversx_logs_parser_tools/entry_parser.py | 81 ++++++++++++ multiversx_logs_parser_tools/helpers.py | 20 +++ .../node_logs_checker.py | 117 ++++++++++++++++++ pyrightconfig.json | 16 +++ requirements-dev.txt | 4 + requirements.txt | 0 15 files changed, 474 insertions(+) create mode 100644 .flake8 create mode 100644 .pre-commit-config.yaml create mode 100644 multiversx_cross_shard_analysis/__init__.py create mode 100644 multiversx_cross_shard_analysis/gather_data.py create mode 100644 multiversx_cross_shard_analysis/header_checker.py create mode 100644 multiversx_logs_parser_tools/__init__.py create mode 100644 multiversx_logs_parser_tools/aho-corasik-checker create mode 100644 multiversx_logs_parser_tools/archive_handler.py create mode 100644 multiversx_logs_parser_tools/entry_parser.py create mode 100644 multiversx_logs_parser_tools/helpers.py create mode 100644 multiversx_logs_parser_tools/node_logs_checker.py create mode 100644 pyrightconfig.json create mode 100644 requirements-dev.txt create mode 100644 requirements.txt diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000..a4959c5 --- /dev/null +++ b/.flake8 @@ -0,0 +1,2 @@ +[flake8] +ignore = E501, E722 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..8343e61 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,20 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml + + - repo: https://github.com/PyCQA/flake8 + rev: 7.0.0 + hooks: + - id: flake8 + args: + - "--config=.flake8" + + - repo: https://github.com/pycqa/isort + rev: 5.13.2 + hooks: + - id: isort + \ No newline at end of file diff --git a/README.md b/README.md index 62ac2cf..009fae6 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,18 @@ # mx-chain-logs-parser Logs parsing utilities. + +Tool for parsing logs. + +INSTALL + +Create a virtual environment and install the dependencies: + +python3 -m venv ./venv +source ./venv/bin/activate +pip install -r ./requirements.txt --upgrade +export PYTHONPATH=. 
+ + +INSTALL DEVELOPMENT DEPENDENCIES + +pip install -r ./requirements-dev.txt --upgrade diff --git a/multiversx_cross_shard_analysis/__init__.py b/multiversx_cross_shard_analysis/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/multiversx_cross_shard_analysis/gather_data.py b/multiversx_cross_shard_analysis/gather_data.py new file mode 100644 index 0000000..e69de29 diff --git a/multiversx_cross_shard_analysis/header_checker.py b/multiversx_cross_shard_analysis/header_checker.py new file mode 100644 index 0000000..941bb11 --- /dev/null +++ b/multiversx_cross_shard_analysis/header_checker.py @@ -0,0 +1,18 @@ +from typing import Any +from multiversx_logs_parser_tools.node_logs_checker import NodeLogsChecker + + +class HeaderChecker(NodeLogsChecker): + def __init__(self, node_name: str, run_name: str, source: Any): + super().__init__(node_name, run_name) + self.source = source + + def process_logs(self): + # Implement log processing logic specific to header checking + pass + + @classmethod + def from_source(cls, args: dict[str, Any], source: Any) -> 'HeaderChecker': + node_name = args.get('node_name', 'unknown-node') + run_name = args.get('run_name', 'unknown-run') + return cls(node_name, run_name, source) diff --git a/multiversx_logs_parser_tools/__init__.py b/multiversx_logs_parser_tools/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/multiversx_logs_parser_tools/aho-corasik-checker b/multiversx_logs_parser_tools/aho-corasik-checker new file mode 100644 index 0000000..93c56ff --- /dev/null +++ b/multiversx_logs_parser_tools/aho-corasik-checker @@ -0,0 +1,106 @@ +import ahocorasick +from ahocorasick import Automaton +from logs_alerts_definition import AlertCategories, Alerts +from entry_parser import EntryParser +from alerts_counter import AlertCounter +from config import SEARCH_FOR_DEBUG_LEVEL_UNKNOWN_ERRORS + + +class AhoCorasickChecker: + def __init__(self): + # initialize the counter + self.counter: AlertCounter = AlertCounter() + + # Create the automaton & add patterns + self.automaton: Automaton = ahocorasick.Automaton() + for pattern in Alerts.get_patterns(): + self.automaton.add_word(pattern.alert_key_word, Alerts.get_patterns().index(pattern)) + self.known_patterns_length = len(Alerts.get_patterns()) + + # Add ERROR and WARN to automaton + self.automaton.add_word('ERROR', self.known_patterns_length + 2) + self.automaton.add_word('WARN', self.known_patterns_length + 1) + if SEARCH_FOR_DEBUG_LEVEL_UNKNOWN_ERRORS: + self.automaton.add_word('err = ', self.known_patterns_length + 3) + self.automaton.add_word('error = ', self.known_patterns_length + 4) + + self.automaton.make_automaton() + + def is_unknown_issue(self, pattern_idx: int) -> bool: + return pattern_idx > self.known_patterns_length + + def initialize_counter(self): + self.counter.initialize_counter() + + # Updates the counter and also updates the identification with shard and version related data + def check(self, file: str, identification: dict[str, str]): + branches = set() + line = None + for line in file: + if 'starting node' in line and "version = " in line: + # in case name of branch like 'rc/supernova' replace with 'supernova-rc' for valid file name + version_match = line.split("version = ")[1].split("/go")[0].replace('/', '-') + if version_match: + branches.add(version_match) + + matches: ahocorasick = list(self.automaton.iter(line)) + for end_index, pattern_idx in matches: + + if self.is_unknown_issue(pattern_idx): + if len(matches) > 1: + continue + # UNKNOWN ISSUES + 
alert_type, issue = EntryParser('').parse_log_entry(line) + alert_category = AlertCategories.UNKNOWN_ERRORS.value if alert_type == 'ERROR' else AlertCategories.UNKNOWN_WARNINGS.value + message = issue.get('message', 'Error message not found') + logger_name: str = issue.get('logger_name', '') + + message = f'[{logger_name}] {message}' + self.counter.update_counter(alert_category.category_title, message) + self.counter.update_existing_statistics(alert_category.category_title) + + else: + # KNOWN ISSUES + alert = Alerts.get_pattern_by_index(pattern_idx) + alert_category = alert.alert_category + + if Alerts.should_include_parameters_info(alert): + # INCLUDE INFO FROM LOG ENTRY PARAMETERS + alert_type, issue = EntryParser('').parse_log_entry(line) + message: str = issue.get('message', 'Error message not found') + parameters: str = issue.get('parameters', '') + + if alert == Alerts.JAILED_VALIDATOR.value: + message += ' ' + parameters + elif alert == Alerts.JAILED_NUMBER.value: + no_of_jailed_validators = parameters.rsplit(' = ', 1)[1] + if int(no_of_jailed_validators) > 0: + message += ' ' + parameters + else: + break + elif alert_category == AlertCategories.PANICS.value: + if not parameters: + message = line.strip() + else: + message += ' ' + parameters.split(' = ')[-1].strip() + elif alert_category == AlertCategories.NEGATIVE_REWARDS.value: + if not parameters: + message = line.strip() + else: + message += ' ' + parameters + self.counter.update_counter(alert_category.category_title, message) + else: + self.counter.update_existing_counter(alert.alert_category.category_title, alert.alert_key_word) + + self.counter.update_existing_statistics(alert_category.category_title) + break + + # update identification (passed from the node_checker) + if not identification['initial_branch']: + identification['initial_branch'] = list(branches)[0] if len(branches) > 0 else 'Unknown initial branch' + if branches: + identification['final_branch'] = list(branches)[-1] + if line: + _, issue = EntryParser('').parse_log_entry(line) + identification['shard'] = issue.get('shard', '') + return diff --git a/multiversx_logs_parser_tools/archive_handler.py b/multiversx_logs_parser_tools/archive_handler.py new file mode 100644 index 0000000..582f23e --- /dev/null +++ b/multiversx_logs_parser_tools/archive_handler.py @@ -0,0 +1,74 @@ + +from pathlib import Path +from node_logs_checker import NodeLogsChecker +import argparse +from datetime import datetime, timedelta +import re +import zipfile +from aho_corasick_checker import AhoCorasickChecker +from helpers import validate_file_path +from master.master_report import Report + + +class ArchiveHandler: + def __init__(self, checker: NodeLogsChecker, logs_path: str): + self.logs_path = logs_path + self.ahochorasick_checker = AhoCorasickChecker() + zip_name_pattern = r'.*/(.*?).zip' + match = re.match(zip_name_pattern, self.logs_path) + self.run_name = match.group(1) if match else 'unknown-zip-name' + + def handle_logs(self): + """Loop through nodes in the zip file and process logs for each node.""" + # Open the zip file and process tar.gz files inside it that each correspond to a node + + with zipfile.ZipFile(self.logs_path, 'r') as zip_file: + # List all files inside the zip + file_list = zip_file.namelist() + + for file_name in file_list: + if file_name.endswith(".tar.gz"): + node_name = file_name.replace(".tar.gz", "").rsplit("--", 1)[1] + print(f"Processing node {node_name}") + + # Open the tar.gz file as bytes + with zip_file.open(file_name) as tar_file_io: + args = { + 
'node_name': node_name, + 'run_name': self.run_name, + } + node_logs_checker = NodeLogsChecker(**args) + node_logs_checker.handle_node_from_archive(tar_file_io) + node_logs_checker.post_process_node_logs() + + +if __name__ == "__main__": + time_started = datetime.now() + parser = argparse.ArgumentParser( + description=''' + Runs node log checks. Example script: + + python ansible/templates/logs-checker/archive_handler.py --path=logsPath/logs_archive.zip + ''', + epilog='\n', + formatter_class=argparse.RawTextHelpFormatter + ) + + parser.add_argument( + '--path', + required=True, + type=validate_file_path, + help='Path to the run zip file.' + ) + + args = parser.parse_args() + + handler = ArchiveHandler(args.path) + handler.handle_logs() + print(f'Archive checked succesfully: {timedelta(seconds=(datetime.now() - time_started).total_seconds())}s') + + report = Report(handler.run_name) + report.gather_data() + report_file = Path(f'./{report.get_name_of_the_report()}.txt') + report.execute_report(report_file) + print(f"\nReport generated: {report_file}") diff --git a/multiversx_logs_parser_tools/entry_parser.py b/multiversx_logs_parser_tools/entry_parser.py new file mode 100644 index 0000000..0e91280 --- /dev/null +++ b/multiversx_logs_parser_tools/entry_parser.py @@ -0,0 +1,81 @@ +import json +import re + +log_entry_pattern = re.compile( + r'^(?PWARN|ERROR|DEBUG|TRACE|INFO)\s*\[' # Log level + r'(?P[^\]]+)\]\s*' # Timestamp + r'\[(?P[^\]]+)\]\s*' # Logger name + r'\[(?P[^\]]*)\]\s*' # context inside brackets + r'(?P.*)$', # The rest of the log message + re.MULTILINE +) + +context_pattern = re.compile( + r'(?P\S+)/(?P\d+)/(?P\d+)(?:/\((?P[^\)]+)\))?/?') + +separator = ' ' + + +class EntryParser: + ''' + Parses entries with the format: + log_level [2025-04-29 07:46:37.102] [logger] [shard/epoch/round/(subround)] entry_content + + The context [shard/epoch/round/(subround)] can be either fully formed like in '0/4/805/(END_ROUND)' or 'metachain/13/2648/(START_ROUND)' + or partially formed, like in '/0/0/', 'metachain/2/400/' + + The content of the entry is separated using the predefined separator. 
If the separator is not present, and a distinction cannot be made + between the message and parameters, it returns the entire entry content as message + + ''' + + def __init__(self, node_name: str): + self.alerts = [] + + def parse_context(self, context: str): + # Parse shard, epoch, round, subround from context + context_match = context_pattern.match(context) + if context_match: + subround = context_match.group('subround') + return {'shard': context_match.group('shard').strip(), 'epoch': context_match.group('epoch').strip(), + 'round': context_match.group('round').strip(), 'subround': subround.strip() if subround else ''} + else: + return {'shard': '', 'epoch': 0, 'round': 0, 'subround': ''} + + def parse_message(self, message: str): + if separator in message: + # if the separator is present, split the content between message and parameters using the separator + message, parameters = message.split(separator, 1) + return message.strip(), parameters.strip() + + elif ' = ' in message: + # if no separator, but the content includes '=', assume first parameter is the word before the '=' and split before that word + message_parts = message.split(' = ', 1) + message, first_parameter_label = message_parts[0].rsplit(' ', 1) + return message.strip(), first_parameter_label.strip() + ' = ' + message_parts[1].strip() + + else: + # no parameters in the entry or cannot determine if there are parameters present + return message.strip(), '' + + def parse_log_entry(self, log_content: str) -> dict[str, str]: + match = log_entry_pattern.search(log_content) + if match: + data = match.groupdict() + context = self.parse_context(data.pop('context')) + data.update(context) + + message, parameters = self.parse_message(match['message']) + data['message'] = message + data['parameters'] = parameters + + return data + + +if __name__ == "__main__": + content = 'DEBUG[2025-11-11 17:09:06.028] [..nsus/spos/bls/v1] [metachain/0/3/(BLOCK)] Proposed header received v1 header = {"accumulatedFees": 
"0","accumulatedFeesInEpoch":"0","chainID":"6c6f63616c2d746573746e6574","devFeesInEpoch":"0","developerFees":"0","epoch":0,"epochStart":{"economics":{"nodePrice":null,"prevEpochStartHash":"","prevEpochStartRound":0,"rewardsForProtocolSustainability":null,"rewardsPerBlock":null,"totalNewlyMinted":null,"totalSupply":null,"totalToDistribute":null},"lastFinalizedHeaders":[]},"leaderSignature":"","miniBlockHeaders":[],"nonce":3,"peerInfo":[],"prevHash":"bbc1249e07d98aabfdb3e735e35142800df013694780497df76778a27db62033","prevRandSeed":"8b6a73f9f4d34f9355cd4399f8c6f14e1296184ea32636ebd58709d62bd35bbe6b992dd3104fa0a64e5fcff9398e0502","pubKeysBitmap":"","randSeed":"41e9c758555f4a33f5de954d6e7cc2d252cacf3ddadfd3fbac1d9ddca2382681e56ab30c65029b29212eb69b13a47906","receiptsHash":"0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8","reserved":"","rootHash":"e4a0f900f1ea487a61d3832776d05871e81f7975670a69b1febbf212f4cea5cc","round":3,"shardInfo":[{"accumulatedFees":"0","developerFees":"0","epoch":0,"headerHash":"8da01a5fbda1484d915740b824ebb1de11722ae501e3c8a4e21d9ce96d1a4c1d","lastIncludedMetaNonce":0,"nonce":1,"numPendingMiniBlocks":0,"prevHash":"00fd532ea2e896b86cd70e189c5e716fcfaaac8b7a060e75421d369c19db78e3","prevRandSeed":"f501347f089b236bdd605babb0e27af0b7df76e45628d35072bc87eec4178c0c","pubKeysBitmap":"07","round":1,"shardID":0,"shardMiniBlockHeaders":[],"signature":"","txCount":0},{"accumulatedFees":"0","developerFees":"0","epoch":0,"headerHash":"3897723b58aa6e1949a415a54598e5655363b4a4c967827ef045326f1ca9e216","lastIncludedMetaNonce":0,"nonce":1,"numPendingMiniBlocks":0,"prevHash":"5a62b8bd0aa019e8967dbdc446ab610b98bf59525cc0672e77ea8e74d3e6a3b3","prevRandSeed":"e626f319e5c70a6c3b4e96258a635b8f7e1efb224c285c830cbb4e473b187c1a","pubKeysBitmap":"07","round":1,"shardID":1,"shardMiniBlockHeaders":[],"signature":"","txCount":0}],"signature":"","softwareVersion":"64656661756c74","timeStamp":1762873746,"txCount":0,"validatorStatsRootHash":"d3f82f56f69f4c26a913a8a5721dfdd85cdd70ad4efdfb464c4f1f6ddd4f8dea"} ' + result = EntryParser('').parse_log_entry(content) + parameter = result.pop('parameters').split(' = ', 1)[1] + header = json.loads(parameter) + print(json.dumps(result, indent=4)) + print(json.dumps(header, indent=4)) diff --git a/multiversx_logs_parser_tools/helpers.py b/multiversx_logs_parser_tools/helpers.py new file mode 100644 index 0000000..51cec21 --- /dev/null +++ b/multiversx_logs_parser_tools/helpers.py @@ -0,0 +1,20 @@ +import argparse +import os +from typing import Any + + +def extend_dict(dict_to_extend: dict[str, Any], key: str): + if key not in dict_to_extend.keys(): + dict_to_extend[key] = {} + + +def validate_file_path(path: str): + if not os.path.isfile(path): + raise argparse.ArgumentTypeError(f"File '{path}' does not exist.") + return path + + +def validate_folder_path(path: str): + if not os.path.isdir(path): + raise argparse.ArgumentTypeError(f"Folder '{path}' does not exist.") + return path diff --git a/multiversx_logs_parser_tools/node_logs_checker.py b/multiversx_logs_parser_tools/node_logs_checker.py new file mode 100644 index 0000000..28852a0 --- /dev/null +++ b/multiversx_logs_parser_tools/node_logs_checker.py @@ -0,0 +1,117 @@ + + +from helpers import validate_folder_path + +from typing import IO, Any +import tarfile +from pathlib import Path +import os +import json +import argparse +from abc import ABC, abstractmethod + + +"""Abstract Base Class for Node Logs Checker.""" + + +class NodeLogsChecker(ABC): + def __init__(self, args: dict[str, Any]): + 
self.report_name = '' + self.node_name = args.get('node_name', 'unknown-node') + self.run_name = args.get('run_name', 'unknown-run') + + self.initialize_checker(args) + + """ Parses a .log file for the given node. """ + @abstractmethod + def process_log_file(self, log_lines: list[str]): + pass + + """ Post-process the node logs after all log files have been parsed. """ + @abstractmethod + def post_process_node_logs(self): + pass + + @abstractmethod + def initialize_checker(self, args: dict[str, Any]): + pass + + @abstractmethod + def create_json_for_node(self) -> dict[str, Any]: + pass + + def handle_node_from_archive(self, tar_gz_contents: IO[bytes]): + with tarfile.open(fileobj=tar_gz_contents, mode='r:gz') as logs_archive: + # sort logs in alphabetic/chronological order + sorted_members = sorted( + logs_archive.getmembers(), + key=lambda member: member.name + ) + + # process all log files for the node + for member in sorted_members: + if member.name.startswith('logs/logs/') and member.name.endswith('.log'): + raw_data = logs_archive.extractfile(member) + if not raw_data: + continue + + with raw_data as f: + # Decode and pass an iterable (a list of lines) + log_lines = (line.decode("utf-8") for line in f) # Generator expression + self.process_log_file(log_lines) + + def handle_node_from_folder(self, node_logs_path: str): + files = sorted(Path(node_logs_path).glob('*.log')) + for file in files: + with open(file, 'r') as f: + log_lines = f.readlines() + self.process_log_file(log_lines) + + def write_node_json(self, path=''): + if not path: + node_reports_path = './Reports/Nodes' + output_file = Path(f'{node_reports_path}/{self.run_name}/{self.node_name}_report.json') + directory = os.path.dirname(output_file) + Path(directory).mkdir(parents=True, exist_ok=True) + else: + output_file = Path(path + f'/{self.node_name}_report.json') + with open(output_file, "w") as json_file: + json.dump(self.create_json_for_node(), json_file, indent=4) + + +if __name__ == "__main__": + # Should be run either from node logs folder or with [path] parameter + + parser = argparse.ArgumentParser( + description=''' + Runs logs check for the selected node. Example script: + + python ansible/templates/logs-checker/node_logs_checker.py --path=logsPath/node_logs_folder/logs + ''', + epilog='!!! Location should be in the node\'s LOGS folder !!!\n', + formatter_class=argparse.RawTextHelpFormatter + ) + parser.add_argument( + '--path', + required=False, + type=validate_folder_path, + help='Path to the logs folder.' 
+ ) + args = parser.parse_args() + + if args.path: + current_folder = args.path + else: + current_folder = os.getcwd() + + try: + node_name = current_folder.rsplit('--', 1)[1].replace('/logs', '') + except IndexError: + node_name = '' + + if ('validator' in node_name or 'observer' in node_name) and current_folder.endswith('/logs'): + selected_folder = current_folder + node_logs_checker: NodeLogsChecker = NodeLogsChecker.from_node_logs(node_name, 'test_run', selected_folder) + node_logs_checker.write_node_report_json(selected_folder) + else: + print('Invalid folder') diff --git a/pyrightconfig.json b/pyrightconfig.json new file mode 100644 index 0000000..8b3ea3e --- /dev/null +++ b/pyrightconfig.json @@ -0,0 +1,16 @@ +{ + "include": [], + "exclude": [ + "**/__pycache__", + ], + "ignore": [], + "defineConstant": { + "DEBUG": true + }, + "venvPath": ".", + "venv": "venv", + "stubPath": "", + "reportMissingImports": true, + "reportMissingTypeStubs": false, + "reportUnknownParameterType": true +} diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 0000000..85d0621 --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,4 @@ +autopep8 +flake8 +pytest +pyright \ No newline at end of file diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..e69de29 From 227349740ee213a02829d7d19323f94095a18f2d Mon Sep 17 00:00:00 2001 From: Mihaela Radian Date: Fri, 14 Nov 2025 14:07:24 +0200 Subject: [PATCH 02/24] MX-17308 Added specific classes for header analysis --- .gitignore | 3 + README.md | 4 + .../gather_data.py | 20 ++++ .../header_analysis_checker.py | 35 ++++++ .../header_analysis_parser.py | 47 ++++++++ .../header_checker.py | 18 --- .../header_data.py | 21 ++++ .../aho-corasik-checker | 106 ------------------ .../aho_corasik_parser.py | 48 ++++++++ .../archive_handler.py | 71 +++++------- multiversx_logs_parser_tools/entry_parser.py | 4 +- .../node_logs_checker.py | 86 +++++--------- 12 files changed, 243 insertions(+), 220 deletions(-) create mode 100644 multiversx_cross_shard_analysis/header_analysis_checker.py create mode 100644 multiversx_cross_shard_analysis/header_analysis_parser.py delete mode 100644 multiversx_cross_shard_analysis/header_checker.py create mode 100644 multiversx_cross_shard_analysis/header_data.py delete mode 100644 multiversx_logs_parser_tools/aho-corasik-checker create mode 100644 multiversx_logs_parser_tools/aho_corasik_parser.py diff --git a/.gitignore b/.gitignore index 0a19790..85b25c8 100644 --- a/.gitignore +++ b/.gitignore @@ -6,6 +6,9 @@ __pycache__/ # C extensions *.so +# Json files +*.json + # Distribution / packaging .Python build/ diff --git a/README.md b/README.md index 009fae6..4962ce3 100644 --- a/README.md +++ b/README.md @@ -16,3 +16,7 @@ export PYTHONPATH=. 
INSTALL DEVELOPMENT DEPENDENCIES pip install -r ./requirements-dev.txt --upgrade + +EXAMPLE USAGE + +python -m multiversx_cross_shard_analysis.gather_data --path /home/mihaela/Downloads/cross-shard-execution-anal-9afe696daf.zip \ No newline at end of file diff --git a/multiversx_cross_shard_analysis/gather_data.py b/multiversx_cross_shard_analysis/gather_data.py index e69de29..a9a6800 100644 --- a/multiversx_cross_shard_analysis/gather_data.py +++ b/multiversx_cross_shard_analysis/gather_data.py @@ -0,0 +1,20 @@ +from datetime import datetime, timedelta +from multiversx_logs_parser_tools.archive_handler import ArchiveHandler + +from .header_analysis_checker import HeaderAnalysisChecker + +from .header_analysis_parser import HeaderAnalysisParser + + +def gather_data(): + time_started = datetime.now() + print('Starting cross-shard analysis...') + args = ArchiveHandler.get_path() + header_checker = HeaderAnalysisChecker(HeaderAnalysisParser, args) + handler = ArchiveHandler(header_checker, args.path) + handler.handle_logs() + print(f'Archive checked successfully: {timedelta(seconds=(datetime.now() - time_started).total_seconds())}s') + + +if __name__ == "__main__": + gather_data() diff --git a/multiversx_cross_shard_analysis/header_analysis_checker.py b/multiversx_cross_shard_analysis/header_analysis_checker.py new file mode 100644 index 0000000..afb68ea --- /dev/null +++ b/multiversx_cross_shard_analysis/header_analysis_checker.py @@ -0,0 +1,35 @@ +from argparse import Namespace +from typing import Any +from multiversx_logs_parser_tools.node_logs_checker import NodeLogsChecker + +from .header_analysis_parser import HeaderAnalysisParser + + +class HeaderAnalysisChecker(NodeLogsChecker): + def __init__(self, parser_cls: type[HeaderAnalysisParser], args: Namespace): + self.node_parsed_headers = {} + super().__init__(parser_cls, args) + + def initialize_checker(self, args): + self.node_parsed_headers = {} + return super().initialize_checker(args) + + def process_parsed_result(self): + self.parsed = self.parser.parsed_headers.header_dictionary.copy() + self.parser.initialize_checker() + + def post_process_node_logs(self): + # Implement post-processing logic here + self.write_node_json() + pass + + def create_json_for_node(self) -> dict[str, Any]: + return { + "node_name": self.node_name, + "run_name": self.run_name, + "header_analysis": self.parsed + } + + def reset_node(self, args: Namespace): + super().reset_node(args) + self.parsed = {'proposed_header': [], 'commited_header': []} diff --git a/multiversx_cross_shard_analysis/header_analysis_parser.py b/multiversx_cross_shard_analysis/header_analysis_parser.py new file mode 100644 index 0000000..dee7f41 --- /dev/null +++ b/multiversx_cross_shard_analysis/header_analysis_parser.py @@ -0,0 +1,47 @@ +import json +from re import Pattern +from typing import Any +from multiversx_logs_parser_tools.aho_corasik_parser import AhoCorasickParser + +from .header_data import HeaderData + + +class HeaderAnalysisParser(AhoCorasickParser): + def __init__(self): + print("Before setting:", hasattr(self, "parsed_headers")) + self.parsed_headers = HeaderData() + print("After setting:", hasattr(self, "parsed_headers")) + super().__init__() + print("After parent init:", hasattr(self, "parsed_headers")) + + def get_patterns(self) -> list[tuple[Pattern[str], int]]: + patterns = [] + patterns.append(('Proposed header received', 0)) + patterns.append(('Proposed header sent', 1)) + patterns.append(('Proposed header committed', 2)) + return patterns + + def 
initialize_checker(self) -> None: + # Initialize any required state or variables for the checker + self.parsed_headers.reset() + + def process_match(self, line: str, end_index: int, pattern_idx: int, args: dict[str, str]) -> dict[str, Any]: + parsed = super().process_match(line, end_index, pattern_idx, args) + # Additional processing specific to header checking can be added here + if pattern_idx < 3 and 'parameters' in parsed: + parameter = parsed.pop('parameters').split(' = ', 1)[1] + header = json.loads(parameter) + if pattern_idx < 2: + self.parsed_headers.add_proposed_header(header) + elif pattern_idx == 2: + self.parsed_headers.add_commited_header(header) + + return {} + + def process_parsed_entry(self, parsed_entry: dict[str, Any], args: dict[str, str]) -> None: + # Process the parsed log entry specific to header checking + pass + + def should_parse_line(self, pattern: Pattern[str]) -> bool: + # Determine if the line should be parsed based on the pattern + return True diff --git a/multiversx_cross_shard_analysis/header_checker.py b/multiversx_cross_shard_analysis/header_checker.py deleted file mode 100644 index 941bb11..0000000 --- a/multiversx_cross_shard_analysis/header_checker.py +++ /dev/null @@ -1,18 +0,0 @@ -from typing import Any -from multiversx_logs_parser_tools.node_logs_checker import NodeLogsChecker - - -class HeaderChecker(NodeLogsChecker): - def __init__(self, node_name: str, run_name: str, source: Any): - super().__init__(node_name, run_name) - self.source = source - - def process_logs(self): - # Implement log processing logic specific to header checking - pass - - @classmethod - def from_source(cls, args: dict[str, Any], source: Any) -> 'HeaderChecker': - node_name = args.get('node_name', 'unknown-node') - run_name = args.get('run_name', 'unknown-run') - return cls(node_name, run_name, source) diff --git a/multiversx_cross_shard_analysis/header_data.py b/multiversx_cross_shard_analysis/header_data.py new file mode 100644 index 0000000..97f7aa6 --- /dev/null +++ b/multiversx_cross_shard_analysis/header_data.py @@ -0,0 +1,21 @@ +from typing import Any + + +class HeaderData: + def __init__(self): + self.header_dictionary = { + 'proposed_headers': [], + 'commited_headers': [] + } + + def reset(self): + self.header_dictionary = { + 'proposed_headers': [], + 'commited_headers': [] + } + + def add_proposed_header(self, header: dict[str, Any]): + self.header_dictionary['proposed_headers'].append(header) + + def add_commited_header(self, header: dict[str, Any]): + self.header_dictionary['commited_headers'].append(header) diff --git a/multiversx_logs_parser_tools/aho-corasik-checker b/multiversx_logs_parser_tools/aho-corasik-checker deleted file mode 100644 index 93c56ff..0000000 --- a/multiversx_logs_parser_tools/aho-corasik-checker +++ /dev/null @@ -1,106 +0,0 @@ -import ahocorasick -from ahocorasick import Automaton -from logs_alerts_definition import AlertCategories, Alerts -from entry_parser import EntryParser -from alerts_counter import AlertCounter -from config import SEARCH_FOR_DEBUG_LEVEL_UNKNOWN_ERRORS - - -class AhoCorasickChecker: - def __init__(self): - # initialize the counter - self.counter: AlertCounter = AlertCounter() - - # Create the automaton & add patterns - self.automaton: Automaton = ahocorasick.Automaton() - for pattern in Alerts.get_patterns(): - self.automaton.add_word(pattern.alert_key_word, Alerts.get_patterns().index(pattern)) - self.known_patterns_length = len(Alerts.get_patterns()) - - # Add ERROR and WARN to automaton - 
self.automaton.add_word('ERROR', self.known_patterns_length + 2) - self.automaton.add_word('WARN', self.known_patterns_length + 1) - if SEARCH_FOR_DEBUG_LEVEL_UNKNOWN_ERRORS: - self.automaton.add_word('err = ', self.known_patterns_length + 3) - self.automaton.add_word('error = ', self.known_patterns_length + 4) - - self.automaton.make_automaton() - - def is_unknown_issue(self, pattern_idx: int) -> bool: - return pattern_idx > self.known_patterns_length - - def initialize_counter(self): - self.counter.initialize_counter() - - # Updates the counter and also updates the identification with shard and version related data - def check(self, file: str, identification: dict[str, str]): - branches = set() - line = None - for line in file: - if 'starting node' in line and "version = " in line: - # in case name of branch like 'rc/supernova' replace with 'supernova-rc' for valid file name - version_match = line.split("version = ")[1].split("/go")[0].replace('/', '-') - if version_match: - branches.add(version_match) - - matches: ahocorasick = list(self.automaton.iter(line)) - for end_index, pattern_idx in matches: - - if self.is_unknown_issue(pattern_idx): - if len(matches) > 1: - continue - # UNKNOWN ISSUES - alert_type, issue = EntryParser('').parse_log_entry(line) - alert_category = AlertCategories.UNKNOWN_ERRORS.value if alert_type == 'ERROR' else AlertCategories.UNKNOWN_WARNINGS.value - message = issue.get('message', 'Error message not found') - logger_name: str = issue.get('logger_name', '') - - message = f'[{logger_name}] {message}' - self.counter.update_counter(alert_category.category_title, message) - self.counter.update_existing_statistics(alert_category.category_title) - - else: - # KNOWN ISSUES - alert = Alerts.get_pattern_by_index(pattern_idx) - alert_category = alert.alert_category - - if Alerts.should_include_parameters_info(alert): - # INCLUDE INFO FROM LOG ENTRY PARAMETERS - alert_type, issue = EntryParser('').parse_log_entry(line) - message: str = issue.get('message', 'Error message not found') - parameters: str = issue.get('parameters', '') - - if alert == Alerts.JAILED_VALIDATOR.value: - message += ' ' + parameters - elif alert == Alerts.JAILED_NUMBER.value: - no_of_jailed_validators = parameters.rsplit(' = ', 1)[1] - if int(no_of_jailed_validators) > 0: - message += ' ' + parameters - else: - break - elif alert_category == AlertCategories.PANICS.value: - if not parameters: - message = line.strip() - else: - message += ' ' + parameters.split(' = ')[-1].strip() - elif alert_category == AlertCategories.NEGATIVE_REWARDS.value: - if not parameters: - message = line.strip() - else: - message += ' ' + parameters - self.counter.update_counter(alert_category.category_title, message) - else: - self.counter.update_existing_counter(alert.alert_category.category_title, alert.alert_key_word) - - self.counter.update_existing_statistics(alert_category.category_title) - break - - # update identification (passed from the node_checker) - if not identification['initial_branch']: - identification['initial_branch'] = list(branches)[0] if len(branches) > 0 else 'Unknown initial branch' - if branches: - identification['final_branch'] = list(branches)[-1] - if line: - _, issue = EntryParser('').parse_log_entry(line) - identification['shard'] = issue.get('shard', '') - return diff --git a/multiversx_logs_parser_tools/aho_corasik_parser.py b/multiversx_logs_parser_tools/aho_corasik_parser.py new file mode 100644 index 0000000..0c2c5c3 --- /dev/null +++ b/multiversx_logs_parser_tools/aho_corasik_parser.py @@ 
-0,0 +1,48 @@ +from abc import ABC, abstractmethod +from re import Pattern +from typing import Any +import ahocorasick +from ahocorasick import Automaton +from .entry_parser import EntryParser + + +class AhoCorasickParser(ABC): + def __init__(self): + self.initialize_checker() + self.entry_parser = EntryParser(node_name='') + # Create the automaton & add patterns + self.automaton: Automaton = ahocorasick.Automaton() + for pattern, index in self.get_patterns(): + self.automaton.add_word(pattern, index) + self.automaton.make_automaton() + + @abstractmethod + def get_patterns(self) -> list[tuple[Pattern[str], int]]: + return [] + + @abstractmethod + def initialize_checker(self) -> None: + pass + + @abstractmethod + def process_match(self, line: str, end_index: int, pattern_idx: int, args: dict[str, str]) -> dict[str, Any]: + result = {} + if self.should_parse_line(self.get_patterns()[pattern_idx][0]): + result = self.entry_parser.parse_log_entry(line) + return result + + @abstractmethod + def process_parsed_entry(self, parsed_entry: dict[str, Any], args: dict[str, str]) -> None: + pass + + @abstractmethod + def should_parse_line(self, pattern: Pattern[str]) -> bool: + pass + + def parse(self, file: list[str], args: dict[str, str]): + for line in file: + matches: list[tuple[int, int]] = list(self.automaton.iter(line)) + for end_index, pattern_idx in matches: + result = self.process_match(line, end_index, pattern_idx, args) + if result: + self.process_parsed_entry(result, args) diff --git a/multiversx_logs_parser_tools/archive_handler.py b/multiversx_logs_parser_tools/archive_handler.py index 582f23e..e06e573 100644 --- a/multiversx_logs_parser_tools/archive_handler.py +++ b/multiversx_logs_parser_tools/archive_handler.py @@ -1,22 +1,24 @@ -from pathlib import Path -from node_logs_checker import NodeLogsChecker +from typing import TypeVar + +from .aho_corasik_parser import AhoCorasickParser +from .node_logs_checker import NodeLogsChecker import argparse -from datetime import datetime, timedelta import re import zipfile -from aho_corasick_checker import AhoCorasickChecker -from helpers import validate_file_path -from master.master_report import Report + +from .helpers import validate_file_path + +P = TypeVar("P", bound=AhoCorasickParser) class ArchiveHandler: - def __init__(self, checker: NodeLogsChecker, logs_path: str): + def __init__(self, checker: NodeLogsChecker[P], logs_path: str): self.logs_path = logs_path - self.ahochorasick_checker = AhoCorasickChecker() zip_name_pattern = r'.*/(.*?).zip' match = re.match(zip_name_pattern, self.logs_path) self.run_name = match.group(1) if match else 'unknown-zip-name' + self.checker = checker def handle_logs(self): """Loop through nodes in the zip file and process logs for each node.""" @@ -33,42 +35,31 @@ def handle_logs(self): # Open the tar.gz file as bytes with zip_file.open(file_name) as tar_file_io: - args = { - 'node_name': node_name, - 'run_name': self.run_name, - } - node_logs_checker = NodeLogsChecker(**args) - node_logs_checker.handle_node_from_archive(tar_file_io) - node_logs_checker.post_process_node_logs() + args = argparse.Namespace( + node_name=node_name, + run_name=self.run_name, + ) + self.checker.reset_node(args) + self.checker.handle_node_from_archive(tar_file_io) + self.checker.post_process_node_logs() - -if __name__ == "__main__": - time_started = datetime.now() - parser = argparse.ArgumentParser( - description=''' + @staticmethod + def get_path() -> argparse.Namespace: + parser = argparse.ArgumentParser( + description=''' Runs 
node log checks. Example script: python ansible/templates/logs-checker/archive_handler.py --path=logsPath/logs_archive.zip ''', - epilog='\n', - formatter_class=argparse.RawTextHelpFormatter - ) - - parser.add_argument( - '--path', - required=True, - type=validate_file_path, - help='Path to the run zip file.' - ) - - args = parser.parse_args() + epilog='\n', + formatter_class=argparse.RawTextHelpFormatter + ) - handler = ArchiveHandler(args.path) - handler.handle_logs() - print(f'Archive checked succesfully: {timedelta(seconds=(datetime.now() - time_started).total_seconds())}s') + parser.add_argument( + '--path', + required=True, + type=validate_file_path, + help='Path to the run zip file.' + ) - report = Report(handler.run_name) - report.gather_data() - report_file = Path(f'./{report.get_name_of_the_report()}.txt') - report.execute_report(report_file) - print(f"\nReport generated: {report_file}") + return parser.parse_args() diff --git a/multiversx_logs_parser_tools/entry_parser.py b/multiversx_logs_parser_tools/entry_parser.py index 0e91280..1863395 100644 --- a/multiversx_logs_parser_tools/entry_parser.py +++ b/multiversx_logs_parser_tools/entry_parser.py @@ -1,5 +1,6 @@ import json import re +from typing import Any log_entry_pattern = re.compile( r'^(?PWARN|ERROR|DEBUG|TRACE|INFO)\s*\[' # Log level @@ -32,7 +33,7 @@ class EntryParser: def __init__(self, node_name: str): self.alerts = [] - def parse_context(self, context: str): + def parse_context(self, context: str) -> dict[str, Any]: # Parse shard, epoch, round, subround from context context_match = context_pattern.match(context) if context_match: @@ -59,6 +60,7 @@ def parse_message(self, message: str): return message.strip(), '' def parse_log_entry(self, log_content: str) -> dict[str, str]: + data = {} match = log_entry_pattern.search(log_content) if match: data = match.groupdict() diff --git a/multiversx_logs_parser_tools/node_logs_checker.py b/multiversx_logs_parser_tools/node_logs_checker.py index 28852a0..8861598 100644 --- a/multiversx_logs_parser_tools/node_logs_checker.py +++ b/multiversx_logs_parser_tools/node_logs_checker.py @@ -1,44 +1,50 @@ +try: + # When this module is imported as part of a package + from .aho_corasik_parser import AhoCorasickParser +except Exception: + # Fallback when running the script directly (not as a package) + from aho_corasik_parser import AhoCorasickParser -from helpers import validate_folder_path -from typing import IO, Any +from typing import IO, Any, Generic, Type, TypeVar import tarfile from pathlib import Path import os import json import argparse -from abc import ABC, abstractmethod """Abstract Base Class for Node Logs Checker.""" -class NodeLogsChecker(ABC): - def __init__(self, args: dict[str, Any]): - self.report_name = '' - self.node_name = args.get('node_name', 'unknown-node') - self.run_name = args.get('run_name', 'unknown-run') +P = TypeVar("P", bound=AhoCorasickParser) + +class NodeLogsChecker(Generic[P]): + def __init__(self, parser_cls: Type[P], args: argparse.Namespace): + self.parser: P = parser_cls() self.initialize_checker(args) """ Parses a .log file for the given node. """ - @abstractmethod - def process_log_file(self, log_lines: list[str]): + + def process_parsed_result(self): pass """ Post-process the node logs after all log files have been parsed. 
""" - @abstractmethod + def post_process_node_logs(self): pass - @abstractmethod - def initialize_checker(self, args: dict[str, Any]): + def initialize_checker(self, args: argparse.Namespace): pass - @abstractmethod def create_json_for_node(self) -> dict[str, Any]: - pass + return {} + + def reset_node(self, args: argparse.Namespace): + self.node_name = args.node_name if args.node_name else 'unknown-node' + self.run_name = args.run_name if args.run_name else 'unknown-run' def handle_node_from_archive(self, tar_gz_contents: IO[bytes]): with tarfile.open(fileobj=tar_gz_contents, mode='r:gz') as logs_archive: @@ -51,21 +57,24 @@ def handle_node_from_archive(self, tar_gz_contents: IO[bytes]): # process all log files for the node for member in sorted_members: if member.name.startswith('logs/logs/') and member.name.endswith('.log'): + print(" Processing log file:", member.name) raw_data = logs_archive.extractfile(member) if not raw_data: continue with raw_data as f: # Decode and pass an iterable (a list of lines) - log_lines = (line.decode("utf-8") for line in f) # Generator expression - self.process_log_file(log_lines) + log_lines = [line.decode("utf-8") for line in f] + self.parser.parse(log_lines, {}) + self.process_parsed_result() def handle_node_from_folder(self, node_logs_path: str): files = sorted(Path(node_logs_path).glob('*.log')) for file in files: with open(file, 'r') as f: log_lines = f.readlines() - self.process_log_file(log_lines) + self.parser.parse(log_lines, {}) + self.process_parsed_result() def write_node_json(self, path=''): if not path: @@ -78,40 +87,7 @@ def write_node_json(self, path=''): with open(output_file, "w") as json_file: json.dump(self.create_json_for_node(), json_file, indent=4) - -if __name__ == "__main__": - # Should be run either from node logs folder or with [path] parameter - - parser = argparse.ArgumentParser( - description=''' - Runs logs check for the selected node. Example script: - - python ansible/templates/logs-checker/node_logs_checker.py --path=logsPath/node_logs_folder/logs - ''', - epilog='!!! Location should be in the node\'s LOGS folder !!!\n', - formatter_class=argparse.RawTextHelpFormatter - ) - parser.add_argument( - '--path', - required=False, - type=validate_folder_path, - help='Path to the logs folder.' 
- ) - args = parser.parse_args() - - if args.path: - current_folder = args.path - else: - current_folder = os.getcwd() - - try: - node_name = current_folder.rsplit('--', 1)[1].replace('/logs', '') - except IndexError: - node_name = '' - - if ('validator' in node_name or 'observer' in node_name) and current_folder.endswith('/logs'): - selected_folder = current_folder - node_logs_checker: NodeLogsChecker = NodeLogsChecker.from_node_logs(node_name, 'test_run', selected_folder) - node_logs_checker.write_node_report_json(selected_folder) - else: - print('Invalid folder') + @classmethod + def from_args(cls: Type['NodeLogsChecker[P]'], parser_cls: Type[P], args: argparse.Namespace) -> 'NodeLogsChecker[P]': + instance = cls(parser_cls, args) + return instance From 7bfa53112c3b4af33dd2ef9d4d372b7472950fda Mon Sep 17 00:00:00 2001 From: Mihaela Radian Date: Sun, 16 Nov 2025 00:46:25 +0200 Subject: [PATCH 03/24] MX-17306 Added shards structure --- .pre-commit-config.yaml | 1 - README.md | 2 +- .../gather_data.py | 9 +-- .../header_analysis_archive_handler.py | 41 +++++++++++++ .../header_analysis_checker.py | 8 ++- .../header_analysis_parser.py | 6 +- .../header_data.py | 21 ------- .../header_structures.py | 57 +++++++++++++++++++ .../aho_corasik_parser.py | 2 + .../archive_handler.py | 17 ++++-- multiversx_logs_parser_tools/entry_parser.py | 2 +- .../node_logs_checker.py | 14 ++--- requirements-dev.txt | 2 +- requirements.txt | 1 + 14 files changed, 135 insertions(+), 48 deletions(-) create mode 100644 multiversx_cross_shard_analysis/header_analysis_archive_handler.py delete mode 100644 multiversx_cross_shard_analysis/header_data.py create mode 100644 multiversx_cross_shard_analysis/header_structures.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8343e61..1d029bf 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -17,4 +17,3 @@ repos: rev: 5.13.2 hooks: - id: isort - \ No newline at end of file diff --git a/README.md b/README.md index 4962ce3..7bdd592 100644 --- a/README.md +++ b/README.md @@ -19,4 +19,4 @@ pip install -r ./requirements-dev.txt --upgrade EXAMPLE USAGE -python -m multiversx_cross_shard_analysis.gather_data --path /home/mihaela/Downloads/cross-shard-execution-anal-9afe696daf.zip \ No newline at end of file +python -m multiversx_cross_shard_analysis.gather_data --path /home/mihaela/Downloads/cross-shard-execution-anal-9afe696daf.zip diff --git a/multiversx_cross_shard_analysis/gather_data.py b/multiversx_cross_shard_analysis/gather_data.py index a9a6800..c15679e 100644 --- a/multiversx_cross_shard_analysis/gather_data.py +++ b/multiversx_cross_shard_analysis/gather_data.py @@ -1,17 +1,18 @@ from datetime import datetime, timedelta -from multiversx_logs_parser_tools.archive_handler import ArchiveHandler -from .header_analysis_checker import HeaderAnalysisChecker +from .header_analysis_archive_handler import HeaderAnalysisArchiveHandler + +from .header_analysis_checker import HeaderAnalysisChecker from .header_analysis_parser import HeaderAnalysisParser def gather_data(): time_started = datetime.now() print('Starting cross-shard analysis...') - args = ArchiveHandler.get_path() + args = HeaderAnalysisArchiveHandler.get_path() header_checker = HeaderAnalysisChecker(HeaderAnalysisParser, args) - handler = ArchiveHandler(header_checker, args.path) + handler = HeaderAnalysisArchiveHandler(header_checker, args.path) handler.handle_logs() print(f'Archive checked successfully: {timedelta(seconds=(datetime.now() - time_started).total_seconds())}s') 
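The gather_data.py hunk above shows how the generic pieces from PATCH 02 wire together: an AhoCorasickParser subclass supplies the key phrases, a NodeLogsChecker drives it over each node's log files, and an ArchiveHandler walks the run archive. Below is a minimal sketch of a different tool built on the same extension points; it assumes the APIs exactly as introduced in these patches, while ConsensusStepParser, ConsensusStepChecker, and the 'step changed to' key phrase are hypothetical placeholders, not part of this series:

```python
import argparse
from typing import Any

from multiversx_logs_parser_tools.aho_corasik_parser import AhoCorasickParser
from multiversx_logs_parser_tools.archive_handler import ArchiveHandler
from multiversx_logs_parser_tools.node_logs_checker import NodeLogsChecker


class ConsensusStepParser(AhoCorasickParser):
    """Collects every entry containing one (hypothetical) key phrase."""

    def get_patterns(self) -> list[tuple[str, int]]:
        # As elsewhere in the series, plain strings are fed to Automaton.add_word;
        # the value 0 is the pattern index.
        return [('step changed to', 0)]

    def initialize_checker(self) -> None:
        # Called by AhoCorasickParser.__init__ and again after each log file.
        self.entries: list[dict[str, Any]] = []

    def should_parse_line(self, pattern) -> bool:
        return True

    def process_match(self, line: str, end_index: int, pattern_idx: int, args: dict[str, str]) -> dict[str, Any]:
        # The base implementation delegates to EntryParser when should_parse_line() agrees.
        return super().process_match(line, end_index, pattern_idx, args)

    def process_parsed_entry(self, parsed_entry: dict[str, Any], args: dict[str, str]) -> None:
        self.entries.append(parsed_entry)


class ConsensusStepChecker(NodeLogsChecker[ConsensusStepParser]):
    def initialize_checker(self, args: argparse.Namespace):
        self.collected: list[dict[str, Any]] = []

    def reset_node(self, args: argparse.Namespace):
        # Invoked by ArchiveHandler before each node's tar.gz is processed.
        super().reset_node(args)
        self.collected = []

    def process_parsed_result(self):
        # Invoked once per parsed log file: drain the parser state, then reset it.
        self.collected.extend(self.parser.entries)
        self.parser.initialize_checker()

    def post_process_node_logs(self):
        # Writes ./Reports/<run_name>/Nodes/<node_name>_report.json
        self.write_node_json()

    def create_json_for_node(self) -> dict[str, Any]:
        return {'node_name': self.node_name, 'run_name': self.run_name, 'entries': self.collected}


if __name__ == '__main__':
    cli_args = ArchiveHandler.get_path()
    checker = ConsensusStepChecker(ConsensusStepParser, cli_args)
    ArchiveHandler(checker, cli_args.path).handle_logs()
```

For run-level aggregation across nodes, the process_node_data/process_run_data hooks that this patch adds to ArchiveHandler (see the archive_handler.py hunk below) are the intended override points, as HeaderAnalysisArchiveHandler demonstrates.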
diff --git a/multiversx_cross_shard_analysis/header_analysis_archive_handler.py b/multiversx_cross_shard_analysis/header_analysis_archive_handler.py new file mode 100644 index 0000000..496d276 --- /dev/null +++ b/multiversx_cross_shard_analysis/header_analysis_archive_handler.py @@ -0,0 +1,41 @@ +import json +from pathlib import Path +from multiversx_logs_parser_tools.archive_handler import ArchiveHandler + +from .header_structures import HeaderData, ShardData + + +from .header_analysis_checker import HeaderAnalysisChecker + + +class HeaderAnalysisArchiveHandler(ArchiveHandler): + def __init__(self, checker: HeaderAnalysisChecker, logs_path: str): + self.checker = checker + self.shard_data = ShardData() + self.parsed_miniblocks = {} + super().__init__(checker, logs_path) + + def process_node_data(self): + """Process the parsed data for a single node.""" + node_data = HeaderData() + node_data.header_dictionary = self.checker.parsed + self.shard_data.add_node(node_data) + + def process_run_data(self): + """Process the parsed data for the entire run.""" + self.write_run_json() + + def write_run_json(self, path=''): + for shard_id, header_data in self.shard_data.parsed_headers.items(): + run_data = { + "run_name": self.run_name, + "shard_id": shard_id, + "shards": header_data.header_dictionary + } + shard_reports_path = f'./Reports/{self.run_name}/Shards' + output_file = Path(f'{shard_reports_path}/{shard_id}_report.json') + directory = output_file.parent + directory.mkdir(parents=True, exist_ok=True) + with open(output_file, 'w') as f: + json.dump(run_data, f, indent=4) + print(f"Shard data for shard {shard_id} written to {output_file}") diff --git a/multiversx_cross_shard_analysis/header_analysis_checker.py b/multiversx_cross_shard_analysis/header_analysis_checker.py index afb68ea..cf910d1 100644 --- a/multiversx_cross_shard_analysis/header_analysis_checker.py +++ b/multiversx_cross_shard_analysis/header_analysis_checker.py @@ -1,17 +1,19 @@ from argparse import Namespace from typing import Any + from multiversx_logs_parser_tools.node_logs_checker import NodeLogsChecker +from .header_structures import HeaderData + from .header_analysis_parser import HeaderAnalysisParser class HeaderAnalysisChecker(NodeLogsChecker): def __init__(self, parser_cls: type[HeaderAnalysisParser], args: Namespace): - self.node_parsed_headers = {} super().__init__(parser_cls, args) def initialize_checker(self, args): - self.node_parsed_headers = {} + self.parsed = HeaderData().header_dictionary return super().initialize_checker(args) def process_parsed_result(self): @@ -32,4 +34,4 @@ def create_json_for_node(self) -> dict[str, Any]: def reset_node(self, args: Namespace): super().reset_node(args) - self.parsed = {'proposed_header': [], 'commited_header': []} + self.parsed = HeaderData().header_dictionary.copy() diff --git a/multiversx_cross_shard_analysis/header_analysis_parser.py b/multiversx_cross_shard_analysis/header_analysis_parser.py index dee7f41..dba0e2c 100644 --- a/multiversx_cross_shard_analysis/header_analysis_parser.py +++ b/multiversx_cross_shard_analysis/header_analysis_parser.py @@ -1,18 +1,16 @@ import json from re import Pattern from typing import Any + from multiversx_logs_parser_tools.aho_corasik_parser import AhoCorasickParser -from .header_data import HeaderData +from .header_structures import HeaderData class HeaderAnalysisParser(AhoCorasickParser): def __init__(self): - print("Before setting:", hasattr(self, "parsed_headers")) self.parsed_headers = HeaderData() - print("After setting:", 
hasattr(self, "parsed_headers")) super().__init__() - print("After parent init:", hasattr(self, "parsed_headers")) def get_patterns(self) -> list[tuple[Pattern[str], int]]: patterns = [] diff --git a/multiversx_cross_shard_analysis/header_data.py b/multiversx_cross_shard_analysis/header_data.py deleted file mode 100644 index 97f7aa6..0000000 --- a/multiversx_cross_shard_analysis/header_data.py +++ /dev/null @@ -1,21 +0,0 @@ -from typing import Any - - -class HeaderData: - def __init__(self): - self.header_dictionary = { - 'proposed_headers': [], - 'commited_headers': [] - } - - def reset(self): - self.header_dictionary = { - 'proposed_headers': [], - 'commited_headers': [] - } - - def add_proposed_header(self, header: dict[str, Any]): - self.header_dictionary['proposed_headers'].append(header) - - def add_commited_header(self, header: dict[str, Any]): - self.header_dictionary['commited_headers'].append(header) diff --git a/multiversx_cross_shard_analysis/header_structures.py b/multiversx_cross_shard_analysis/header_structures.py new file mode 100644 index 0000000..083ff54 --- /dev/null +++ b/multiversx_cross_shard_analysis/header_structures.py @@ -0,0 +1,57 @@ +from typing import Any + + +def get_value(variable_name: str, header: dict[str, Any]) -> str: + return header['header'][variable_name] if 'header' in header else header[variable_name] + + +def get_shard_id(header: dict[str, Any]) -> int: + return header['header']['shardID'] if 'header' in header else header.get('shardID', 4294967295) + + +class HeaderData: + def __init__(self): + self.header_dictionary = { + 'proposed_headers': [], + 'commited_headers': [] + } + self.seen_headers: dict[str, set[str]] = {'proposed_headers': set(), + 'commited_headers': set()} + + def reset(self): + self.header_dictionary = { + 'proposed_headers': [], + 'commited_headers': [] + } + self.seen_headers: dict[str, set[str]] = {'proposed_headers': set(), + 'commited_headers': set()} + + def add_proposed_header(self, header: dict[str, Any]): + nonce = get_value('nonce', header) + if nonce in self.seen_headers['proposed_headers']: + return + self.header_dictionary['proposed_headers'].append(header) + self.seen_headers['proposed_headers'].add(nonce) + + def add_commited_header(self, header: dict[str, Any]): + nonce = get_value('nonce', header) + if nonce in self.seen_headers['commited_headers']: + return + self.header_dictionary['commited_headers'].append(header) + self.seen_headers['commited_headers'].add(nonce) + + +class ShardData: + def __init__(self): + self.parsed_headers = {0: HeaderData(), 1: HeaderData(), 2: HeaderData(), 4294967295: HeaderData()} + + def add_node(self, node_data: HeaderData): + for header_status in node_data.header_dictionary.keys(): + for header in node_data.header_dictionary[header_status]: + shard_id = get_shard_id(header) + if header_status == 'commited_headers': + self.parsed_headers[shard_id].add_commited_header(header) + elif header_status == 'proposed_headers': + self.parsed_headers[shard_id].add_proposed_header(header) + else: + print(f"Warning: Unknown header status {header_status} in header: round = {get_value('round', header)}, nonce = {get_value('nonce', header)}") diff --git a/multiversx_logs_parser_tools/aho_corasik_parser.py b/multiversx_logs_parser_tools/aho_corasik_parser.py index 0c2c5c3..e571e74 100644 --- a/multiversx_logs_parser_tools/aho_corasik_parser.py +++ b/multiversx_logs_parser_tools/aho_corasik_parser.py @@ -1,8 +1,10 @@ from abc import ABC, abstractmethod from re import Pattern from typing import Any + 
import ahocorasick from ahocorasick import Automaton + from .entry_parser import EntryParser diff --git a/multiversx_logs_parser_tools/archive_handler.py b/multiversx_logs_parser_tools/archive_handler.py index e06e573..c0d5db4 100644 --- a/multiversx_logs_parser_tools/archive_handler.py +++ b/multiversx_logs_parser_tools/archive_handler.py @@ -1,13 +1,12 @@ -from typing import TypeVar - -from .aho_corasik_parser import AhoCorasickParser -from .node_logs_checker import NodeLogsChecker import argparse import re import zipfile +from typing import TypeVar +from .aho_corasik_parser import AhoCorasickParser from .helpers import validate_file_path +from .node_logs_checker import NodeLogsChecker P = TypeVar("P", bound=AhoCorasickParser) @@ -42,6 +41,16 @@ def handle_logs(self): self.checker.reset_node(args) self.checker.handle_node_from_archive(tar_file_io) self.checker.post_process_node_logs() + self.process_node_data() + self.process_run_data() + + def process_node_data(self): + """Process the parsed data for a single node.""" + pass + + def process_run_data(self): + """Process the parsed data for the entire run.""" + pass @staticmethod def get_path() -> argparse.Namespace: diff --git a/multiversx_logs_parser_tools/entry_parser.py b/multiversx_logs_parser_tools/entry_parser.py index 1863395..efeceb6 100644 --- a/multiversx_logs_parser_tools/entry_parser.py +++ b/multiversx_logs_parser_tools/entry_parser.py @@ -25,7 +25,7 @@ class EntryParser: The context [shard/epoch/round/(subround)] can be either fully formed like in '0/4/805/(END_ROUND)' or 'metachain/13/2648/(START_ROUND)' or partially formed, like in '/0/0/', 'metachain/2/400/' - The content of the entry is separated using the predefined separator. If the separator is not present, and a distinction cannot be made + The content of the entry is separated using the predefined separator. 
If the separator is not present, and a distinction cannot be made between the message and parameters, it returns the entire entry content as message ''' diff --git a/multiversx_logs_parser_tools/node_logs_checker.py b/multiversx_logs_parser_tools/node_logs_checker.py index 8861598..e8696ab 100644 --- a/multiversx_logs_parser_tools/node_logs_checker.py +++ b/multiversx_logs_parser_tools/node_logs_checker.py @@ -6,14 +6,12 @@ # Fallback when running the script directly (not as a package) from aho_corasik_parser import AhoCorasickParser - -from typing import IO, Any, Generic, Type, TypeVar +import argparse +import json +import os import tarfile from pathlib import Path -import os -import json -import argparse - +from typing import IO, Any, Generic, Type, TypeVar """Abstract Base Class for Node Logs Checker.""" @@ -78,8 +76,8 @@ def handle_node_from_folder(self, node_logs_path: str): def write_node_json(self, path=''): if not path: - node_reports_path = './Reports/Nodes' - output_file = Path(f'{node_reports_path}/{self.run_name}/{self.node_name}_report.json') + node_reports_path = f'./Reports/{self.run_name}/Nodes' + output_file = Path(f'{node_reports_path}/{self.node_name}_report.json') directory = os.path.dirname(output_file) Path(directory).mkdir(parents=True, exist_ok=True) else: diff --git a/requirements-dev.txt b/requirements-dev.txt index 85d0621..ba07244 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,4 +1,4 @@ autopep8 flake8 pytest -pyright \ No newline at end of file +pyright diff --git a/requirements.txt b/requirements.txt index e69de29..c6609dc 100644 --- a/requirements.txt +++ b/requirements.txt @@ -0,0 +1 @@ +pyahocorasick==2.2.0 From dfa2c06de5cc7c2da8f3d7452323872a0a84e3e0 Mon Sep 17 00:00:00 2001 From: Mihaela Radian Date: Mon, 17 Nov 2025 19:05:53 +0200 Subject: [PATCH 04/24] MX-17306 added miniblock collection --- .vscode/ltex.dictionary.en-US.txt | 2 + README.md | 36 ++++++++-- .../header_analysis_archive_handler.py | 12 +++- .../header_structures.py | 70 +++++++++++++++++-- 4 files changed, 107 insertions(+), 13 deletions(-) create mode 100644 .vscode/ltex.dictionary.en-US.txt diff --git a/.vscode/ltex.dictionary.en-US.txt b/.vscode/ltex.dictionary.en-US.txt new file mode 100644 index 0000000..2f7628b --- /dev/null +++ b/.vscode/ltex.dictionary.en-US.txt @@ -0,0 +1,2 @@ +AhoCorasikParser +NodeLogsChecker diff --git a/README.md b/README.md index 7bdd592..3bfd58e 100644 --- a/README.md +++ b/README.md @@ -1,22 +1,46 @@ # mx-chain-logs-parser -Logs parsing utilities. +Logs parsing utilities and applications -Tool for parsing logs. +## LOGS PARSER TOOLS: +The tool provides general abstract classes that can be useful for parsing logs. +In order to create an application that uses off-line parsing of logs files, these classes must be inherited and methods should be implemented for that particular case. -INSTALL +### ARCHIVE HANDLER +- General application processing class, that loops through the nodes in the downloaded logs archive and calls its NodeLogsChecker instance for each one of them +- run level methods should be implemented in inheriting classes + +### NODE LOGS CHECKER +- Node level processing, that loops through individual log files for a node and calls its instance of the AhoCorasikParser to search for entries with pre-defined key phrases +- node level methods should be implemented in inheriting classes + +### AHO-CORASIK PARSER +- Log level processing implementing the aho-corasik algorithm that searches for a list of given keywords simultaneously. 
+ + +## CROSS SHARD ANALYSIS TOOL +A tool that validates that cross-shard miniblocks are executed (and proposed) in strict order, without gaps or duplications. + INSTALL Create a virtual environment and install the dependencies: +``` python3 -m venv ./venv source ./venv/bin/activate pip install -r ./requirements.txt --upgrade export PYTHONPATH=. - +``` INSTALL DEVELOPMENT DEPENDENCIES - +``` pip install -r ./requirements-dev.txt --upgrade +``` EXAMPLE USAGE - +``` python -m multiversx_cross_shard_analysis.gather_data --path /home/mihaela/Downloads/cross-shard-execution-anal-9afe696daf.zip +``` \ No newline at end of file diff --git a/multiversx_cross_shard_analysis/header_analysis_archive_handler.py b/multiversx_cross_shard_analysis/header_analysis_archive_handler.py index 496d276..08c007d 100644 --- a/multiversx_cross_shard_analysis/header_analysis_archive_handler.py +++ b/multiversx_cross_shard_analysis/header_analysis_archive_handler.py @@ -12,7 +12,7 @@ class HeaderAnalysisArchiveHandler(ArchiveHandler): def __init__(self, checker: HeaderAnalysisChecker, logs_path: str): self.checker = checker self.shard_data = ShardData() - self.parsed_miniblocks = {} + super().__init__(checker, logs_path) @@ -39,3 +39,13 @@ def write_run_json(self, path=''): with open(output_file, 'w') as f: json.dump(run_data, f, indent=4) print(f"Shard data for shard {shard_id} written to {output_file}") + miniblocks_reports_path = f'./Reports/{self.run_name}/Miniblocks' + output_file = Path(f'{miniblocks_reports_path}/miniblocks_report.json') + directory = output_file.parent + directory.mkdir(parents=True, exist_ok=True) + with open(output_file, 'w') as f: + json.dump({ + "run_name": self.run_name, + "miniblocks": self.shard_data.miniblocks + }, f, indent=4) + print(f"Miniblock data written to {output_file}") diff --git a/multiversx_cross_shard_analysis/header_structures.py b/multiversx_cross_shard_analysis/header_structures.py index 083ff54..be835d6 100644 --- a/multiversx_cross_shard_analysis/header_structures.py +++ b/multiversx_cross_shard_analysis/header_structures.py @@ -1,3 +1,4 @@ +from enum import Enum from typing import Any @@ -26,32 +27,89 @@ def reset(self): self.seen_headers: dict[str, set[str]] = {'proposed_headers': set(), 'commited_headers': set()} - def add_proposed_header(self, header: dict[str, Any]): + def add_proposed_header(self, header: dict[str, Any]) -> bool: nonce = get_value('nonce', header) if nonce in self.seen_headers['proposed_headers']: - return + return False self.header_dictionary['proposed_headers'].append(header) self.seen_headers['proposed_headers'].add(nonce) + return True - def add_commited_header(self, header: dict[str, Any]): + def add_commited_header(self, header: dict[str, Any]) -> bool: nonce = get_value('nonce', header) if nonce in self.seen_headers['commited_headers']: - return + return False self.header_dictionary['commited_headers'].append(header) self.seen_headers['commited_headers'].add(nonce) + return True + + +MiniBlockTypes = Enum("MiniBlockType", ['MiniBlockHeaders', 'ShardInfo', 'ExecutionResults']) class ShardData: def __init__(self): self.parsed_headers = {0: HeaderData(), 1: HeaderData(), 2: HeaderData(), 4294967295: HeaderData()} + self.miniblocks = {} + 
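# miniblocks: hash -> copy of the miniblock header plus its 'mentioned' list; the set below dedups hashes across nodes + 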
self.seen_miniblock_hashes = set() def add_node(self, node_data: HeaderData): for header_status in node_data.header_dictionary.keys(): for header in node_data.header_dictionary[header_status]: shard_id = get_shard_id(header) + added = False if header_status == 'commited_headers': - self.parsed_headers[shard_id].add_commited_header(header) + added = self.parsed_headers[shard_id].add_commited_header(header) elif header_status == 'proposed_headers': - self.parsed_headers[shard_id].add_proposed_header(header) + added = self.parsed_headers[shard_id].add_proposed_header(header) else: print(f"Warning: Unknown header status {header_status} in header: round = {get_value('round', header)}, nonce = {get_value('nonce', header)}") + if added: + self.add_miniblocks(header, header_status) + + def add_miniblocks(self, header: dict[str, Any], status: str): + header_struct = Header(header) + for mb_type, miniblocks in header_struct.miniblocks.items(): + for mb in miniblocks: + mb_hash = mb.get('hash') + if mb_hash not in self.seen_miniblock_hashes: + self.seen_miniblock_hashes.add(mb_hash) + self.miniblocks[mb_hash] = mb.copy() + self.miniblocks[mb_hash]['mentioned'] = [] + mention_type = 'notarized' if mb_type == MiniBlockTypes.ShardInfo else status + self.miniblocks[mb_hash]['mentioned'].append((mention_type, header_struct.metadata)) + + +class Header: + def __init__(self, header: dict[str, Any]): + self.metadata: dict[str, Any] = self.get_header_metadata(header) + self.miniblocks: dict[str, list[dict[str, Any]]] = self.get_miniblocks(header) + + def get_header_metadata(self, header: dict[str, Any]) -> dict[str, Any]: + if Header.isHeaderV2(header): + header = header['header'] + return { + "nonce": header.get('nonce', 0), + "round": header.get('round', 0), + "epoch": header.get('epoch', 0), + "shard_id": header.get('shardID', 4294967295), + } + + def get_miniblocks(self, header: dict[str, Any]) -> dict[str, list[dict[str, Any]]]: + miniblocks = {} + if Header.isHeaderV2(header): + header = header['header'] + miniblocks[MiniBlockTypes.MiniBlockHeaders] = header.get('miniBlockHeaders', []) + if Header.isMetaHeader(header): + miniblocks[MiniBlockTypes.ShardInfo] = [] + for shard_header in header['shardInfo']: + miniblocks[MiniBlockTypes.ShardInfo].extend(shard_header.get('shardMiniBlockHeaders', [])) + return miniblocks + + @staticmethod + def isHeaderV2(header: dict[str, Any]) -> bool: + return 'header' in header + + @staticmethod + def isMetaHeader(header: dict[str, Any]) -> bool: + return 'shardInfo' in header From a8f2d8bd86f28c70aaada0a79cf24bf90bbb71d8 Mon Sep 17 00:00:00 2001 From: Mihaela Radian Date: Fri, 21 Nov 2025 18:11:49 +0200 Subject: [PATCH 05/24] MX-17306 Added reports, refactoring --- multiversx_cross_shard_analysis/constants.py | 40 ++ .../header_structures.py | 43 +- .../miniblock_data.py | 369 ++++++++++++++++++ .../miniblocks_detailed_report.py | 44 +++ multiversx_cross_shard_analysis/report.py | 318 +++++++++++++++ 5 files changed, 793 insertions(+), 21 deletions(-) create mode 100644 multiversx_cross_shard_analysis/constants.py create mode 100644 multiversx_cross_shard_analysis/miniblock_data.py create mode 100644 multiversx_cross_shard_analysis/miniblocks_detailed_report.py create mode 100644 multiversx_cross_shard_analysis/report.py diff --git a/multiversx_cross_shard_analysis/constants.py b/multiversx_cross_shard_analysis/constants.py new file mode 100644 index 0000000..9441714 --- /dev/null +++ b/multiversx_cross_shard_analysis/constants.py @@ -0,0 +1,40 @@ +from enum import Enum 
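+# String building blocks for the composed mention-type names (see the MentionType +# values below); the composition itself happens in Header.get_miniblocks in header_structures.py.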
+ + +origin_shard = "origin_shard" +dest_shard = "dest_shard" +meta = "meta" +proposed = "proposed" +committed = "committed" + +MiniBlockTypes = Enum("MiniBlockType", [ + 'MiniBlockHeaders', + 'ShardInfo', + 'ExecutionResults' +]) + +MentionType = Enum("MentionType", [ + # miniblock is mentioned in an origin shard header + "origin_shard_proposed", + "origin_shard_committed", + + # notarization of the miniblock when meta includes the origin shard header + "meta_origin_shard_proposed", + "meta_origin_shard_committed", + + # miniblock is mentioned in a destination shard header + "dest_shard_proposed", + "dest_shard_committed", + + # notarization of the miniblock when meta includes the destination shard header + "meta_dest_shard_proposed", + "meta_dest_shard_committed", + + # miniblock is mentioned in an execution result, either on the origin or the destination shard + "exec_proposed", + "exec_committed", + + # notarization of execution results when meta includes the header containing the execution result + "meta_exec_proposed", + "meta_exec_committed", +]) diff --git a/multiversx_cross_shard_analysis/header_structures.py b/multiversx_cross_shard_analysis/header_structures.py index be835d6..3ea9dc3 100644 --- a/multiversx_cross_shard_analysis/header_structures.py +++ b/multiversx_cross_shard_analysis/header_structures.py @@ -1,6 +1,7 @@ -from enum import Enum from typing import Any +from .constants import dest_shard, origin_shard, meta + def get_value(variable_name: str, header: dict[str, Any]) -> str: return header['header'][variable_name] if 'header' in header else header[variable_name] @@ -44,9 +45,6 @@ def add_commited_header(self, header: dict[str, Any]) -> bool: return True -MiniBlockTypes = Enum("MiniBlockType", ['MiniBlockHeaders', 'ShardInfo', 'ExecutionResults']) - - class ShardData: def __init__(self): self.parsed_headers = {0: HeaderData(), 1: HeaderData(), 2: HeaderData(), 4294967295: HeaderData()} @@ -68,22 +66,20 @@ def add_node(self, node_data: HeaderData): self.add_miniblocks(header, header_status) def add_miniblocks(self, header: dict[str, Any], status: str): - header_struct = Header(header) - for mb_type, miniblocks in header_struct.miniblocks.items(): - for mb in miniblocks: - mb_hash = mb.get('hash') - if mb_hash not in self.seen_miniblock_hashes: - self.seen_miniblock_hashes.add(mb_hash) - self.miniblocks[mb_hash] = mb.copy() - self.miniblocks[mb_hash]['mentioned'] = [] - mention_type = 'notarized' if mb_type == MiniBlockTypes.ShardInfo else status - self.miniblocks[mb_hash]['mentioned'].append((mention_type, header_struct.metadata)) + header_struct = Header(header, status) + for mention_type, mb in header_struct.miniblocks: + mb_hash = mb.get('hash') + if mb_hash not in self.seen_miniblock_hashes: + self.seen_miniblock_hashes.add(mb_hash) + self.miniblocks[mb_hash] = mb.copy() + self.miniblocks[mb_hash]['mentioned'] = [] + self.miniblocks[mb_hash]['mentioned'].append((mention_type, header_struct.metadata)) class Header: - def __init__(self, header: dict[str, Any]): + def __init__(self, header: dict[str, Any], status: str): self.metadata: dict[str, Any] = self.get_header_metadata(header) - self.miniblocks: dict[str, list[dict[str, Any]]] = self.get_miniblocks(header) + self.miniblocks: list[tuple[str, dict[str, Any]]] = self.get_miniblocks(header, status) def get_header_metadata(self, header: dict[str, Any]) -> dict[str, Any]: if Header.isHeaderV2(header): @@ -95,15 +91,20 @@ def get_header_metadata(self, header: dict[str, Any]) -> dict[str, Any]: - 
def get_miniblocks(self, header: dict[str, Any]) -> dict[str, list[dict[str, Any]]]: - miniblocks = {} + def get_miniblocks(self, header: dict[str, Any], status: str) -> list[tuple[str, dict[str, Any]]]: + metadata = self.metadata + miniblocks = [] if Header.isHeaderV2(header): header = header['header'] - miniblocks[MiniBlockTypes.MiniBlockHeaders] = header.get('miniBlockHeaders', []) + for miniblock in header.get('miniBlockHeaders', []): + miniblock_mention = f'{origin_shard if metadata["shard_id"] == miniblock["senderShardID"] else dest_shard}_{status}' + miniblocks.append((miniblock_mention, miniblock)) if Header.isMetaHeader(header): - miniblocks[MiniBlockTypes.ShardInfo] = [] for shard_header in header['shardInfo']: - miniblocks[MiniBlockTypes.ShardInfo].extend(shard_header.get('shardMiniBlockHeaders', [])) + shard_metadata = self.get_header_metadata(shard_header) + for miniblock in shard_header.get('shardMiniBlockHeaders', []): + miniblock_mention = f'{meta}_{origin_shard if shard_metadata["shard_id"] == miniblock["senderShardID"] else dest_shard}_{status}' + miniblocks.append((miniblock_mention, miniblock)) return miniblocks @staticmethod diff --git a/multiversx_cross_shard_analysis/miniblock_data.py b/multiversx_cross_shard_analysis/miniblock_data.py new file mode 100644 index 0000000..9fff351 --- /dev/null +++ b/multiversx_cross_shard_analysis/miniblock_data.py @@ -0,0 +1,369 @@ +import json +from typing import Any + + +class MiniblockData: + + def __init__(self, miniblocks: list[tuple[str, dict[str, Any]]]): + self.miniblocks = miniblocks + + def get_data_for_detailed_report(self) -> dict[str, Any]: + report = {} + + for mb_hash, mb_info in self.miniblocks: + if mb_info['senderShardID'] == mb_info['receiverShardID']: + continue # Skip same-shard miniblocks + origin_epoch = None + report_data = { + 'lanes': { + 'origin': [], + 'dest': [], + 'meta': [] + } + } + for mention_type, header in mb_info.get('mentioned', []): + lane = 'meta' if 'meta' in mention_type else ('origin' if 'origin' in mention_type else 'dest') + report_data['lanes'][lane].append((mention_type, header.get('round'))) + if lane == 'origin': + origin_epoch = header.get('epoch') + if origin_epoch is None: + print(f"Warning: origin_epoch not found for miniblock {mb_hash}") + continue + if origin_epoch not in report: + report[origin_epoch] = {} + report[origin_epoch][mb_hash] = report_data + return report + + def get_data_for_detailed_report1(self) -> dict[str, Any]: + report = {} + + for mb_hash, mb_info in self.miniblocks: + if mb_info['senderShardID'] == mb_info['receiverShardID']: + continue # Skip same-shard miniblocks + origin_epoch = None + start_round = None + end_round = None + report_data = { + 'start_round': start_round, + 'end_round': end_round, + 'lanes': { + 'origin': [], + 'dest': [], + 'meta': [] + } + } + for mention_type, header in mb_info.get('mentioned', []): + lane = 'meta' if 'meta' in mention_type else ('origin' if 'origin' in mention_type else 'dest') + report_data['lanes'][lane].append((mention_type, header.get('round'))) + if report_data['start_round'] is None or header.get('round') < report_data['start_round']: + report_data['start_round'] = header.get('round') + if report_data['end_round'] is None or header.get('round') > report_data['end_round']: + report_data['end_round'] = header.get('round') + if lane == 'origin': + origin_epoch = header.get('epoch') + if origin_epoch is None: + print(f"Warning: origin_epoch not found for miniblock {mb_hash}") + continue + if origin_epoch not in report: + 
report[origin_epoch] = {} + + report[origin_epoch][mb_hash] = report_data + return report + + +if __name__ == "__main__": + example = {"run_name": "cross-shard-execution-anal-9afe696daf", "miniblocks": {"01f7f0f3503c62b69ff0fe22a57b97eaa2f164ac9e4a0bb852bc66e3f5c12369": {"hash": "01f7f0f3503c62b69ff0fe22a57b97eaa2f164ac9e4a0bb852bc66e3f5c12369", "receiverShardID": 0, "reserved": "", "senderShardID": 1, "txCount": 1, "type": 0, "mentioned": [["dest_shard_proposed_headers", {"nonce": 207, "round": 207, "epoch": 1, "shard_id": 0}], ["dest_shard_commited_headers", {"nonce": 207, "round": 207, "epoch": 1, "shard_id": 0}], ["origin_shard_proposed_headers", {"nonce": 205, "round": 205, "epoch": 1, "shard_id": 1}], ["origin_shard_commited_headers", {"nonce": 205, "round": 205, "epoch": 1, "shard_id": 1}], ["meta_origin_shard_proposed_headers", {"nonce": 206, "round": 206, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_proposed_headers", {"nonce": 208, "round": 208, "epoch": 1, "shard_id": 4294967295}], ["meta_origin_shard_commited_headers", {"nonce": 206, "round": 206, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_commited_headers", {"nonce": 208, "round": 208, "epoch": 1, "shard_id": 4294967295}]]}, "3fce18121e1ce8e57a3d776c91468649699e8238fad7898f46b8ac88b8d2c1cb": {"hash": "3fce18121e1ce8e57a3d776c91468649699e8238fad7898f46b8ac88b8d2c1cb", "receiverShardID": 0, "reserved": "200b", "senderShardID": 0, "txCount": 12, "type": 0, "mentioned": [["origin_shard_proposed_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 0}], ["origin_shard_commited_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 0}], ["meta_origin_shard_proposed_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_origin_shard_commited_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}]]}, "cb2bbdf01dd1a44c813ceecb58ae7404ad9016073fc707dda1be52f7dc5735fc": {"hash": "cb2bbdf01dd1a44c813ceecb58ae7404ad9016073fc707dda1be52f7dc5735fc", "receiverShardID": 1, "reserved": "201c", "senderShardID": 0, "txCount": 29, "type": 0, "mentioned": [["origin_shard_proposed_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 0}], ["origin_shard_commited_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 0}], ["dest_shard_proposed_headers", {"nonce": 212, "round": 212, "epoch": 1, "shard_id": 1}], ["dest_shard_commited_headers", {"nonce": 212, "round": 212, "epoch": 1, "shard_id": 1}], ["meta_origin_shard_proposed_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_proposed_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}], ["meta_origin_shard_commited_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_commited_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}]]}, "01e7fc8132cd61f6aede3231bc9c9dc36dbe7d3ffcaca59fce7b46cca0e5884e": {"hash": "01e7fc8132cd61f6aede3231bc9c9dc36dbe7d3ffcaca59fce7b46cca0e5884e", "receiverShardID": 2, "reserved": "2008", "senderShardID": 0, "txCount": 9, "type": 0, "mentioned": [["origin_shard_proposed_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 0}], ["origin_shard_commited_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 0}], ["meta_origin_shard_proposed_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_proposed_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}], 
["meta_origin_shard_commited_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_commited_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}], ["dest_shard_proposed_headers", {"nonce": 211, "round": 212, "epoch": 1, "shard_id": 2}], ["dest_shard_commited_headers", {"nonce": 211, "round": 212, "epoch": 1, "shard_id": 2}]]}, "f966a0d0370de9a7b0675ec153c03f61031e02864fe781ab49cca0f60439e590": {"hash": "f966a0d0370de9a7b0675ec153c03f61031e02864fe781ab49cca0f60439e590", "receiverShardID": 0, "reserved": "2012", "senderShardID": 2, "txCount": 19, "type": 0, "mentioned": [["dest_shard_proposed_headers", {"nonce": 212, "round": 212, "epoch": 1, "shard_id": 0}], ["dest_shard_commited_headers", {"nonce": 212, "round": 212, "epoch": 1, "shard_id": 0}], ["meta_origin_shard_proposed_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_proposed_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}], ["meta_origin_shard_commited_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_commited_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}], ["origin_shard_proposed_headers", {"nonce": 209, "round": 210, "epoch": 1, "shard_id": 2}], ["origin_shard_commited_headers", {"nonce": 209, "round": 210, "epoch": 1, "shard_id": 2}]]}, "cdac3609485d6813f79c717e48e5d6cf94b3ca6e25dd372a21f3511f0eb1e536": {"hash": "cdac3609485d6813f79c717e48e5d6cf94b3ca6e25dd372a21f3511f0eb1e536", "receiverShardID": 0, "reserved": "200e", "senderShardID": 1, "txCount": 15, "type": 0, "mentioned": [["dest_shard_proposed_headers", {"nonce": 212, "round": 212, "epoch": 1, "shard_id": 0}], ["dest_shard_commited_headers", {"nonce": 212, "round": 212, "epoch": 1, "shard_id": 0}], ["origin_shard_proposed_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 1}], ["origin_shard_commited_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 1}], ["meta_origin_shard_proposed_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_proposed_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}], ["meta_origin_shard_commited_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_commited_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}]]}}} + mb_data = MiniblockData(list(example['miniblocks'].items())) + print(json.dumps(mb_data.get_data_for_detailed_report(), indent=4)) + +''' +Class to hold data related to a miniblock and its appearances in headers. + +For headers V1, miniblocks are directly in the header under "miniBlockHeaders". +For headers V2, miniblocks are under "header" -> "miniBlockHeaders". +For metaheaders V1, miniblocks are under "miniBlockHeaders" and also in "shardInfo"->"shardMiniBlockHeaders". 
+''' + +''' +header: miniblocks +Example: +HV1 + { + "accumulatedFees": "0", + "blockBodyType": 0, + "chainID": "31", + "developerFees": "0", + "epoch": 0, + "epochStartMetaHash": "", + "leaderSignature": "", + "metaBlockHashes": [ + "353b97d74521f37d6776c9b8070f928af210d9cca2f22531f635d2ed207d0a44" + ], + "miniBlockHeaders": [], + "nonce": 6, + "peerChanges": [], + "prevHash": "5fef2a316b7046470de021f37a5443854699950138ded925cc0d10a1c4cdc383", + "prevRandSeed": "0e4d1c5112aef96495f851ea8a285fab7aa7a28723d9930f8f280f1e158de7b64f95f301160794da4f551cd789176714", + "pubKeysBitmap": "", + "randSeed": "22400b5e348375592be8537cc797da3f80fc5a385feb18922c060c7b9b7e53698bc3ffa7da4e28bee2a0fd2ddb5ea419", + "receiptsHash": "0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8", + "reserved": "", + "rootHash": "34f5b60441c630fbd7327835070a64429eb40627b0b638c062d15aa1de2f7208", + "round": 6, + "shardID": 1, + "signature": "", + "softwareVersion": "64656661756c74", + "timeStamp": 1762937897, + "txCount": 0 + }, + + +HV2 + { + "header": { + "accumulatedFees": "0", + "blockBodyType": 0, + "chainID": "31", + "developerFees": "0", + "epoch": 2, + "epochStartMetaHash": "e43e0e37766f6e59bb5bc586b244427e0385ea720292a0c17b0272952b4afbf5", + "leaderSignature": "05bbf615de4e0d1271df5fd4eb6a057a6d89fd945499b24a50dbc080d4b87dd8c5823105fceacbdf214653621bc3f10f", + "metaBlockHashes": [ + "e43e0e37766f6e59bb5bc586b244427e0385ea720292a0c17b0272952b4afbf5" + ], + "miniBlockHeaders": [ + { + "hash": "d3ba36a1f12970615fbea92a8a3b1639fef9676dc2951a7150c73516cbde2301", + "receiverShardID": 1, + "reserved": "200a", + "senderShardID": 4294967295, + "txCount": 11, + "type": 255 + }, + { + "hash": "697a913df8d23454c56755c9f60dca2008d182ef685c124f0ade6332ae291647", + "receiverShardID": 4294967280, + "reserved": "200e", + "senderShardID": 4294967295, + "txCount": 15, + "type": 60 + }, + { + "hash": "9948e5f806de024ccc253429850f3e6b4203d2509c7d653f8dd00d89e8d32ae5", + "receiverShardID": 4294967280, + "reserved": "200e", + "senderShardID": 4294967295, + "txCount": 15, + "type": 60 + }, + { + "hash": "722b967aac3a34f4097e536176975f0f7fb06e70f45e35a67a09a09011f699be", + "receiverShardID": 4294967280, + "reserved": "200e", + "senderShardID": 4294967295, + "txCount": 15, + "type": 60 + }, + { + "hash": "7ab74c81351bf5d5876421da2fda21257c4924240be05fae66523dcc7cf165ed", + "receiverShardID": 4294967280, + "reserved": "200e", + "senderShardID": 4294967295, + "txCount": 15, + "type": 60 + } + ], + "nonce": 403, + "peerChanges": [], + "prevHash": "5e2640a23517bb8fbba1ea9428c80bb86af907d3dca5f931a8c414ecab15816c", + "prevRandSeed": "41ec3c4ce905421a9646354435b0c6259d7f6268f784133dc0e3f5033580e5492071b90f60c84f25c396004f98c0f00a", + "pubKeysBitmap": "", + "randSeed": "2f26601869d5130613e685b0b9fd829db86310e30e446acfa50e0010865a1e5f82d1e7456930d10942e8e8f2e587b486", + "receiptsHash": "0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8", + "reserved": "", + "rootHash": "0a4c40ca1a8488ca7d3832fb73c315a3a86facafca48978f2d28b2c0da7dcf41", + "round": 403, + "shardID": 1, + "signature": "", + "softwareVersion": "32", + "timeStamp": 1762940279000, + "txCount": 71 + }, + "scheduledAccumulatedFees": "0", + "scheduledDeveloperFees": "0", + "scheduledGasPenalized": 0, + "scheduledGasProvided": 0, + "scheduledGasRefunded": 0, + "scheduledRootHash": "40ff71800f799bd91ad57e00b8fd232a12ab559360b93f16bfbd23463dd25721" + }, + +metaheader: miniBlockHeaders, shardinfo/shardMiniBlockHeaders +Example: + { + "accumulatedFees": 
"5000000000000000", + "accumulatedFeesInEpoch": "660292165000000000", + "chainID": "31", + "devFeesInEpoch": "6694699500000000", + "developerFees": "1500000000000000", + "epoch": 1, + "epochStart": { + "economics": { + "nodePrice": null, + "prevEpochStartHash": "", + "prevEpochStartRound": 0, + "rewardsForProtocolSustainability": null, + "rewardsPerBlock": null, + "totalNewlyMinted": null, + "totalSupply": null, + "totalToDistribute": null + }, + "lastFinalizedHeaders": [] + }, + "leaderSignature": "a5c38e4db58f7f6598948cf1050fc96f73ed4952db9d11f32a97ee9cfb9f5df84a46733e3b9bcc33b32f83ed9d9ec40a", + "miniBlockHeaders": [ + { + "hash": "f5323d263ac564829e92c457d4393a03d788ed81d9023b247a025d41a82f137b", + "receiverShardID": 4294967295, + "reserved": "", + "senderShardID": 0, + "txCount": 1, + "type": 0 + }, + { + "hash": "2a669d6ba2b61d3915b3b75d779443093dbe408a947445fb7d4ce9c0f8c3dbba", + "receiverShardID": 0, + "reserved": "2002", + "senderShardID": 4294967295, + "txCount": 3, + "type": 90 + }, + { + "hash": "83b76de2135a864cebf7845567ebdabbf31c3c4005340221c5feecb65867a88a", + "receiverShardID": 1, + "reserved": "2001", + "senderShardID": 4294967295, + "txCount": 2, + "type": 90 + }, + { + "hash": "88b7fd9f2c9d3df8bb88ec7d3dd3e3fd13d0dd3ca1e9e9e0d5547bcdb9a3f1b0", + "receiverShardID": 2, + "reserved": "2001", + "senderShardID": 4294967295, + "txCount": 2, + "type": 90 + } + ], + "nonce": 283, + "peerInfo": [], + "prevHash": "b82b1e3ea44357d35d28c76de0ef5d66674c0a602edd8b8c73b45cf61087d0e1", + "prevRandSeed": "19e8fbcf33054f8a5c0982a1d1bcdbe8e698318eaf8b6061316072819463b2b3ab1c22500cb4f61d9f37585d9b994d84", + "pubKeysBitmap": "", + "randSeed": "9d99b6595f2ff33bca4ca5c5cd0331b9b36053f7857bc52806806440feed7a0eae9f44c4691a16e753d05536da369b8f", + "receiptsHash": "0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8", + "reserved": "", + "rootHash": "315acd42e00cca35998436b7a060cb196674cf6e058b74a3637026e101519835", + "round": 283, + "shardInfo": [ + { + "accumulatedFees": "22715500000000000", + "developerFees": "0", + "epoch": 1, + "headerHash": "3b41fc97dd499d9cb34bfeb5b12898b5f456886d06a543c72b2f0f758a13ccc1", + "lastIncludedMetaNonce": 280, + "nonce": 282, + "numPendingMiniBlocks": 1, + "prevHash": "38c906f96e2d56474407b08fd5be7b46ba51466537799b1b623437c65b325c66", + "prevRandSeed": "cb2ba79e42b33bd609694de59b2b8b24baad3d7f2b9bd2044563f15808c610fac34ac7580d5067bc177c45d01c625113", + "pubKeysBitmap": "", + "round": 282, + "shardID": 0, + "shardMiniBlockHeaders": [ + { + "hash": "6c5248018490f44bc5e9961e095130c8aef96bc427f20b6031d51a80fa818ca8", + "receiverShardID": 0, + "reserved": "", + "senderShardID": 0, + "txCount": 92, + "type": 0 + }, + { + "hash": "4d1ceacabdcea75c9cde8a1f00058507760d03998c1a98bd7809be2a5f61973e", + "receiverShardID": 1, + "reserved": "", + "senderShardID": 0, + "txCount": 299, + "type": 0 + }, + { + "hash": "f5323d263ac564829e92c457d4393a03d788ed81d9023b247a025d41a82f137b", + "receiverShardID": 4294967295, + "reserved": "", + "senderShardID": 0, + "txCount": 1, + "type": 0 + } + ], + "signature": "", + "txCount": 392 + }, + { + "accumulatedFees": "4150000000000000", + "developerFees": "0", + "epoch": 1, + "headerHash": "ba444fcc2da250641426831623e93fe2107e2c3f415bd245386d68286c78bb79", + "lastIncludedMetaNonce": 280, + "nonce": 282, + "numPendingMiniBlocks": 2, + "prevHash": "2a308ec0bd722d1197abd2824daa804833e4a78f50d01b43333b998d36657af7", + "prevRandSeed": 
"44d8d8a69e52494bcaf57ba8c52f07843ce47dac05498b6badee4457d8cf8c3846fb068c9edcc074df0d6b2f2ff5e391", + "pubKeysBitmap": "", + "round": 282, + "shardID": 1, + "shardMiniBlockHeaders": [ + { + "hash": "a6b7bc2cecaddf1743109a5f045592c9fefb0954cfa4eb1cc1c44bfe81524645", + "receiverShardID": 1, + "reserved": "", + "senderShardID": 1, + "txCount": 83, + "type": 0 + } + ], + "signature": "", + "txCount": 83 + }, + { + "accumulatedFees": "4150000000000000", + "developerFees": "0", + "epoch": 1, + "headerHash": "1b135e7cc2876ddbf7ee1679ab86c63f4c4416fd76e1ffb96a161f072f012550", + "lastIncludedMetaNonce": 280, + "nonce": 281, + "numPendingMiniBlocks": 2, + "prevHash": "a9da1c4b5e2e65f3c4a7aedbb230ee1f9afe0ae9432965b8971e6e26c5546509", + "prevRandSeed": "bb30cd93bac3a177f65beafbab717113be974e3d31ba1851911059e7a61643843d4b1b0308659f6fde0657cc90d7ca07", + "pubKeysBitmap": "", + "round": 282, + "shardID": 2, + "shardMiniBlockHeaders": [ + { + "hash": "878916cd4e6aff248c15a0fcdd8bb19025a1db1cbabf6ad762426672cff0cd0a", + "receiverShardID": 2, + "reserved": "", + "senderShardID": 2, + "txCount": 83, + "type": 0 + } + ], + "signature": "", + "txCount": 83 + } + ], + "signature": "", + "softwareVersion": "32", + "timeStamp": 1762939559, + "txCount": 566, + "validatorStatsRootHash": "2bdc8762983b907c262896a4387b432746129bce1a357646a21e67c977a28eb5" + }, + ''' diff --git a/multiversx_cross_shard_analysis/miniblocks_detailed_report.py b/multiversx_cross_shard_analysis/miniblocks_detailed_report.py new file mode 100644 index 0000000..9b6f8dd --- /dev/null +++ b/multiversx_cross_shard_analysis/miniblocks_detailed_report.py @@ -0,0 +1,44 @@ +from reportlab.lib.pagesizes import A4 +from reportlab.platypus import SimpleDocTemplate, Table, TableStyle +from reportlab.lib import colors + + +def build_miniblock_table_pdf(rows, out_path): + doc = SimpleDocTemplate(out_path, pagesize=A4) + + header = [ + "miniblock hash", + "sh0 proposed", "sh0 committed", "sh0 notarize prop", "sh0 notarize comm", + "sh1 proposed", "sh1 committed", "sh1 notarize prop", "sh1 notarize comm", + "sh2 proposed", "sh2 committed", "sh2 notarize prop", "sh2 notarize comm", + ] + + data = [header] + rows + + table = Table(data, repeatRows=1) + + table.setStyle(TableStyle([ + ("BACKGROUND", (0, 0), (-1, 0), colors.lightgrey), + ("TEXTCOLOR", (0, 0), (-1, 0), colors.black), + ("ALIGN", (0, 0), (-1, -1), "CENTER"), + ("FONTNAME", (0, 0), (-1, 0), "Helvetica-Bold"), + ("GRID", (0, 0), (-1, -1), 0.3, colors.black), + ("FONTSIZE", (0, 0), (-1, -1), 7), + ])) + + doc.build([table]) + + +# example usage: +# each row must be a list of exactly 13 values, same order as the header +rows = [ + [ + "abc123", + 101, 102, 103, 104, + 201, 202, 203, 204, + 301, 302, 303, 304, + ] +] + +build_miniblock_table_pdf(rows, "miniblocks.pdf") +# mb_data = MiniblockData(list(data['miniblocks'].items())).get_data_for_detailed_report1() diff --git a/multiversx_cross_shard_analysis/report.py b/multiversx_cross_shard_analysis/report.py new file mode 100644 index 0000000..a0ba555 --- /dev/null +++ b/multiversx_cross_shard_analysis/report.py @@ -0,0 +1,318 @@ +from collections import defaultdict +from typing import Any +import matplotlib.colors as mcolors +import matplotlib.pyplot as plt +import json +from reportlab.lib import colors +from reportlab.lib.pagesizes import letter +from reportlab.pdfgen import canvas + +from multiversx_cross_shard_analysis.miniblock_data import MiniblockData + +data = { + "01f7f0f3503c62b69ff0fe22a57b97eaa2f164ac9e4a0bb852bc66e3f5c12369": { + 
"lanes": { + "origin": [ + [ + "origin_shard_proposed_headers", + 205 + ], + [ + "origin_shard_commited_headers", + 205 + ] + ], + "dest": [ + [ + "dest_shard_proposed_headers", + 207 + ], + [ + "dest_shard_commited_headers", + 207 + ] + ], + "meta": [ + [ + "meta_origin_shard_proposed_headers", + 206 + ], + [ + "meta_dest_shard_proposed_headers", + 208 + ], + [ + "meta_origin_shard_commited_headers", + 206 + ], + [ + "meta_dest_shard_commited_headers", + 208 + ] + ] + }, + "origin_epoch": 1 + }, + "3fce18121e1ce8e57a3d776c91468649699e8238fad7898f46b8ac88b8d2c1cb": { + "lanes": { + "origin": [ + [ + "origin_shard_proposed_headers", + 210 + ], + [ + "origin_shard_commited_headers", + 210 + ] + ], + "dest": [], + "meta": [ + [ + "meta_origin_shard_proposed_headers", + 211 + ], + [ + "meta_origin_shard_commited_headers", + 211 + ] + ] + }, + "origin_epoch": 1 + }, + "cb2bbdf01dd1a44c813ceecb58ae7404ad9016073fc707dda1be52f7dc5735fc": { + "lanes": { + "origin": [ + [ + "origin_shard_proposed_headers", + 210 + ], + [ + "origin_shard_commited_headers", + 210 + ] + ], + "dest": [ + [ + "dest_shard_proposed_headers", + 212 + ], + [ + "dest_shard_commited_headers", + 212 + ] + ], + "meta": [ + [ + "meta_origin_shard_proposed_headers", + 211 + ], + [ + "meta_dest_shard_proposed_headers", + 213 + ], + [ + "meta_origin_shard_commited_headers", + 211 + ], + [ + "meta_dest_shard_commited_headers", + 213 + ] + ] + }, + "origin_epoch": 1 + }, + "01e7fc8132cd61f6aede3231bc9c9dc36dbe7d3ffcaca59fce7b46cca0e5884e": { + "lanes": { + "origin": [ + [ + "origin_shard_proposed_headers", + 210 + ], + [ + "origin_shard_commited_headers", + 210 + ] + ], + "dest": [ + [ + "dest_shard_proposed_headers", + 212 + ], + [ + "dest_shard_commited_headers", + 212 + ] + ], + "meta": [ + [ + "meta_origin_shard_proposed_headers", + 211 + ], + [ + "meta_dest_shard_proposed_headers", + 213 + ], + [ + "meta_origin_shard_commited_headers", + 211 + ], + [ + "meta_dest_shard_commited_headers", + 213 + ] + ] + }, + "origin_epoch": 1 + }, + "f966a0d0370de9a7b0675ec153c03f61031e02864fe781ab49cca0f60439e590": { + "lanes": { + "origin": [ + [ + "origin_shard_proposed_headers", + 210 + ], + [ + "origin_shard_commited_headers", + 210 + ] + ], + "dest": [ + [ + "dest_shard_proposed_headers", + 212 + ], + [ + "dest_shard_commited_headers", + 212 + ] + ], + "meta": [ + [ + "meta_origin_shard_proposed_headers", + 211 + ], + [ + "meta_dest_shard_proposed_headers", + 213 + ], + [ + "meta_origin_shard_commited_headers", + 211 + ], + [ + "meta_dest_shard_commited_headers", + 213 + ] + ] + }, + "origin_epoch": 1 + }, + "cdac3609485d6813f79c717e48e5d6cf94b3ca6e25dd372a21f3511f0eb1e536": { + "lanes": { + "origin": [ + [ + "origin_shard_proposed_headers", + 210 + ], + [ + "origin_shard_commited_headers", + 210 + ] + ], + "dest": [ + [ + "dest_shard_proposed_headers", + 212 + ], + [ + "dest_shard_commited_headers", + 212 + ] + ], + "meta": [ + [ + "meta_origin_shard_proposed_headers", + 211 + ], + [ + "meta_dest_shard_proposed_headers", + 213 + ], + [ + "meta_origin_shard_commited_headers", + 211 + ], + [ + "meta_dest_shard_commited_headers", + 213 + ] + ] + }, + "origin_epoch": 1 + } +} + + +def draw_timeline(data: dict[str, Any]): + lane_colors = { + "origin": "#4CAF50", # green + "dest": "#FFEB3B", # yellow + "meta": "#2196F3" # blue + } + + shade_factor = {"proposed": 1.0, "commited": 0.6} + + plt.figure(figsize=(12, 6)) + + for i, (mb_hash, mb_data) in enumerate(data.items()): + # track number of events per round to offset them + round_counts = 
defaultdict(int) + for lane_type, events in mb_data["lanes"].items(): + for name, round_num in events: + shade = shade_factor["proposed"] if "proposed" in name else shade_factor["commited"] + rgb = mcolors.to_rgb(lane_colors[lane_type]) + color = tuple([c * shade for c in rgb]) + + # stack multiple events in the same round + offset = 0.15 * round_counts[round_num] + plt.scatter(round_num, -i + offset, color=color, s=200, marker="s") + round_counts[round_num] += 1 + + plt.yticks([-i for i in range(len(data))], [h[:8] + '…' for h in data.keys()]) + plt.xlabel("Round") + plt.title("Miniblock Timelines") + plt.gca().invert_yaxis() + plt.tight_layout() + plt.savefig("miniblocks.pdf") + plt.show() + + +example = {"run_name": "cross-shard-execution-anal-9afe696daf", "miniblocks": {"01f7f0f3503c62b69ff0fe22a57b97eaa2f164ac9e4a0bb852bc66e3f5c12369": {"hash": "01f7f0f3503c62b69ff0fe22a57b97eaa2f164ac9e4a0bb852bc66e3f5c12369", "receiverShardID": 0, "reserved": "", "senderShardID": 1, "txCount": 1, "type": 0, "mentioned": [["dest_shard_proposed_headers", {"nonce": 207, "round": 207, "epoch": 1, "shard_id": 0}], ["dest_shard_commited_headers", {"nonce": 207, "round": 207, "epoch": 1, "shard_id": 0}], ["origin_shard_proposed_headers", {"nonce": 205, "round": 205, "epoch": 1, "shard_id": 1}], ["origin_shard_commited_headers", {"nonce": 205, "round": 205, "epoch": 1, "shard_id": 1}], ["meta_origin_shard_proposed_headers", {"nonce": 206, "round": 206, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_proposed_headers", {"nonce": 208, "round": 208, "epoch": 1, "shard_id": 4294967295}], ["meta_origin_shard_commited_headers", {"nonce": 206, "round": 206, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_commited_headers", {"nonce": 208, "round": 208, "epoch": 1, "shard_id": 4294967295}]]}, "3fce18121e1ce8e57a3d776c91468649699e8238fad7898f46b8ac88b8d2c1cb": {"hash": "3fce18121e1ce8e57a3d776c91468649699e8238fad7898f46b8ac88b8d2c1cb", "receiverShardID": 0, "reserved": "200b", "senderShardID": 0, "txCount": 12, "type": 0, "mentioned": [["origin_shard_proposed_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 0}], ["origin_shard_commited_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 0}], ["meta_origin_shard_proposed_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_origin_shard_commited_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}]]}, "cb2bbdf01dd1a44c813ceecb58ae7404ad9016073fc707dda1be52f7dc5735fc": {"hash": "cb2bbdf01dd1a44c813ceecb58ae7404ad9016073fc707dda1be52f7dc5735fc", "receiverShardID": 1, "reserved": "201c", "senderShardID": 0, "txCount": 29, "type": 0, "mentioned": [["origin_shard_proposed_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 0}], ["origin_shard_commited_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 0}], ["dest_shard_proposed_headers", {"nonce": 212, "round": 212, "epoch": 1, "shard_id": 1}], ["dest_shard_commited_headers", {"nonce": 212, "round": 212, "epoch": 1, "shard_id": 1}], ["meta_origin_shard_proposed_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_proposed_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}], ["meta_origin_shard_commited_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_commited_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}]]}, 
"01e7fc8132cd61f6aede3231bc9c9dc36dbe7d3ffcaca59fce7b46cca0e5884e": {"hash": "01e7fc8132cd61f6aede3231bc9c9dc36dbe7d3ffcaca59fce7b46cca0e5884e", "receiverShardID": 2, "reserved": "2008", "senderShardID": 0, "txCount": 9, "type": 0, "mentioned": [["origin_shard_proposed_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 0}], ["origin_shard_commited_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 0}], ["meta_origin_shard_proposed_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_proposed_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}], ["meta_origin_shard_commited_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_commited_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}], ["dest_shard_proposed_headers", {"nonce": 211, "round": 212, "epoch": 1, "shard_id": 2}], ["dest_shard_commited_headers", {"nonce": 211, "round": 212, "epoch": 1, "shard_id": 2}]]}, "f966a0d0370de9a7b0675ec153c03f61031e02864fe781ab49cca0f60439e590": {"hash": "f966a0d0370de9a7b0675ec153c03f61031e02864fe781ab49cca0f60439e590", "receiverShardID": 0, "reserved": "2012", "senderShardID": 2, "txCount": 19, "type": 0, "mentioned": [["dest_shard_proposed_headers", {"nonce": 212, "round": 212, "epoch": 1, "shard_id": 0}], ["dest_shard_commited_headers", {"nonce": 212, "round": 212, "epoch": 1, "shard_id": 0}], ["meta_origin_shard_proposed_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_proposed_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}], ["meta_origin_shard_commited_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_commited_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}], ["origin_shard_proposed_headers", {"nonce": 209, "round": 210, "epoch": 1, "shard_id": 2}], ["origin_shard_commited_headers", {"nonce": 209, "round": 210, "epoch": 1, "shard_id": 2}]]}, "cdac3609485d6813f79c717e48e5d6cf94b3ca6e25dd372a21f3511f0eb1e536": {"hash": "cdac3609485d6813f79c717e48e5d6cf94b3ca6e25dd372a21f3511f0eb1e536", "receiverShardID": 0, "reserved": "200e", "senderShardID": 1, "txCount": 15, "type": 0, "mentioned": [["dest_shard_proposed_headers", {"nonce": 212, "round": 212, "epoch": 1, "shard_id": 0}], ["dest_shard_commited_headers", {"nonce": 212, "round": 212, "epoch": 1, "shard_id": 0}], ["origin_shard_proposed_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 1}], ["origin_shard_commited_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 1}], ["meta_origin_shard_proposed_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_proposed_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}], ["meta_origin_shard_commited_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_commited_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}]]}}} +# usage: +# export_timeline_pdf(data, "miniblock_lifecycle.pdf") + + +def chunks(lst, size): + for i in range(0, len(lst), size): + yield lst[i:i + size] + + +if __name__ == "__main__": + with open('./Reports/cross-shard-execution-anal-9afe696daf_old/Miniblocks/miniblocks_report.json', 'r') as f: + data = json.load(f) + + mb_data = MiniblockData(list(data['miniblocks'].items())).get_data_for_detailed_report1() + + LIMIT = 30 # how many miniblocks per 
draw + + for epoch in sorted(mb_data.keys()): + print(f"Epoch: {epoch} - miniblocks: {len(mb_data[epoch])}") + + sorted_mb = sorted( + mb_data[epoch].items(), + key=lambda x: x[1]['start_round'] + ) + + for batch in chunks(sorted_mb, LIMIT): + draw_timeline(data=dict(batch)) From 7074943efaa76697412c2b42ffd0d1ade53e17e9 Mon Sep 17 00:00:00 2001 From: Mihaela Radian Date: Fri, 21 Nov 2025 22:26:17 +0200 Subject: [PATCH 06/24] MX-17306 Added fields decoded from reserved, test --- .gitignore | 3 + multiversx_cross_shard_analysis/constants.py | 22 ++++++ .../decode_reserved.py | 64 +++++++++++++++ .../header_structures.py | 7 +- .../test_decode_reserved.py | 77 +++++++++++++++++++ 5 files changed, 172 insertions(+), 1 deletion(-) create mode 100644 multiversx_cross_shard_analysis/decode_reserved.py create mode 100644 multiversx_cross_shard_analysis/test_decode_reserved.py diff --git a/.gitignore b/.gitignore index 85b25c8..0a579d0 100644 --- a/.gitignore +++ b/.gitignore @@ -9,6 +9,9 @@ __pycache__/ # Json files *.json +# Pdf files +*.pdf + # Distribution / packaging .Python build/ diff --git a/multiversx_cross_shard_analysis/constants.py b/multiversx_cross_shard_analysis/constants.py index 9441714..160ff73 100644 --- a/multiversx_cross_shard_analysis/constants.py +++ b/multiversx_cross_shard_analysis/constants.py @@ -38,3 +38,25 @@ "meta_exec_proposed", "meta_exec_committed", ]) + +# Mappings from field number to field name for MiniBlockHeaderReserved +FIELD_NAME_MAPPING = { + 1: "ExecutionType", + 2: "State", + 3: "IndexOfFirstTxProcessed", + 4: "IndexOfLastTxProcessed", +} + +# Mappings for enum values from block.proto +PROCESSING_TYPE_MAPPING = { + 0: "Normal", + 1: "Scheduled", + 2: "Processed", +} + +# Mappings for miniblock state enum values from block.proto +MINIBLOCK_STATE_MAPPING = { + 0: "Final", + 1: "Proposed", + 2: "PartialExecuted", +} diff --git a/multiversx_cross_shard_analysis/decode_reserved.py b/multiversx_cross_shard_analysis/decode_reserved.py new file mode 100644 index 0000000..78c9cbe --- /dev/null +++ b/multiversx_cross_shard_analysis/decode_reserved.py @@ -0,0 +1,64 @@ + +from typing import Any + +from multiversx_cross_shard_analysis.constants import FIELD_NAME_MAPPING, MINIBLOCK_STATE_MAPPING, PROCESSING_TYPE_MAPPING + + +def get_default_decoded_data(tx_count: int) -> dict[str, Any]: + """ + Returns a dictionary with the default values for the MiniBlockHeaderReserved struct. + """ + return { + "ExecutionType": "Normal", + "State": "Final", + "IndexOfFirstTxProcessed": 0, + "IndexOfLastTxProcessed": tx_count - 1 if tx_count > 0 else 0, + } + + +def decode_reserved_field(hex_string: str, tx_count: int) -> dict[str, Any]: + """ + Decodes the reserved field from a hex string into a human-readable dictionary, + including default values for missing fields. 
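+ +    Worked example: in "20ec12", the first byte 0x20 encodes field number 4 +    (IndexOfLastTxProcessed) with wire type 0 (varint), and the varint bytes +    0xec 0x12 decode to 108 + (18 << 7) = 2412, so the result is the defaults +    with IndexOfLastTxProcessed = 2412.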
+ """ + decoded_data = get_default_decoded_data(tx_count) + + if not hex_string: + return {} + + byte_data = bytes.fromhex(hex_string) + i = 0 + while i < len(byte_data): + field_and_type = byte_data[i] + field_number = field_and_type >> 3 + wire_type = field_and_type & 0x07 + i += 1 + + if wire_type == 0: # Varint + value = 0 + shift = 0 + while True: + if i >= len(byte_data): + decoded_data["error"] = "Incomplete varint data" + return decoded_data + byte = byte_data[i] + value |= (byte & 0x7F) << shift + i += 1 + if not (byte & 0x80): + break + shift += 7 + + field_name = FIELD_NAME_MAPPING.get(field_number, f"UnknownField_{field_number}") + + if field_name == "ExecutionType": + decoded_data[field_name] = PROCESSING_TYPE_MAPPING.get(value, f"UnknownProcessingType_{value}") + elif field_name == "State": + decoded_data[field_name] = MINIBLOCK_STATE_MAPPING.get(value, f"UnknownState_{value}") + else: + decoded_data[field_name] = value + + else: + decoded_data["error"] = f"Unsupported wire type: {wire_type}" + break + + return decoded_data diff --git a/multiversx_cross_shard_analysis/header_structures.py b/multiversx_cross_shard_analysis/header_structures.py index 3ea9dc3..8466bb7 100644 --- a/multiversx_cross_shard_analysis/header_structures.py +++ b/multiversx_cross_shard_analysis/header_structures.py @@ -1,5 +1,7 @@ from typing import Any +from multiversx_cross_shard_analysis.test_decode_reserved import decode_reserved_field + from .constants import dest_shard, origin_shard, meta @@ -67,13 +69,16 @@ def add_node(self, node_data: HeaderData): def add_miniblocks(self, header: dict[str, Any], status: str): header_struct = Header(header, status) + for mention_type, mb in header_struct.miniblocks: mb_hash = mb.get('hash') if mb_hash not in self.seen_miniblock_hashes: self.seen_miniblock_hashes.add(mb_hash) self.miniblocks[mb_hash] = mb.copy() self.miniblocks[mb_hash]['mentioned'] = [] - self.miniblocks[mb_hash]['mentioned'].append((mention_type, header_struct.metadata)) + metadata = header_struct.metadata.copy() + metadata["reserved"] = decode_reserved_field(mb.get("reserved", ""), mb.get("txCount", 0)) + self.miniblocks[mb_hash]['mentioned'].append((mention_type, metadata)) class Header: diff --git a/multiversx_cross_shard_analysis/test_decode_reserved.py b/multiversx_cross_shard_analysis/test_decode_reserved.py new file mode 100644 index 0000000..6cd6a17 --- /dev/null +++ b/multiversx_cross_shard_analysis/test_decode_reserved.py @@ -0,0 +1,77 @@ +from multiversx_cross_shard_analysis.decode_reserved import decode_reserved_field + + +mentioned_headers = { + "origin_shard_proposed_headers": "20ec12", + "origin_shard_commited_headers": "20ec12", + "dest_shard_proposed_headers_1": "1002208112", + "dest_shard_proposed_headers_2": "18821220ec12", + "dest_shard_commited_headers_1": "1002208112", + "dest_shard_commited_headers_2": "18821220ec12", + "meta_origin_shard_proposed_headers": "08011002208112", + "meta_dest_shard_proposed_headers": "08011002180a208112", + "meta_dest_shard_commited_headers": "", +} + +expected = { + "origin_shard_proposed_headers": { + "ExecutionType": "Normal", + "State": "Final", + "IndexOfFirstTxProcessed": 0, + "IndexOfLastTxProcessed": 2412 + }, + "origin_shard_commited_headers": { + "ExecutionType": "Normal", + "State": "Final", + "IndexOfFirstTxProcessed": 0, + "IndexOfLastTxProcessed": 2412 + }, + "dest_shard_proposed_headers_1": { + "ExecutionType": "Normal", + "State": "PartialExecuted", + "IndexOfFirstTxProcessed": 0, + "IndexOfLastTxProcessed": 2305 + }, + 
"dest_shard_proposed_headers_2": { + "ExecutionType": "Normal", + "State": "Final", + "IndexOfFirstTxProcessed": 2306, + "IndexOfLastTxProcessed": 2412 + }, + "dest_shard_commited_headers_1": { + "ExecutionType": "Normal", + "State": "PartialExecuted", + "IndexOfFirstTxProcessed": 0, + "IndexOfLastTxProcessed": 2305 + }, + "dest_shard_commited_headers_2": { + "ExecutionType": "Normal", + "State": "Final", + "IndexOfFirstTxProcessed": 2306, + "IndexOfLastTxProcessed": 2412 + }, + "meta_origin_shard_proposed_headers": { + "ExecutionType": "Scheduled", + "State": "PartialExecuted", + "IndexOfFirstTxProcessed": 0, + "IndexOfLastTxProcessed": 2305 + }, + "meta_dest_shard_proposed_headers": { + "ExecutionType": "Scheduled", + "State": "PartialExecuted", + "IndexOfFirstTxProcessed": 10, + "IndexOfLastTxProcessed": 2305 + }, + "meta_dest_shard_commited_headers": { + "ExecutionType": "Normal", + "State": "Final", + "IndexOfFirstTxProcessed": 0, + "IndexOfLastTxProcessed": 2412 + } +} + + +class TestMiniBlockHeader: + def test_get_processing_type1(self): + for name, hex_str in mentioned_headers.items(): + assert decode_reserved_field(hex_str, 2413) == expected[name], f"Decoding failed for {name}" From a0fbb30155e8bab3c1c59feac5836c310a171041 Mon Sep 17 00:00:00 2001 From: Mihaela Radian Date: Wed, 26 Nov 2025 07:59:57 +0200 Subject: [PATCH 07/24] MX-17306 Added round report --- multiversx_cross_shard_analysis/constants.py | 13 +- .../miniblock_data.py | 36 ++++ .../miniblocks_round_report.py | 193 ++++++++++++++++++ 3 files changed, 241 insertions(+), 1 deletion(-) create mode 100644 multiversx_cross_shard_analysis/miniblocks_round_report.py diff --git a/multiversx_cross_shard_analysis/constants.py b/multiversx_cross_shard_analysis/constants.py index 160ff73..ebed914 100644 --- a/multiversx_cross_shard_analysis/constants.py +++ b/multiversx_cross_shard_analysis/constants.py @@ -1,5 +1,5 @@ from enum import Enum - +from reportlab.lib import colors origin_shard = "origin_shard" dest_shard = "dest_shard" @@ -60,3 +60,14 @@ 1: "Proposed", 2: "PartialExecuted", } + +COLORS_MAPPING = { + "origin_proposed": colors.lightyellow, + "origin_partial_executed": colors.orange, + "origin_final": colors.yellow, + "dest_proposed": colors.mistyrose, + "dest_partial_executed": colors.palevioletred, + "dest_final": colors.pink, + "meta_origin_committed": colors.lightgreen, + "meta_dest_committed": colors.lightblue, +} diff --git a/multiversx_cross_shard_analysis/miniblock_data.py b/multiversx_cross_shard_analysis/miniblock_data.py index 9fff351..ecbd177 100644 --- a/multiversx_cross_shard_analysis/miniblock_data.py +++ b/multiversx_cross_shard_analysis/miniblock_data.py @@ -1,12 +1,48 @@ import json from typing import Any +from multiversx_cross_shard_analysis.constants import COLORS_MAPPING + class MiniblockData: def __init__(self, miniblocks: list[tuple[str, dict[str, Any]]]): self.miniblocks = miniblocks + def get_data_for_round_report(self) -> dict[str, Any]: + report = {} + for mb_hash, mb_info in self.miniblocks: + for mention_type, header in mb_info.get('mentioned', []): + if "proposed" in mention_type: + continue + + epoch = header.get('epoch') + if epoch not in report: + report[epoch] = {} + round_number = header.get('round') + if round_number not in report[epoch]: + report[epoch][round_number] = {} + shard = header.get('shard_id') + if shard not in report[epoch][round_number]: + report[epoch][round_number][shard] = [] + + if header.get('reserved') == {}: + if "meta" in mention_type: + reserved = 
COLORS_MAPPING["meta_origin_committed"] if mention_type.startswith('meta_origin') else COLORS_MAPPING["meta_dest_committed"] + else: + reserved = COLORS_MAPPING["origin_final"] if mention_type.startswith('origin') else COLORS_MAPPING["dest_final"] + else: + # execution_type = header.get('reserved', {}).get('ExecutionType', '') + state = header.get('reserved', {}).get('State', '') + if state == 'Proposed': + reserved = COLORS_MAPPING["origin_proposed"] if mention_type.startswith('origin') else COLORS_MAPPING["dest_proposed"] + elif state == 'PartialExecuted': + reserved = COLORS_MAPPING["origin_partial_executed"] if mention_type.startswith('origin') else COLORS_MAPPING["dest_partial_executed"] + else: + reserved = COLORS_MAPPING["origin_final"] if mention_type.startswith('origin') else COLORS_MAPPING["dest_final"] + report[epoch][round_number][shard].append((mb_hash, reserved)) + return report + def get_data_for_detailed_report(self) -> dict[str, Any]: report = {} diff --git a/multiversx_cross_shard_analysis/miniblocks_round_report.py b/multiversx_cross_shard_analysis/miniblocks_round_report.py new file mode 100644 index 0000000..62415f5 --- /dev/null +++ b/multiversx_cross_shard_analysis/miniblocks_round_report.py @@ -0,0 +1,193 @@ +import json +from typing import Any + +from reportlab.platypus import ( + SimpleDocTemplate, Paragraph, Spacer, + Flowable +) +from reportlab.lib.styles import getSampleStyleSheet +from reportlab.lib.pagesizes import A4 +from reportlab.graphics.shapes import Drawing, Rect, String +from reportlab.lib import colors +from reportlab.platypus import Table, TableStyle + +from multiversx_cross_shard_analysis.miniblock_data import MiniblockData +from multiversx_cross_shard_analysis.constants import COLORS_MAPPING + +# ---------------------------------------- +# legend +# ---------------------------------------- + + +def legend_box(color: colors.Color) -> Drawing: + d = Drawing(8, 8) + d.add(Rect(0, 0, 8, 8, fillColor=color, strokeColor=colors.black)) # type: ignore + return d + + +def build_legend(): + # turn dict into list of (label, color) + items = list(COLORS_MAPPING.items()) + + # 3 columns grid + cols = 3 + rows = [] + row = [] + + for i, (label, color) in enumerate(items): + row.append([legend_box(color), label]) + if len(row) == cols: + rows.append(row) + row = [] + + # leftover + if row: + rows.append(row) + + # flatten structure for Table + flat_rows = [] + for r in rows: + flat = [] + for (box, label) in r: + flat.append(box) + flat.append(label) + flat_rows.append(flat) + + # widths: box col, text col, box col, text col, etc. 
+ col_widths = [] + for _ in range(cols): + col_widths.extend([10, 140]) + + tbl = Table(flat_rows, colWidths=col_widths) + tbl.setStyle(TableStyle([ + ("VALIGN", (0, 0), (-1, -1), "MIDDLE"), + ("LEFTPADDING", (0, 0), (-1, -1), 1), + ("RIGHTPADDING", (0, 0), (-1, -1), 2), + ("TOPPADDING", (0, 0), (-1, -1), 1), + ("BOTTOMPADDING", (0, 0), (-1, -1), 1), + ])) + + return tbl + + +# ---------------------------------------- +# miniblock graphics (stack of boxes) +# ---------------------------------------- + + +def miniblock_box(text: str, stage_color: colors.Color) -> Drawing: + d = Drawing(120, 18) + d.add(Rect(0, 0, 120, 18, fillColor=stage_color, strokeColor=colors.black)) # type: ignore + d.add(String(3, 5, text, fontSize=6, fillColor=colors.black)) + return d + + +def stacked_miniblocks(miniblocks: list[tuple[str, colors.Color]], shard: int | None = None) -> Drawing: + height = 20 * (len(miniblocks) + 1 if shard is not None else len(miniblocks)) + d = Drawing(120, height) + y = height - 20 + + # header rectangle (same size, no fill) + if shard is not None: + d.add(Rect(0, y, 120, 18, fillColor=None, strokeColor=colors.black)) # type: ignore + d.add(String(3, y + 5, f"Shard {shard}", fontSize=6, fontName="Helvetica-Bold")) + y -= 20 + + # miniblocks + for (h, color) in miniblocks: + d.add(Rect(0, y, 120, 18, fillColor=color, strokeColor=colors.black)) # type: ignore + d.add(String(3, y + 5, h[:36] + "...", fontSize=6)) + y -= 20 + + return d + +# ---------------------------------------- +# horizontal layout helper +# ---------------------------------------- + + +class HFlowable(Flowable): + def __init__(self, flowables: list[Flowable], space=6): + super().__init__() + self.flowables = flowables + self.space = space + + def wrap(self, aW: float, aH: float) -> tuple[float, float]: + w, h = 0, 0 + for fl in self.flowables: + fw, fh = fl.wrap(aW, aH) + w += fw + self.space + h = max(h, fh) + self.width, self.height = w, h + return w, h + + def draw(self): + x = 0 + for fl in self.flowables: + fl.wrapOn(self.canv, 0, 0) + fl.drawOn(self.canv, x, 0) + x += fl.width + self.space + + +# ---------------------------------------- +# build report for one epoch +# ---------------------------------------- + +def build_report(epoch: int, rounds_data: dict[int, Any], shards: list[int]): + + doc = SimpleDocTemplate( + f"miniblock_report_epoch_{epoch}.pdf", + pagesize=A4, + leftMargin=20, rightMargin=20, topMargin=20, bottomMargin=20 + ) + + story = [] + styles = getSampleStyleSheet() + + # title + story.append(Paragraph(f"Miniblock Report — Epoch {epoch}", styles["Title"])) + story.append(Spacer(1, 8)) + story.append(build_legend()) + story.append(Spacer(1, 12)) + + for rnd, shard_map in rounds_data.items(): + # round header + story.append(Paragraph(f"Round {rnd}", styles["Heading3"])) + story.append(Spacer(1, 6)) + + # for each row: we need max miniblocks across shards + max_rows = max(len(shard_map.get(s, [])) for s in shards) + + for i in range(max_rows): + row_flowables = [] + + for shard in shards: + mbs = shard_map.get(shard, []) + if i < len(mbs): + row_flowables.append(stacked_miniblocks([mbs[i]], shard if i == 0 else None)) + else: + # empty placeholder to keep columns aligned + row_flowables.append(Spacer(120, 20)) + + # add horizontal row + story.append(HFlowable(row_flowables, space=12)) + + story.append(Spacer(1, 20)) + + doc.build(story) + + +# ---------------------------------------- +# main +# ---------------------------------------- + +if __name__ == "__main__": + with 
open('./Reports/cross-shard-execution-anal-9afe696daf/Miniblocks/miniblocks_report.json', 'r') as f: + data = json.load(f) + + mb_data = MiniblockData(list(data['miniblocks'].items())).get_data_for_round_report() + + for epoch in sorted(mb_data.keys()): + print(f"Epoch: {epoch}") + report_dict = mb_data[epoch] + build_report(int(epoch), report_dict, shards=[0, 1, 2, 4294967295]) From 246d9562b21f03630f65b34d08212ebf7322da10 Mon Sep 17 00:00:00 2001 From: Mihaela Radian Date: Fri, 28 Nov 2025 05:48:43 +0200 Subject: [PATCH 08/24] MX-17306 Added timeline report --- multiversx_cross_shard_analysis/constants.py | 11 + .../miniblock_data.py | 83 ++++++ .../miniblocks_round_report.py | 2 +- .../miniblocks_timeline_report.py | 251 ++++++++++++++++++ 4 files changed, 346 insertions(+), 1 deletion(-) create mode 100644 multiversx_cross_shard_analysis/miniblocks_timeline_report.py diff --git a/multiversx_cross_shard_analysis/constants.py b/multiversx_cross_shard_analysis/constants.py index ebed914..c75b338 100644 --- a/multiversx_cross_shard_analysis/constants.py +++ b/multiversx_cross_shard_analysis/constants.py @@ -61,6 +61,17 @@ 2: "PartialExecuted", } +# type names +TYPE_NAMES = { + 0: "TxBlock", + 30: "StateBlock", + 60: "PeerBlock", + 90: "SmartContractResultBlock", + 120: "InvalidBlock", + 150: "ReceiptBlock", + 255: "RewardsBlock", +} + COLORS_MAPPING = { "origin_proposed": colors.lightyellow, "origin_partial_executed": colors.orange, diff --git a/multiversx_cross_shard_analysis/miniblock_data.py b/multiversx_cross_shard_analysis/miniblock_data.py index ecbd177..932a95e 100644 --- a/multiversx_cross_shard_analysis/miniblock_data.py +++ b/multiversx_cross_shard_analysis/miniblock_data.py @@ -3,6 +3,8 @@ from multiversx_cross_shard_analysis.constants import COLORS_MAPPING +from multiversx_cross_shard_analysis.decode_reserved import get_default_decoded_data + class MiniblockData: @@ -43,6 +45,87 @@ def get_data_for_round_report(self) -> dict[str, Any]: report[epoch][round_number][shard].append((mb_hash, reserved)) return report + def get_data_for_detail_report(self) -> dict[str, list[dict[str, Any]]]: + ''' + { + "hash": "b87f711aadc8f928ff6b2a1baf0ef4381f36dd0af9d100c07e9b7b6ca7233648", + "receiverShardID": 0, + "senderShardID": 1, + "txCount": 50, + "type": 0, + "first_seen_round": 295, + "last_seen_round": 298, + "mentioned": { + 298: [ + ("dest_proposed", "txs 1–25 / 50", COLORS_MAPPING["dest_partial_executed"]), + ("dest_proposed", "txs 26–50 / 50", COLORS_MAPPING["dest_final"]), + ], + 295: [ + ("origin_proposed", "txs 1–50 / 50", COLORS_MAPPING["origin_final"]), + ], + 296: [ + ("meta_origin_proposed", "txs 1–50 / 50", COLORS_MAPPING["meta_origin_committed"]), + ], + }, + }, + ''' + report = {} + for mb_hash, mb_info in self.miniblocks: + if mb_info['senderShardID'] == mb_info['receiverShardID']: + continue # Skip same-shard miniblocks + origin_epoch = None + + mb_data = { + "hash": mb_hash, + "first_seen_round": None, + "last_seen_round": None, + "receiverShardID": mb_info['receiverShardID'], + "senderShardID": mb_info['senderShardID'], + "txCount": mb_info['txCount'], + "type": mb_info['type'], + "mentioned": {}, + } + for mention_type, header in mb_info.get('mentioned', []): + epoch = header.get('epoch') + if epoch is not None and (origin_epoch is None or epoch < origin_epoch): + origin_epoch = epoch + round_number = header.get('round') + if mb_data['first_seen_round'] is None or round_number < mb_data['first_seen_round']: + mb_data['first_seen_round'] = round_number + if 
mb_data['last_seen_round'] is None or round_number > mb_data['last_seen_round']:
+                    mb_data['last_seen_round'] = round_number
+                if round_number not in mb_data['mentioned']:
+                    mb_data['mentioned'][round_number] = []
+
+                reserved = header.get('reserved', {})
+                if reserved == {}:
+                    reserved = get_default_decoded_data(tx_count=mb_info['txCount'])
+                    if "meta" in mention_type:
+                        color = COLORS_MAPPING["meta_origin_committed"] if mention_type.startswith('meta_origin') else COLORS_MAPPING["meta_dest_committed"]
+                    else:
+                        color = COLORS_MAPPING["origin_final"] if mention_type.startswith('origin') else COLORS_MAPPING["dest_final"]
+                else:
+                    # execution_type = header.get('reserved', {}).get('ExecutionType', '')
+                    state = header.get('reserved', {}).get('State', '')
+                    if state == 'Proposed':
+                        color = COLORS_MAPPING["origin_proposed"] if mention_type.startswith('origin') else COLORS_MAPPING["dest_proposed"]
+                    elif state == 'PartialExecuted':
+                        color = COLORS_MAPPING["origin_partial_executed"] if mention_type.startswith('origin') else COLORS_MAPPING["dest_partial_executed"]
+                    else:
+                        color = COLORS_MAPPING["origin_final"] if mention_type.startswith('origin') else COLORS_MAPPING["dest_final"]
+                mb_data['mentioned'][round_number].append((mention_type, f"txs {reserved['IndexOfFirstTxProcessed']}–{reserved['IndexOfLastTxProcessed']} / {mb_info['txCount']}", color))
+
+            if not origin_epoch:
+                print(f"Warning: origin_epoch not found for miniblock {mb_hash}")
+                continue
+            if origin_epoch not in report:
+                report[origin_epoch] = []
+            report[origin_epoch].append(mb_data)
+
+        for epoch, mb_list in report.items():
+            mb_list.sort(key=lambda x: x['first_seen_round'])
+        return report
+
     def get_data_for_detailed_report(self) -> dict[str, Any]:
         report = {}

diff --git a/multiversx_cross_shard_analysis/miniblocks_round_report.py b/multiversx_cross_shard_analysis/miniblocks_round_report.py
index 62415f5..6851a36 100644
--- a/multiversx_cross_shard_analysis/miniblocks_round_report.py
+++ b/multiversx_cross_shard_analysis/miniblocks_round_report.py
@@ -145,7 +145,7 @@ def build_report(epoch: int, rounds_data: dict[int, Any], shards: list[int]):
     styles = getSampleStyleSheet()

     # title
-    story.append(Paragraph(f"Miniblock Report — Epoch {epoch}", styles["Title"]))
+    story.append(Paragraph(f"Miniblock Shards Report — Epoch {epoch}", styles["Title"]))
     story.append(Spacer(1, 8))
     story.append(build_legend())
     story.append(Spacer(1, 12))
diff --git a/multiversx_cross_shard_analysis/miniblocks_timeline_report.py b/multiversx_cross_shard_analysis/miniblocks_timeline_report.py
new file mode 100644
index 0000000..a683e13
--- /dev/null
+++ b/multiversx_cross_shard_analysis/miniblocks_timeline_report.py
@@ -0,0 +1,251 @@
+"""
+miniblocks_timeline_report.py
+
+Produces the miniblock timeline report as a PDF:
+- multiple miniblocks per page
+- each miniblock: subtitle with the full hash plus meta info
+- timeline table: columns = rounds (including gaps); each column holds stacked colored rectangles, one per mention
+- colors: use mention['color'] if present, otherwise derive the color from the mention type and the reserved field
+"""
+
+import json
+from typing import Any
+from reportlab.lib.pagesizes import A4
+from reportlab.lib import colors
+from reportlab.platypus import (
+    SimpleDocTemplate,
+    Paragraph,
+    Spacer,
+    Table,
+    TableStyle,
+    PageBreak,
+)
+from reportlab.lib.styles import getSampleStyleSheet
+from reportlab.graphics.shapes import Drawing, Rect, String
+from reportlab.platypus.flowables import Flowable
+
+
+from multiversx_cross_shard_analysis.constants import
COLORS_MAPPING, TYPE_NAMES + +from multiversx_cross_shard_analysis.miniblock_data import MiniblockData + +# ----------------------------- +# CONFIG +# ----------------------------- + +PAGE_WIDTH, PAGE_HEIGHT = A4 +LEFT_MARGIN = RIGHT_MARGIN = 20 +TOP_MARGIN = BOTTOM_MARGIN = 20 + +MINIBLOCKS_PER_PAGE = 6 + +ROUND_HEADER_FONT = 7 +RECT_LABEL_FONT = 8 +RECT_INFO_FONT = 8 + +# rectangle drawing dimensions +RECT_H = 20 +RECT_PADDING_X = 4 + +# ----------------------------- +# small flowable for a left-aligned rectangle inside a table cell +# ----------------------------- + + +class RectCell(Flowable): + '''not used currently, included for further development''' + + def __init__(self, label: str, info: str, color: colors.Color, width: float, height: float = 14, padding: float = 2): + super().__init__() + self.label = label + self.info = info + self.color = color + self.width = width + self.height = height + self.padding = padding + + def wrap(self, aW: float, aH: float): + # force rect to match column width, never bigger + return self.width, self.height * 2 + + def draw(self): + c = self.canv + + c.setFillColor(self.color) + c.rect(0, 0, self.width, self.height * 2, fill=1, stroke=0) + + c.setFillColor(colors.black) + c.setFont("Helvetica", 7) + + # label line + c.drawString(self.padding, self.height + 1, self.label) + # info line + c.drawString(self.padding, 2, self.info) + + +# ----------------------------- +# build stacked drawing for one round +# ----------------------------- + + +def build_stack_for_round(items: list[tuple[str, str, colors.Color]], col_width: float) -> Drawing: + """ + items: list of (label, info, color) + """ + + rows = max(1, len(items)) + total_h = rows * RECT_H + d = Drawing(col_width, total_h) + y = total_h - RECT_H + + for label, info, col in items: + rect_w = max(2, col_width - RECT_PADDING_X * 2) - 4 + if 'proposed' in label: + # dashed border for proposed + d.add(Rect(0, y + 2, rect_w, RECT_H - 4, fillColor=col, strokeColor=colors.black, strokeWidth=1, strokeDashArray=[3, 2])) # type: ignore + else: + # solid border for committed + d.add(Rect(0, y + 2, rect_w, RECT_H - 4, fillColor=col, strokeColor=colors.black)) # type: ignore + + # text: two rows inside rectangle + text_x = RECT_PADDING_X + 3 + base_y = y + 4 + + d.add(String(text_x, base_y + 8, label, fontSize=RECT_LABEL_FONT)) + d.add(String(text_x, base_y, info, fontSize=RECT_INFO_FONT)) + + y -= RECT_H + + # empty case + if len(items) == 0: + rect_w = max(2, col_width - RECT_PADDING_X * 2) - 4 + d.add(Rect(0, total_h / 2 - 6, rect_w, 12, fillColor=colors.whitesmoke, strokeColor=colors.grey)) # type: ignore + d.add(String(RECT_PADDING_X + 2, total_h / 2 - 2, "no action", fontSize=RECT_LABEL_FONT)) + + return d + + +# ----------------------------- +# miniblock section +# ----------------------------- +def build_miniblock_section(miniblock: dict[str, Any], page_usable_width: float) -> list[Flowable]: + flow = [] + styles = getSampleStyleSheet() + + h = miniblock.get("hash", "") + sender = miniblock.get("senderShardID", "?") + receiver = miniblock.get("receiverShardID", "?") + txc = miniblock.get("txCount", "?") + typ = TYPE_NAMES.get(miniblock.get("type", -1), str(miniblock.get("type", "?"))) + + flow.append(Paragraph(f"Miniblock {h}", styles["Heading3"])) + flow.append(Paragraph(f"- from shard {sender} -> shard {receiver}
- tx_count: {txc}, type: {typ}", styles["BodyText"])) + flow.append(Spacer(1, 4)) + + mentioned = miniblock.get("mentioned", {}) + if not mentioned: + flow.append(Paragraph("No mentions found.", styles["BodyText"])) + flow.append(Spacer(1, 6)) + return flow + + first_r = miniblock.get("first_seen_round", 0) + last_r = miniblock.get("last_seen_round", 0) + rounds = list(range(first_r, last_r + 1)) + + num_cols = max(1, len(rounds)) + col_width = page_usable_width / num_cols + + header = [ + Paragraph(f"round {r}", styles["BodyText"]) + for r in rounds + ] + + cells = [] + for r in rounds: + items = mentioned.get(r, []) + drawing = build_stack_for_round(items, col_width) + cells.append(drawing) + + tbl = Table( + [header, cells], + colWidths=[col_width] * num_cols, + hAlign="LEFT", + ) + + tbl.setStyle( + TableStyle( + [ + ("GRID", (0, 0), (-1, -1), 0.25, colors.grey), + ("BACKGROUND", (0, 0), (-1, 0), colors.whitesmoke), + ("ALIGN", (0, 0), (-1, 0), "CENTER"), + ("FONTSIZE", (0, 0), (-1, 0), ROUND_HEADER_FONT), + ("VALIGN", (0, 1), (-1, -1), "TOP"), + ] + ) + ) + + flow.append(tbl) + flow.append(Spacer(1, 8)) + return flow + + +# ----------------------------- +# PDF builder +# ----------------------------- +def build_pdf_from_miniblocks(epoch: int, miniblocks: list[dict[str, Any]], outname="miniblock_timeline_report.pdf"): + doc = SimpleDocTemplate( + outname, + pagesize=A4, + leftMargin=LEFT_MARGIN, + rightMargin=RIGHT_MARGIN, + topMargin=TOP_MARGIN, + bottomMargin=BOTTOM_MARGIN, + ) + + usable_width = PAGE_WIDTH - LEFT_MARGIN - RIGHT_MARGIN + MAX_PAGE_HEIGHT = PAGE_HEIGHT - TOP_MARGIN - BOTTOM_MARGIN + TITLE_HEIGHT = 75 + MINIBLOCK_WITH_2_ROWS = 135 + EXTRA_LINE_HEIGHT = 18 + + story = [] + current_height = 0 + first_page = True + styles = getSampleStyleSheet() + story.append(Paragraph(f"Miniblock Detail Report — Epoch {epoch}", styles["Title"])) + story.append(Spacer(1, 8)) + for i, mb in enumerate(miniblocks, 1): + num_rects = max(len(v) for v in mb.get("mentioned", {}).values()) + EXTRA_LINES = max(0, num_rects - 2) + + miniblock_height = MINIBLOCK_WITH_2_ROWS + EXTRA_LINES * EXTRA_LINE_HEIGHT + + # if first page, reserve title height + effective_page_height = MAX_PAGE_HEIGHT - (TITLE_HEIGHT if first_page else 0) + + if current_height + miniblock_height > effective_page_height: + story.append(PageBreak()) + current_height = 0 + first_page = False + + story.extend(build_miniblock_section(mb, usable_width)) + current_height += miniblock_height + + doc.build(story) + + +# ----------------------------- +# Example using new tuple form: (label, info, color) +# ----------------------------- +if __name__ == "__main__": + # run PDF build + + with open('./Reports/cross-shard-execution-anal-9afe696daf/Miniblocks/miniblocks_report.json', 'r') as f: + data = json.load(f) + + mb_data = MiniblockData(list(data['miniblocks'].items())).get_data_for_detail_report() + + for epoch in sorted(mb_data.keys()): + print(f"Epoch: {epoch}") + report_list = mb_data[epoch] + build_pdf_from_miniblocks(int(epoch), report_list, outname=f"miniblock_timeline_report_epoch_{epoch}.pdf") From 0c64a32cc9d26492729a8a8484bc0c66c266bfc2 Mon Sep 17 00:00:00 2001 From: Mihaela Radian Date: Thu, 4 Dec 2025 20:20:52 +0200 Subject: [PATCH 09/24] MX-17306 Added headers report --- multiversx_cross_shard_analysis/constants.py | 38 +- .../header_structures.py | 157 +++++- .../headers_timeline_report.py | 275 +++++++++++ .../miniblock_data.py | 445 ++---------------- .../miniblocks_detailed_report.py | 44 -- 
.../miniblocks_round_report.py | 2 +- .../miniblocks_timeline_report.py | 7 +- 7 files changed, 487 insertions(+), 481 deletions(-) create mode 100644 multiversx_cross_shard_analysis/headers_timeline_report.py delete mode 100644 multiversx_cross_shard_analysis/miniblocks_detailed_report.py diff --git a/multiversx_cross_shard_analysis/constants.py b/multiversx_cross_shard_analysis/constants.py index c75b338..b6a8e94 100644 --- a/multiversx_cross_shard_analysis/constants.py +++ b/multiversx_cross_shard_analysis/constants.py @@ -31,14 +31,19 @@ "meta_dest_shard_committed", # miniblock is mentioned in an execution result, either on origin or destination shard - "exec_proposed", - "exec_committed", + "origin_exec_proposed", + "origin_exec_committed", + + # miniblock is mentioned in an execution result, either on origin or destination shard + "dest_exec_proposed", + "dest_exec_committed", # notarization of execution results when meta includes the header containing the execution result "meta_exec_proposed", "meta_exec_committed", ]) + # Mappings from field number to field name for MiniBlockHeaderReserved FIELD_NAME_MAPPING = { 1: "ExecutionType", @@ -66,19 +71,30 @@ 0: "TxBlock", 30: "StateBlock", 60: "PeerBlock", - 90: "SmartContractResultBlock", + 90: "SCResultBlock", 120: "InvalidBlock", 150: "ReceiptBlock", 255: "RewardsBlock", } +Colors = Enum("Colors", [ + "origin_proposed", + "origin_partial_executed", + "origin_final", + "dest_proposed", + "dest_partial_executed", + "dest_final", + "meta_origin_committed", + "meta_dest_committed", +]) + COLORS_MAPPING = { - "origin_proposed": colors.lightyellow, - "origin_partial_executed": colors.orange, - "origin_final": colors.yellow, - "dest_proposed": colors.mistyrose, - "dest_partial_executed": colors.palevioletred, - "dest_final": colors.pink, - "meta_origin_committed": colors.lightgreen, - "meta_dest_committed": colors.lightblue, + Colors.origin_proposed: colors.lightyellow, + Colors.origin_partial_executed: colors.orange, + Colors.origin_final: colors.yellow, + Colors.dest_proposed: colors.mistyrose, + Colors.dest_partial_executed: colors.palevioletred, + Colors.dest_final: colors.pink, + Colors.meta_origin_committed: colors.lightgreen, + Colors.meta_dest_committed: colors.lightblue, } diff --git a/multiversx_cross_shard_analysis/header_structures.py b/multiversx_cross_shard_analysis/header_structures.py index 8466bb7..66b928b 100644 --- a/multiversx_cross_shard_analysis/header_structures.py +++ b/multiversx_cross_shard_analysis/header_structures.py @@ -1,11 +1,14 @@ + from typing import Any from multiversx_cross_shard_analysis.test_decode_reserved import decode_reserved_field -from .constants import dest_shard, origin_shard, meta +from multiversx_cross_shard_analysis.miniblock_data import MiniblockData + +from .constants import COLORS_MAPPING, TYPE_NAMES, dest_shard, origin_shard, meta -def get_value(variable_name: str, header: dict[str, Any]) -> str: +def get_value(variable_name: str, header: dict[str, Any]) -> Any: return header['header'][variable_name] if 'header' in header else header[variable_name] @@ -80,6 +83,148 @@ def add_miniblocks(self, header: dict[str, Any], status: str): metadata["reserved"] = decode_reserved_field(mb.get("reserved", ""), mb.get("txCount", 0)) self.miniblocks[mb_hash]['mentioned'].append((mention_type, metadata)) + def get_data_for_header_vertical_report(self) -> dict[str, dict[int, Any]]: + miniblocks = MiniblockData(self.miniblocks) + report: dict[str, dict[int, Any]] = {} + last_epoch = None + + for shard_id, 
header_data in self.parsed_headers.items():
+            header_group_count = 0
+            header_count = 0
+            header_group_name = ""
+            last_epoch = None
+
+            for header in sorted(header_data.header_dictionary['commited_headers'],
+                                 key=lambda x: get_value('nonce', x)):
+
+                epoch = get_value('epoch', header)
+
+                # reset counters when epoch changes
+                if epoch != last_epoch:
+                    header_group_count = 0
+                    header_count = 0
+                    header_group_name = ""
+                    last_epoch = epoch
+
+                # ensure epoch entry exists and contains all shards as keys
+                if epoch not in report:
+                    report[epoch] = {sid: {} for sid in self.parsed_headers.keys()}
+
+                if get_value('miniBlockHeaders', header) == []:
+                    continue
+
+                nonce = get_value('nonce', header)
+                round_num = get_value('round', header)
+                print(f"Processing header: epoch={epoch}, shard={shard_id}, nonce={nonce}, round={round_num}")
+
+                # build result for this header (only cross-shard miniblocks)
+                result: dict[int, list] = {}
+                for miniblock in [mb for mb in get_value('miniBlockHeaders', header) if mb.get('senderShardID') == shard_id and mb.get('receiverShardID') != mb.get('senderShardID')]:
+                    print(f"  Processing miniblock: hash={miniblock.get('hash')}, senderShardID={miniblock.get('senderShardID')}, receiverShardID={miniblock.get('receiverShardID')}")
+                    mb_hash = miniblock.get('hash')
+                    for mention_type, metadata in self.miniblocks[mb_hash]['mentioned']:
+                        # skip proposed mentions
+                        if 'proposed' in mention_type:
+                            continue
+
+                        rn = metadata['round']
+                        color = miniblocks.get_color_for_state(mention_type, miniblock['txCount'], metadata)
+                        shard_name = f'Shard {metadata["shard_id"]}' if metadata["shard_id"] != 4294967295 else "MetaShard"
+                        # append tuple (label, info, color)
+                        result.setdefault(rn, []).append((shard_name, mb_hash[:15] + '...', COLORS_MAPPING[color]))
+
+                # empty result: skip this nonce entirely and do not count it toward grouping
+                if not result:
+                    continue
+
+                # --- Add this nonce to the report and handle grouping ---
+                # start a new group every `group_size` nonces that actually get added
+                group_size = 5
+                if header_count % group_size == 0:
+                    header_group_count += 1
+                    header_group_name = f"Nonces {nonce}"
+                    # initialize structure for this group
+                    report[epoch][shard_id][header_group_count] = {
+                        'group_name': header_group_name,
+                        'rounds': (round_num, round_num),
+                        'nonces': {}
+                    }
+                    print(f"Creating new header group: {header_group_name}")
+                else:
+                    # extend the existing group's name
+                    header_group_name += f" - {nonce}"
+                    report[epoch][shard_id][header_group_count]['group_name'] = header_group_name
+
+                # store the nonce's data
+                report[epoch][shard_id][header_group_count]['nonces'][nonce] = result
+
+                # widen the group's (min, max) round span to cover the rounds seen in result
+                min_r, max_r = report[epoch][shard_id][header_group_count]['rounds']
+                actual_min = min(result.keys())
+                actual_max = max(result.keys())
+                report[epoch][shard_id][header_group_count]['rounds'] = (min(min_r, actual_min), max(max_r, actual_max))
+
+                # increment header_count because we added this nonce
+                header_count += 1
+
+        return report
+
+    def get_data_for_header_horizontal_report(self) -> dict[str, dict[int, Any]]:
+        miniblocks = MiniblockData(self.miniblocks)
+        report: dict[str, dict[int, Any]] = {}
+
+        for shard_id, header_data in self.parsed_headers.items():
+
+            for header in sorted(header_data.header_dictionary['commited_headers'],
+                                 key=lambda x: get_value('nonce', x)):
+
+                epoch = get_value('epoch', header)
+
+                # ensure epoch entry exists and contains all shards as keys
+                if epoch not in report:
+                    report[epoch] = {sid: {} for sid in
self.parsed_headers.keys()}
+
+                if get_value('miniBlockHeaders', header) == []:
+                    continue
+
+                nonce = get_value('nonce', header)
+                round_num = get_value('round', header)
+                print(f"Processing header: epoch={epoch}, shard={shard_id}, nonce={nonce}, round={round_num}")
+
+                # build result for this header (only cross-shard miniblocks)
+                result: dict[int, list] = {}
+                for miniblock in [mb for mb in get_value('miniBlockHeaders', header) if mb.get('senderShardID') == shard_id and mb.get('receiverShardID') != mb.get('senderShardID')]:
+                    print(f"  Processing miniblock: hash={miniblock.get('hash')}, senderShardID={miniblock.get('senderShardID')}, receiverShardID={miniblock.get('receiverShardID')}")
+                    mb_hash = miniblock.get('hash')
+                    for mention_type, metadata in self.miniblocks[mb_hash]['mentioned']:
+                        # skip proposed mentions
+                        if 'proposed' in mention_type:
+                            continue
+
+                        rn = metadata['round']
+                        color = miniblocks.get_color_for_state(mention_type, miniblock['txCount'], metadata)
+                        label = f'Shard {metadata["shard_id"]}' if metadata["shard_id"] != 4294967295 else "MetaShard"
+                        if miniblock['type'] != 0:
+                            label += f' ({TYPE_NAMES[miniblock["type"]]})'
+                        # append tuple (label, info, color)
+                        result.setdefault(rn, []).append((label, mb_hash[:15] + '...', COLORS_MAPPING[color]))
+
+                # empty result: nothing cross-shard to show for this nonce
+                if not result:
+                    continue
+
+                # sort rounds in result
+                result = dict(sorted(result.items()))
+
+                # skip this nonce if its earliest mention round precedes the header's own
+                # round, i.e. the miniblock was carried over from a previous nonce
+                if list(result.keys())[0] < round_num:
+                    continue
+
+                # store the nonce's data
+                report[epoch][shard_id][nonce] = result
+
+        return report
+

 class Header:
     def __init__(self, header: dict[str, Any], status: str):
@@ -119,3 +264,11 @@ def isHeaderV2(header: dict[str, Any]) -> bool:
     @staticmethod
     def isMetaHeader(header: dict[str, Any]) -> bool:
         return 'shardInfo' in header
+
+    @staticmethod
+    def isHeaderV3(header: dict[str, Any]) -> bool:
+        return 'executionResults' in header and 'shardInfoProposal' not in header
+
+    @staticmethod
+    def isMetaHeaderV3(header: dict[str, Any]) -> bool:
+        return 'shardInfoProposal' in header
diff --git a/multiversx_cross_shard_analysis/headers_timeline_report.py b/multiversx_cross_shard_analysis/headers_timeline_report.py
new file mode 100644
index 0000000..4ba9210
--- /dev/null
+++ b/multiversx_cross_shard_analysis/headers_timeline_report.py
@@ -0,0 +1,275 @@
+import json
+from typing import Any
+from reportlab.lib.pagesizes import A4
+from reportlab.lib import colors
+from reportlab.platypus import (
+    SimpleDocTemplate,
+    Paragraph,
+    Spacer,
+    Table,
+    TableStyle,
+    PageBreak,
+    Flowable
+)
+from reportlab.lib.styles import getSampleStyleSheet
+from reportlab.graphics.shapes import Drawing, Rect, String
+
+from multiversx_cross_shard_analysis.constants import COLORS_MAPPING, Colors
+
+from multiversx_cross_shard_analysis.header_structures import HeaderData, ShardData
+
+# -----------------------------
+# CONFIG (mirrors the miniblock report)
+# -----------------------------
+
+PAGE_WIDTH, PAGE_HEIGHT = A4
+LEFT_MARGIN = RIGHT_MARGIN = 20
+TOP_MARGIN = BOTTOM_MARGIN = 20
+
+RECT_H = 20
+RECT_PADDING_X = 4
+ROUND_HEADER_FONT = 7
+RECT_LABEL_FONT = 8
+RECT_INFO_FONT = 8
+
+SECTION_BASE_HEIGHT = 110  # same base-height idea as the miniblock report
+EXTRA_LINE_HEIGHT = 18  # additional rows per stack
+TITLE_HEIGHT = 60
+
+
+# -----------------------------
+# build stacked rectangles (same as miniblock version)
+# -----------------------------
+
+def build_stack_for_round(items:
list[tuple[str, str, colors.Color]], col_width: float) -> Drawing: + rows = max(1, len(items)) + total_h = rows * RECT_H + d = Drawing(col_width, total_h) + + y = total_h - RECT_H + for label, info, col in items: + rect_w = max(2, col_width - RECT_PADDING_X * 2) - 4 + + d.add(Rect(0, y + 2, rect_w, RECT_H - 4, fillColor=col, strokeColor=colors.black)) # type: ignore + + text_x = RECT_PADDING_X + 3 + base_y = y + 4 + + d.add(String(text_x, base_y + 8, label, fontSize=RECT_LABEL_FONT)) + d.add(String(text_x, base_y, info, fontSize=RECT_INFO_FONT)) + + y -= RECT_H + + if len(items) == 0: + rect_w = max(2, col_width - RECT_PADDING_X * 2) - 4 + mid = total_h / 2 + d.add(Rect(0, mid - 6, rect_w, 12, fillColor=colors.whitesmoke, strokeColor=colors.grey)) # type: ignore + d.add(String(RECT_PADDING_X + 2, mid - 2, "no data", fontSize=RECT_LABEL_FONT)) + + return d + +# ----------------------------- +# check for round gaps +# ----------------------------- + + +def has_round_gap(rounds: list[int]) -> bool: + if len(rounds) < 2: + return False + rounds_sorted = sorted(rounds) + for a, b in zip(rounds_sorted, rounds_sorted[1:]): + if b != a + 1: + return True + return False + + +# ----------------------------- +# build section for one nonce +# ----------------------------- + +def build_nonce_section(shard_id: int, nonce: int, rounds: list[int], data: dict[int, list[Any]], + usable_width: float, highlight: bool = False) -> list[Flowable]: + + flow = [] + styles = getSampleStyleSheet() + + flow.append(Paragraph(f"Shard {shard_id} — Nonce {nonce}", styles["Heading3"])) + flow.append(Spacer(1, 4)) + + num_cols = len(rounds) + col_width = usable_width / max(1, num_cols) + + header = [Paragraph(f"{r}", styles["BodyText"]) for r in rounds] + + cells = [] + for r in rounds: + items = data.get(r, []) + drawing = build_stack_for_round(items, col_width) + cells.append(drawing) + + tbl = Table( + [header, cells], + colWidths=[col_width] * num_cols, + hAlign="LEFT", + ) + + tbl_style = [ + ("GRID", (0, 0), (-1, -1), 0.25, colors.grey), + ("BACKGROUND", (0, 0), (-1, 0), colors.whitesmoke), + ("ALIGN", (0, 0), (-1, 0), "CENTER"), + ("VALIGN", (0, 1), (-1, -1), "TOP"), + ("FONTSIZE", (0, 0), (-1, 0), ROUND_HEADER_FONT), + ] + + # add red border if highlighted + if highlight: + tbl_style.append(("BOX", (0, 0), (-1, -1), 2, colors.red)) + + tbl.setStyle(TableStyle(tbl_style)) + + flow.append(tbl) + flow.append(Spacer(1, 8)) + return flow + + +# ----------------------------- +# PDF builder +# ----------------------------- + +def build_nonce_timeline_pdf(shards_data: dict[int, dict[int, dict[int, list[Any]]]], + outname="nonce_timeline.pdf"): + doc = SimpleDocTemplate( + outname, + pagesize=A4, + leftMargin=LEFT_MARGIN, + rightMargin=RIGHT_MARGIN, + topMargin=TOP_MARGIN, + bottomMargin=BOTTOM_MARGIN, + ) + + usable_width = PAGE_WIDTH - LEFT_MARGIN - RIGHT_MARGIN + MAX_H = PAGE_HEIGHT - TOP_MARGIN - BOTTOM_MARGIN + + styles = getSampleStyleSheet() + story = [] + story.append(Paragraph("Nonce Timeline Report", styles["Title"])) + story.append(Spacer(1, 10)) + + current_h = 0 + first_page = True + + for shard_id, shard_dict in shards_data.items(): + for nonce, rdata in sorted(shard_dict.items()): + # height estimate based on max stack height + max_stack = max((len(v) for v in rdata.values()), default=1) + h_needed = SECTION_BASE_HEIGHT + max(0, max_stack - 2) * EXTRA_LINE_HEIGHT + + effective_page_height = MAX_H - (TITLE_HEIGHT if first_page else 0) + + if current_h + h_needed > effective_page_height: + 
story.append(PageBreak()) + current_h = 0 + first_page = False + + round_list = list(rdata.keys()) + gap = has_round_gap(round_list) + story.extend(build_nonce_section(shard_id, nonce, round_list, rdata, usable_width, gap)) + current_h += h_needed + + doc.build(story) + + +# ----------------------------- Example input data ------------------------------ +input_data = { + 0: { + 1: { + 100: [('origin_final', 'Shard 0', COLORS_MAPPING[Colors.origin_final])], + 101: [('origin_notarized', 'Shard 0', COLORS_MAPPING[Colors.meta_origin_committed])], + 102: [('dest_proposed', 'Shard 1', COLORS_MAPPING[Colors.dest_proposed]), ('dest_final', 'Shard 2', COLORS_MAPPING[Colors.dest_final])], + 103: [('dest_partial', 'Shard 1', COLORS_MAPPING[Colors.dest_partial_executed]), ('dest_notarized', 'Shard 2', COLORS_MAPPING[Colors.meta_dest_committed])], + 104: [('dest_final', 'Shard 1', COLORS_MAPPING[Colors.dest_final])], + 105: [('dest_notarized', 'Shard 1', COLORS_MAPPING[Colors.meta_dest_committed])], + }, + 2: { + 101: [('origin_proposed', 'Shard 0', COLORS_MAPPING[Colors.origin_proposed])], + 103: [('origin_final', 'Shard 0', COLORS_MAPPING[Colors.origin_final])], + 104: [('dest_final', 'Shard 2', COLORS_MAPPING[Colors.dest_final]), ('origin_notarized', 'Shard 0', COLORS_MAPPING[Colors.meta_origin_committed])], + 105: [('dest_notarized', 'Shard 2', COLORS_MAPPING[Colors.meta_dest_committed])], + } + }, + 1: { + 1: { + 101: [('N1', 'S1', COLORS_MAPPING[Colors.origin_final])], + 102: [('N1', 'S1', COLORS_MAPPING[Colors.meta_origin_committed])], + 103: [('N1', 'S0', COLORS_MAPPING[Colors.dest_proposed]), ('N1', 'S2', COLORS_MAPPING[Colors.dest_final])], + 104: [('N1', 'S0', COLORS_MAPPING[Colors.dest_partial_executed]), ('N1', 'S2', COLORS_MAPPING[Colors.meta_dest_committed])], + 105: [('N1', 'S0', COLORS_MAPPING[Colors.dest_final])], + 106: [('N1', 'S0', COLORS_MAPPING[Colors.meta_dest_committed])], + }, + 2: { + 102: [('N2', 'S1', COLORS_MAPPING[Colors.origin_partial_executed])], + 104: [('N2', 'S1', COLORS_MAPPING[Colors.origin_final])], + 105: [('N2', 'S2', COLORS_MAPPING[Colors.dest_final]), ('N2', 'S1', COLORS_MAPPING[Colors.meta_origin_committed])], + 106: [('N2', 'S2', COLORS_MAPPING[Colors.meta_dest_committed])], + }, + }, + 2: { + 1: { + 100: [('N1', 'S2', COLORS_MAPPING[Colors.origin_final])], + 101: [('N1', 'S2', COLORS_MAPPING[Colors.meta_origin_committed])], + 102: [('N1', 'S0', COLORS_MAPPING[Colors.dest_final]), ('N1', 'S1', COLORS_MAPPING[Colors.dest_final])], + 103: [('N1', 'S0', COLORS_MAPPING[Colors.meta_dest_committed]), ('N1', 'S1', COLORS_MAPPING[Colors.meta_dest_committed])], + }, + 2: { + 101: [('N2', 'S2', COLORS_MAPPING[Colors.origin_final])], + 102: [('N2', 'S2', COLORS_MAPPING[Colors.meta_origin_committed])], + 103: [('N2', 'S0', COLORS_MAPPING[Colors.dest_final])], + 104: [('N2', 'S0', COLORS_MAPPING[Colors.meta_dest_committed]), ('N2', 'S1', COLORS_MAPPING[Colors.dest_final])], + 105: [('N2', 'S1', COLORS_MAPPING[Colors.meta_dest_committed])], + }, + 3: { + 103: [('N3', 'S2', COLORS_MAPPING[Colors.origin_final])], + 104: [('N3', 'S2', COLORS_MAPPING[Colors.meta_origin_committed])], + 105: [('N3', 'S1', COLORS_MAPPING[Colors.dest_final])], + 106: [('N3', 'S1', COLORS_MAPPING[Colors.dest_final])], + 107: [('N3', 'S1', COLORS_MAPPING[Colors.meta_dest_committed])], + }, + }, + 4294967295: { + 1: { + 100: [('N1', 'M', COLORS_MAPPING[Colors.origin_final])], + 103: [('N1', 'M', COLORS_MAPPING[Colors.meta_origin_committed])], + 104: [('N1', 'S0', COLORS_MAPPING[Colors.dest_final]), 
('N1', 'S1', COLORS_MAPPING[Colors.dest_final])], + 105: [('N1', 'S0', COLORS_MAPPING[Colors.meta_dest_committed]), ('N1', 'S1', COLORS_MAPPING[Colors.meta_dest_committed]), ('N1', 'S2', COLORS_MAPPING[Colors.dest_final])], + 106: [('N1', 'S2', COLORS_MAPPING[Colors.meta_dest_committed])], + } + } + +} + +# build_nonce_timeline_pdf(input_data, outname="nonce_timeline_report.pdf") +# print("Nonce timeline report generated: nonce_timeline_report.pdf") + +if __name__ == "__main__": + # run PDF build + + headers = ShardData() + for shard in [0, 1, 2, 4294967295]: + with open(f'./Reports/cross-shard-execution-anal-9afe696daf/Shards/{shard}_report.json', 'r') as f: + data = json.load(f) + + headers.parsed_headers[shard] = HeaderData() + headers.parsed_headers[shard].header_dictionary = data['shards'] + + with open('./Reports/cross-shard-execution-anal-9afe696daf/Miniblocks/miniblocks_report.json', 'r') as f: + data = json.load(f) + headers.miniblocks = data['miniblocks'] + + input_data = headers.get_data_for_header_horizontal_report() + + for epoch in sorted(input_data.keys()): + print(f"Epoch: {epoch}") + report_list = input_data[epoch] + + build_nonce_timeline_pdf(report_list, outname=f"nonce_timeline_report_{epoch}.pdf") + print(f"Nonce timeline report generated: nonce_timeline_report_{epoch}.pdf") diff --git a/multiversx_cross_shard_analysis/miniblock_data.py b/multiversx_cross_shard_analysis/miniblock_data.py index 932a95e..d902c7e 100644 --- a/multiversx_cross_shard_analysis/miniblock_data.py +++ b/multiversx_cross_shard_analysis/miniblock_data.py @@ -1,19 +1,37 @@ -import json from typing import Any -from multiversx_cross_shard_analysis.constants import COLORS_MAPPING +from multiversx_cross_shard_analysis.constants import COLORS_MAPPING, Colors from multiversx_cross_shard_analysis.decode_reserved import get_default_decoded_data class MiniblockData: - def __init__(self, miniblocks: list[tuple[str, dict[str, Any]]]): + def __init__(self, miniblocks: dict[str, dict[str, Any]]): self.miniblocks = miniblocks + def get_color_for_state(self, mention_type: str, tx_count: int, header: dict[str, Any]) -> Colors: + reserved = header.get('reserved', {}) + if reserved == {}: + reserved = get_default_decoded_data(tx_count=tx_count) + if "meta" in mention_type: + color = Colors.meta_origin_committed if mention_type.startswith('meta_origin') else Colors.meta_dest_committed + else: + color = Colors.origin_final if mention_type.startswith('origin') else Colors.dest_final + else: + # execution_type = header.get('reserved', {}).get('ExecutionType', '') + state = header.get('reserved', {}).get('State', '') + if state == 'Proposed': + color = Colors.origin_proposed if mention_type.startswith('origin') else Colors.dest_proposed + elif state == 'PartialExecuted': + color = Colors.origin_partial_executed if mention_type.startswith('origin') else Colors.dest_partial_executed + else: + color = Colors.origin_final if mention_type.startswith('origin') else Colors.dest_final + return color + def get_data_for_round_report(self) -> dict[str, Any]: report = {} - for mb_hash, mb_info in self.miniblocks: + for mb_hash, mb_info in self.miniblocks.items(): for mention_type, header in mb_info.get('mentioned', []): if "proposed" in mention_type: continue @@ -28,49 +46,13 @@ def get_data_for_round_report(self) -> dict[str, Any]: if shard not in report[epoch][round_number]: report[epoch][round_number][shard] = [] - if header.get('reserved') == {}: - if "meta" in mention_type: - reserved = COLORS_MAPPING["meta_origin_committed"] 
if mention_type.startswith('meta_origin') else COLORS_MAPPING["meta_dest_committed"] - else: - reserved = COLORS_MAPPING["origin_final"] if mention_type.startswith('origin') else COLORS_MAPPING["dest_final"] - else: - # execution_type = header.get('reserved', {}).get('ExecutionType', '') - state = header.get('reserved', {}).get('State', '') - if state == 'Proposed': - reserved = COLORS_MAPPING["origin_proposed"] if mention_type.startswith('origin') else COLORS_MAPPING["dest_proposed"] - elif state == 'PartialExecuted': - reserved = COLORS_MAPPING["origin_partial_executed"] if mention_type.startswith('origin') else COLORS_MAPPING["dest_partial_executed"] - else: - reserved = COLORS_MAPPING["origin_final"] if mention_type.startswith('origin') else COLORS_MAPPING["dest_final"] - report[epoch][round_number][shard].append((mb_hash, reserved)) + color = COLORS_MAPPING[self.get_color_for_state(mention_type, mb_info['txCount'], header)] + report[epoch][round_number][shard].append((mb_hash, color)) return report def get_data_for_detail_report(self) -> dict[str, list[dict[str, Any]]]: - ''' - { - "hash": "b87f711aadc8f928ff6b2a1baf0ef4381f36dd0af9d100c07e9b7b6ca7233648", - "receiverShardID": 0, - "senderShardID": 1, - "txCount": 50, - "type": 0, - "first_seen_round": 295, - "last_seen_round": 298, - "mentioned": { - 298: [ - ("dest_proposed", "txs 1–25 / 50", COLORS_MAPPING["dest_partial_executed"]), - ("dest_proposed", "txs 26–50 / 50", COLORS_MAPPING["dest_final"]), - ], - 295: [ - ("origin_proposed", "txs 1–50 / 50", COLORS_MAPPING["origin_final"]), - ], - 296: [ - ("meta_origin_proposed", "txs 1–50 / 50", COLORS_MAPPING["meta_origin_committed"]), - ], - }, - }, - ''' report = {} - for mb_hash, mb_info in self.miniblocks: + for mb_hash, mb_info in self.miniblocks.items(): if mb_info['senderShardID'] == mb_info['receiverShardID']: continue # Skip same-shard miniblocks origin_epoch = None @@ -97,22 +79,10 @@ def get_data_for_detail_report(self) -> dict[str, list[dict[str, Any]]]: if round_number not in mb_data['mentioned']: mb_data['mentioned'][round_number] = [] - reserved = header.get('reserved', {}) + color = COLORS_MAPPING[self.get_color_for_state(mention_type, mb_info['txCount'], header)] + reserved = header.get('reserved') if reserved == {}: reserved = get_default_decoded_data(tx_count=mb_info['txCount']) - if "meta" in mention_type: - color = COLORS_MAPPING["meta_origin_committed"] if mention_type.startswith('meta_origin') else COLORS_MAPPING["meta_dest_committed"] - else: - color = COLORS_MAPPING["origin_final"] if mention_type.startswith('origin') else COLORS_MAPPING["dest_final"] - else: - # execution_type = header.get('reserved', {}).get('ExecutionType', '') - state = header.get('reserved', {}).get('State', '') - if state == 'Proposed': - color = COLORS_MAPPING["origin_proposed"] if mention_type.startswith('origin') else COLORS_MAPPING["dest_proposed"] - elif state == 'PartialExecuted': - color = COLORS_MAPPING["origin_partial_executed"] if mention_type.startswith('origin') else COLORS_MAPPING["dest_partial_executed"] - else: - color = COLORS_MAPPING["origin_final"] if mention_type.startswith('origin') else COLORS_MAPPING["dest_final"] mb_data['mentioned'][round_number].append((mention_type, f"txs {reserved['IndexOfFirstTxProcessed']}–{reserved['IndexOfLastTxProcessed']} / {mb_info['txCount']}", color)) if not origin_epoch: @@ -125,364 +95,3 @@ def get_data_for_detail_report(self) -> dict[str, list[dict[str, Any]]]: for epoch, mb_list in report.items(): mb_list.sort(key=lambda x: 
x['first_seen_round']) return report - - def get_data_for_detailed_report(self) -> dict[str, Any]: - report = {} - - for mb_hash, mb_info in self.miniblocks: - if mb_info['senderShardID'] == mb_info['receiverShardID']: - continue # Skip same-shard miniblocks - origin_epoch = None - report_data = { - 'lanes': { - 'origin': [], - 'dest': [], - 'meta': [] - } - } - for mention_type, header in mb_info.get('mentioned', []): - lane = 'meta' if 'meta' in mention_type else ('origin' if 'origin' in mention_type else 'dest') - report_data['lanes'][lane].append((mention_type, header.get('round'))) - if lane == 'origin': - origin_epoch = header.get('epoch') - if not origin_epoch: - print(f"Warning: origin_epoch not found for miniblock {mb_hash}") - continue - if origin_epoch not in report: - report[origin_epoch] = {} - report[origin_epoch][mb_hash] = report_data - return report - - def get_data_for_detailed_report1(self) -> dict[str, Any]: - report = {} - - for mb_hash, mb_info in self.miniblocks: - if mb_info['senderShardID'] == mb_info['receiverShardID']: - continue # Skip same-shard miniblocks - origin_epoch = None - start_round = None - end_round = None - report_data = { - 'start_round': start_round, - 'end_round': end_round, - 'lanes': { - 'origin': [], - 'dest': [], - 'meta': [] - } - } - for mention_type, header in mb_info.get('mentioned', []): - lane = 'meta' if 'meta' in mention_type else ('origin' if 'origin' in mention_type else 'dest') - report_data['lanes'][lane].append((mention_type, header.get('round'))) - if report_data['start_round'] is None or header.get('round') < report_data['start_round']: - report_data['start_round'] = header.get('round') - if report_data['end_round'] is None or header.get('round') > report_data['end_round']: - report_data['end_round'] = header.get('round') - if lane == 'origin': - origin_epoch = header.get('epoch') - if not origin_epoch: - print(f"Warning: origin_epoch not found for miniblock {mb_hash}") - continue - if origin_epoch not in report: - report[origin_epoch] = {} - - report[origin_epoch][mb_hash] = report_data - return report - - -if __name__ == "__main__": - example = {"run_name": "cross-shard-execution-anal-9afe696daf", "miniblocks": {"01f7f0f3503c62b69ff0fe22a57b97eaa2f164ac9e4a0bb852bc66e3f5c12369": {"hash": "01f7f0f3503c62b69ff0fe22a57b97eaa2f164ac9e4a0bb852bc66e3f5c12369", "receiverShardID": 0, "reserved": "", "senderShardID": 1, "txCount": 1, "type": 0, "mentioned": [["dest_shard_proposed_headers", {"nonce": 207, "round": 207, "epoch": 1, "shard_id": 0}], ["dest_shard_commited_headers", {"nonce": 207, "round": 207, "epoch": 1, "shard_id": 0}], ["origin_shard_proposed_headers", {"nonce": 205, "round": 205, "epoch": 1, "shard_id": 1}], ["origin_shard_commited_headers", {"nonce": 205, "round": 205, "epoch": 1, "shard_id": 1}], ["meta_origin_shard_proposed_headers", {"nonce": 206, "round": 206, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_proposed_headers", {"nonce": 208, "round": 208, "epoch": 1, "shard_id": 4294967295}], ["meta_origin_shard_commited_headers", {"nonce": 206, "round": 206, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_commited_headers", {"nonce": 208, "round": 208, "epoch": 1, "shard_id": 4294967295}]]}, "3fce18121e1ce8e57a3d776c91468649699e8238fad7898f46b8ac88b8d2c1cb": {"hash": "3fce18121e1ce8e57a3d776c91468649699e8238fad7898f46b8ac88b8d2c1cb", "receiverShardID": 0, "reserved": "200b", "senderShardID": 0, "txCount": 12, "type": 0, "mentioned": [["origin_shard_proposed_headers", {"nonce": 210, "round": 210, 
"epoch": 1, "shard_id": 0}], ["origin_shard_commited_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 0}], ["meta_origin_shard_proposed_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_origin_shard_commited_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}]]}, "cb2bbdf01dd1a44c813ceecb58ae7404ad9016073fc707dda1be52f7dc5735fc": {"hash": "cb2bbdf01dd1a44c813ceecb58ae7404ad9016073fc707dda1be52f7dc5735fc", "receiverShardID": 1, "reserved": "201c", "senderShardID": 0, "txCount": 29, "type": 0, "mentioned": [["origin_shard_proposed_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 0}], ["origin_shard_commited_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 0}], ["dest_shard_proposed_headers", {"nonce": 212, "round": 212, "epoch": 1, "shard_id": 1}], ["dest_shard_commited_headers", {"nonce": 212, "round": 212, "epoch": 1, "shard_id": 1}], ["meta_origin_shard_proposed_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_proposed_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}], ["meta_origin_shard_commited_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_commited_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}]]}, "01e7fc8132cd61f6aede3231bc9c9dc36dbe7d3ffcaca59fce7b46cca0e5884e": {"hash": "01e7fc8132cd61f6aede3231bc9c9dc36dbe7d3ffcaca59fce7b46cca0e5884e", "receiverShardID": 2, "reserved": "2008", "senderShardID": 0, "txCount": 9, "type": 0, "mentioned": [["origin_shard_proposed_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 0}], ["origin_shard_commited_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 0}], ["meta_origin_shard_proposed_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_proposed_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}], ["meta_origin_shard_commited_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_commited_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}], ["dest_shard_proposed_headers", {"nonce": 211, "round": 212, "epoch": 1, "shard_id": 2}], ["dest_shard_commited_headers", {"nonce": 211, "round": 212, "epoch": 1, "shard_id": 2}]]}, "f966a0d0370de9a7b0675ec153c03f61031e02864fe781ab49cca0f60439e590": {"hash": "f966a0d0370de9a7b0675ec153c03f61031e02864fe781ab49cca0f60439e590", "receiverShardID": 0, "reserved": "2012", "senderShardID": 2, "txCount": 19, "type": 0, "mentioned": [["dest_shard_proposed_headers", {"nonce": 212, "round": 212, "epoch": 1, "shard_id": 0}], ["dest_shard_commited_headers", {"nonce": 212, "round": 212, "epoch": 1, "shard_id": 0}], ["meta_origin_shard_proposed_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_proposed_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}], ["meta_origin_shard_commited_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_commited_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}], ["origin_shard_proposed_headers", {"nonce": 209, "round": 210, "epoch": 1, "shard_id": 2}], ["origin_shard_commited_headers", {"nonce": 209, "round": 210, "epoch": 1, "shard_id": 2}]]}, "cdac3609485d6813f79c717e48e5d6cf94b3ca6e25dd372a21f3511f0eb1e536": {"hash": 
"cdac3609485d6813f79c717e48e5d6cf94b3ca6e25dd372a21f3511f0eb1e536", "receiverShardID": 0, "reserved": "200e", "senderShardID": 1, "txCount": 15, "type": 0, "mentioned": [["dest_shard_proposed_headers", {"nonce": 212, "round": 212, "epoch": 1, "shard_id": 0}], ["dest_shard_commited_headers", {"nonce": 212, "round": 212, "epoch": 1, "shard_id": 0}], ["origin_shard_proposed_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 1}], ["origin_shard_commited_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 1}], ["meta_origin_shard_proposed_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_proposed_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}], ["meta_origin_shard_commited_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_commited_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}]]}}} - mb_data = MiniblockData(list(example['miniblocks'].items())) - print(json.dumps(mb_data.get_data_for_detailed_report(), indent=4)) - -''' -Class to hold data related to a miniblock and its appearances in headers. - -For headers V1, miniblocks are directly in the header under "miniBlockHeaders". -For headers V2, miniblocks are under "header" -> "miniBlockHeaders". -For metaheaders V1, miniblocks are under "miniBlockHeaders" and also in "shardInfo"->"shardMiniBlockHeaders". -''' - -''' -header: miniblocks -Example: -HV1 - { - "accumulatedFees": "0", - "blockBodyType": 0, - "chainID": "31", - "developerFees": "0", - "epoch": 0, - "epochStartMetaHash": "", - "leaderSignature": "", - "metaBlockHashes": [ - "353b97d74521f37d6776c9b8070f928af210d9cca2f22531f635d2ed207d0a44" - ], - "miniBlockHeaders": [], - "nonce": 6, - "peerChanges": [], - "prevHash": "5fef2a316b7046470de021f37a5443854699950138ded925cc0d10a1c4cdc383", - "prevRandSeed": "0e4d1c5112aef96495f851ea8a285fab7aa7a28723d9930f8f280f1e158de7b64f95f301160794da4f551cd789176714", - "pubKeysBitmap": "", - "randSeed": "22400b5e348375592be8537cc797da3f80fc5a385feb18922c060c7b9b7e53698bc3ffa7da4e28bee2a0fd2ddb5ea419", - "receiptsHash": "0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8", - "reserved": "", - "rootHash": "34f5b60441c630fbd7327835070a64429eb40627b0b638c062d15aa1de2f7208", - "round": 6, - "shardID": 1, - "signature": "", - "softwareVersion": "64656661756c74", - "timeStamp": 1762937897, - "txCount": 0 - }, - - -HV2 - { - "header": { - "accumulatedFees": "0", - "blockBodyType": 0, - "chainID": "31", - "developerFees": "0", - "epoch": 2, - "epochStartMetaHash": "e43e0e37766f6e59bb5bc586b244427e0385ea720292a0c17b0272952b4afbf5", - "leaderSignature": "05bbf615de4e0d1271df5fd4eb6a057a6d89fd945499b24a50dbc080d4b87dd8c5823105fceacbdf214653621bc3f10f", - "metaBlockHashes": [ - "e43e0e37766f6e59bb5bc586b244427e0385ea720292a0c17b0272952b4afbf5" - ], - "miniBlockHeaders": [ - { - "hash": "d3ba36a1f12970615fbea92a8a3b1639fef9676dc2951a7150c73516cbde2301", - "receiverShardID": 1, - "reserved": "200a", - "senderShardID": 4294967295, - "txCount": 11, - "type": 255 - }, - { - "hash": "697a913df8d23454c56755c9f60dca2008d182ef685c124f0ade6332ae291647", - "receiverShardID": 4294967280, - "reserved": "200e", - "senderShardID": 4294967295, - "txCount": 15, - "type": 60 - }, - { - "hash": "9948e5f806de024ccc253429850f3e6b4203d2509c7d653f8dd00d89e8d32ae5", - "receiverShardID": 4294967280, - "reserved": "200e", - "senderShardID": 4294967295, - "txCount": 15, - "type": 60 - }, - { - "hash": 
"722b967aac3a34f4097e536176975f0f7fb06e70f45e35a67a09a09011f699be", - "receiverShardID": 4294967280, - "reserved": "200e", - "senderShardID": 4294967295, - "txCount": 15, - "type": 60 - }, - { - "hash": "7ab74c81351bf5d5876421da2fda21257c4924240be05fae66523dcc7cf165ed", - "receiverShardID": 4294967280, - "reserved": "200e", - "senderShardID": 4294967295, - "txCount": 15, - "type": 60 - } - ], - "nonce": 403, - "peerChanges": [], - "prevHash": "5e2640a23517bb8fbba1ea9428c80bb86af907d3dca5f931a8c414ecab15816c", - "prevRandSeed": "41ec3c4ce905421a9646354435b0c6259d7f6268f784133dc0e3f5033580e5492071b90f60c84f25c396004f98c0f00a", - "pubKeysBitmap": "", - "randSeed": "2f26601869d5130613e685b0b9fd829db86310e30e446acfa50e0010865a1e5f82d1e7456930d10942e8e8f2e587b486", - "receiptsHash": "0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8", - "reserved": "", - "rootHash": "0a4c40ca1a8488ca7d3832fb73c315a3a86facafca48978f2d28b2c0da7dcf41", - "round": 403, - "shardID": 1, - "signature": "", - "softwareVersion": "32", - "timeStamp": 1762940279000, - "txCount": 71 - }, - "scheduledAccumulatedFees": "0", - "scheduledDeveloperFees": "0", - "scheduledGasPenalized": 0, - "scheduledGasProvided": 0, - "scheduledGasRefunded": 0, - "scheduledRootHash": "40ff71800f799bd91ad57e00b8fd232a12ab559360b93f16bfbd23463dd25721" - }, - -metaheader: miniBlockHeaders, shardinfo/shardMiniBlockHeaders -Example: - { - "accumulatedFees": "5000000000000000", - "accumulatedFeesInEpoch": "660292165000000000", - "chainID": "31", - "devFeesInEpoch": "6694699500000000", - "developerFees": "1500000000000000", - "epoch": 1, - "epochStart": { - "economics": { - "nodePrice": null, - "prevEpochStartHash": "", - "prevEpochStartRound": 0, - "rewardsForProtocolSustainability": null, - "rewardsPerBlock": null, - "totalNewlyMinted": null, - "totalSupply": null, - "totalToDistribute": null - }, - "lastFinalizedHeaders": [] - }, - "leaderSignature": "a5c38e4db58f7f6598948cf1050fc96f73ed4952db9d11f32a97ee9cfb9f5df84a46733e3b9bcc33b32f83ed9d9ec40a", - "miniBlockHeaders": [ - { - "hash": "f5323d263ac564829e92c457d4393a03d788ed81d9023b247a025d41a82f137b", - "receiverShardID": 4294967295, - "reserved": "", - "senderShardID": 0, - "txCount": 1, - "type": 0 - }, - { - "hash": "2a669d6ba2b61d3915b3b75d779443093dbe408a947445fb7d4ce9c0f8c3dbba", - "receiverShardID": 0, - "reserved": "2002", - "senderShardID": 4294967295, - "txCount": 3, - "type": 90 - }, - { - "hash": "83b76de2135a864cebf7845567ebdabbf31c3c4005340221c5feecb65867a88a", - "receiverShardID": 1, - "reserved": "2001", - "senderShardID": 4294967295, - "txCount": 2, - "type": 90 - }, - { - "hash": "88b7fd9f2c9d3df8bb88ec7d3dd3e3fd13d0dd3ca1e9e9e0d5547bcdb9a3f1b0", - "receiverShardID": 2, - "reserved": "2001", - "senderShardID": 4294967295, - "txCount": 2, - "type": 90 - } - ], - "nonce": 283, - "peerInfo": [], - "prevHash": "b82b1e3ea44357d35d28c76de0ef5d66674c0a602edd8b8c73b45cf61087d0e1", - "prevRandSeed": "19e8fbcf33054f8a5c0982a1d1bcdbe8e698318eaf8b6061316072819463b2b3ab1c22500cb4f61d9f37585d9b994d84", - "pubKeysBitmap": "", - "randSeed": "9d99b6595f2ff33bca4ca5c5cd0331b9b36053f7857bc52806806440feed7a0eae9f44c4691a16e753d05536da369b8f", - "receiptsHash": "0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8", - "reserved": "", - "rootHash": "315acd42e00cca35998436b7a060cb196674cf6e058b74a3637026e101519835", - "round": 283, - "shardInfo": [ - { - "accumulatedFees": "22715500000000000", - "developerFees": "0", - "epoch": 1, - "headerHash": 
"3b41fc97dd499d9cb34bfeb5b12898b5f456886d06a543c72b2f0f758a13ccc1", - "lastIncludedMetaNonce": 280, - "nonce": 282, - "numPendingMiniBlocks": 1, - "prevHash": "38c906f96e2d56474407b08fd5be7b46ba51466537799b1b623437c65b325c66", - "prevRandSeed": "cb2ba79e42b33bd609694de59b2b8b24baad3d7f2b9bd2044563f15808c610fac34ac7580d5067bc177c45d01c625113", - "pubKeysBitmap": "", - "round": 282, - "shardID": 0, - "shardMiniBlockHeaders": [ - { - "hash": "6c5248018490f44bc5e9961e095130c8aef96bc427f20b6031d51a80fa818ca8", - "receiverShardID": 0, - "reserved": "", - "senderShardID": 0, - "txCount": 92, - "type": 0 - }, - { - "hash": "4d1ceacabdcea75c9cde8a1f00058507760d03998c1a98bd7809be2a5f61973e", - "receiverShardID": 1, - "reserved": "", - "senderShardID": 0, - "txCount": 299, - "type": 0 - }, - { - "hash": "f5323d263ac564829e92c457d4393a03d788ed81d9023b247a025d41a82f137b", - "receiverShardID": 4294967295, - "reserved": "", - "senderShardID": 0, - "txCount": 1, - "type": 0 - } - ], - "signature": "", - "txCount": 392 - }, - { - "accumulatedFees": "4150000000000000", - "developerFees": "0", - "epoch": 1, - "headerHash": "ba444fcc2da250641426831623e93fe2107e2c3f415bd245386d68286c78bb79", - "lastIncludedMetaNonce": 280, - "nonce": 282, - "numPendingMiniBlocks": 2, - "prevHash": "2a308ec0bd722d1197abd2824daa804833e4a78f50d01b43333b998d36657af7", - "prevRandSeed": "44d8d8a69e52494bcaf57ba8c52f07843ce47dac05498b6badee4457d8cf8c3846fb068c9edcc074df0d6b2f2ff5e391", - "pubKeysBitmap": "", - "round": 282, - "shardID": 1, - "shardMiniBlockHeaders": [ - { - "hash": "a6b7bc2cecaddf1743109a5f045592c9fefb0954cfa4eb1cc1c44bfe81524645", - "receiverShardID": 1, - "reserved": "", - "senderShardID": 1, - "txCount": 83, - "type": 0 - } - ], - "signature": "", - "txCount": 83 - }, - { - "accumulatedFees": "4150000000000000", - "developerFees": "0", - "epoch": 1, - "headerHash": "1b135e7cc2876ddbf7ee1679ab86c63f4c4416fd76e1ffb96a161f072f012550", - "lastIncludedMetaNonce": 280, - "nonce": 281, - "numPendingMiniBlocks": 2, - "prevHash": "a9da1c4b5e2e65f3c4a7aedbb230ee1f9afe0ae9432965b8971e6e26c5546509", - "prevRandSeed": "bb30cd93bac3a177f65beafbab717113be974e3d31ba1851911059e7a61643843d4b1b0308659f6fde0657cc90d7ca07", - "pubKeysBitmap": "", - "round": 282, - "shardID": 2, - "shardMiniBlockHeaders": [ - { - "hash": "878916cd4e6aff248c15a0fcdd8bb19025a1db1cbabf6ad762426672cff0cd0a", - "receiverShardID": 2, - "reserved": "", - "senderShardID": 2, - "txCount": 83, - "type": 0 - } - ], - "signature": "", - "txCount": 83 - } - ], - "signature": "", - "softwareVersion": "32", - "timeStamp": 1762939559, - "txCount": 566, - "validatorStatsRootHash": "2bdc8762983b907c262896a4387b432746129bce1a357646a21e67c977a28eb5" - }, - ''' diff --git a/multiversx_cross_shard_analysis/miniblocks_detailed_report.py b/multiversx_cross_shard_analysis/miniblocks_detailed_report.py deleted file mode 100644 index 9b6f8dd..0000000 --- a/multiversx_cross_shard_analysis/miniblocks_detailed_report.py +++ /dev/null @@ -1,44 +0,0 @@ -from reportlab.lib.pagesizes import A4 -from reportlab.platypus import SimpleDocTemplate, Table, TableStyle -from reportlab.lib import colors - - -def build_miniblock_table_pdf(rows, out_path): - doc = SimpleDocTemplate(out_path, pagesize=A4) - - header = [ - "miniblock hash", - "sh0 proposed", "sh0 committed", "sh0 notarize prop", "sh0 notarize comm", - "sh1 proposed", "sh1 committed", "sh1 notarize prop", "sh1 notarize comm", - "sh2 proposed", "sh2 committed", "sh2 notarize prop", "sh2 notarize comm", - ] - - data = [header] + rows 
- - table = Table(data, repeatRows=1) - - table.setStyle(TableStyle([ - ("BACKGROUND", (0, 0), (-1, 0), colors.lightgrey), - ("TEXTCOLOR", (0, 0), (-1, 0), colors.black), - ("ALIGN", (0, 0), (-1, -1), "CENTER"), - ("FONTNAME", (0, 0), (-1, 0), "Helvetica-Bold"), - ("GRID", (0, 0), (-1, -1), 0.3, colors.black), - ("FONTSIZE", (0, 0), (-1, -1), 7), - ])) - - doc.build([table]) - - -# example usage: -# each row must be a list of exactly 13 values, same order as the header -rows = [ - [ - "abc123", - 101, 102, 103, 104, - 201, 202, 203, 204, - 301, 302, 303, 304, - ] -] - -build_miniblock_table_pdf(rows, "miniblocks.pdf") -# mb_data = MiniblockData(list(data['miniblocks'].items())).get_data_for_detailed_report1() diff --git a/multiversx_cross_shard_analysis/miniblocks_round_report.py b/multiversx_cross_shard_analysis/miniblocks_round_report.py index 6851a36..1cc7e41 100644 --- a/multiversx_cross_shard_analysis/miniblocks_round_report.py +++ b/multiversx_cross_shard_analysis/miniblocks_round_report.py @@ -185,7 +185,7 @@ def build_report(epoch: int, rounds_data: dict[int, Any], shards: list[int]): with open('./Reports/cross-shard-execution-anal-9afe696daf/Miniblocks/miniblocks_report.json', 'r') as f: data = json.load(f) - mb_data = MiniblockData(list(data['miniblocks'].items())).get_data_for_round_report() + mb_data = MiniblockData(data['miniblocks']).get_data_for_round_report() for epoch in sorted(mb_data.keys()): print(f"Epoch: {epoch}") diff --git a/multiversx_cross_shard_analysis/miniblocks_timeline_report.py b/multiversx_cross_shard_analysis/miniblocks_timeline_report.py index a683e13..664b0b2 100644 --- a/multiversx_cross_shard_analysis/miniblocks_timeline_report.py +++ b/multiversx_cross_shard_analysis/miniblocks_timeline_report.py @@ -25,7 +25,7 @@ from reportlab.platypus.flowables import Flowable -from multiversx_cross_shard_analysis.constants import COLORS_MAPPING, TYPE_NAMES +from multiversx_cross_shard_analysis.constants import TYPE_NAMES from multiversx_cross_shard_analysis.miniblock_data import MiniblockData @@ -234,16 +234,13 @@ def build_pdf_from_miniblocks(epoch: int, miniblocks: list[dict[str, Any]], outn doc.build(story) -# ----------------------------- -# Example using new tuple form: (label, info, color) -# ----------------------------- if __name__ == "__main__": # run PDF build with open('./Reports/cross-shard-execution-anal-9afe696daf/Miniblocks/miniblocks_report.json', 'r') as f: data = json.load(f) - mb_data = MiniblockData(list(data['miniblocks'].items())).get_data_for_detail_report() + mb_data = MiniblockData(data['miniblocks']).get_data_for_detail_report() for epoch in sorted(mb_data.keys()): print(f"Epoch: {epoch}") From a1984ec8c9ea35d4945c2e6a5e7458a35f28f2b1 Mon Sep 17 00:00:00 2001 From: Mihaela Radian Date: Thu, 4 Dec 2025 20:35:17 +0200 Subject: [PATCH 10/24] MX-17306 Cleanup --- multiversx_cross_shard_analysis/report.py | 318 ---------------------- 1 file changed, 318 deletions(-) delete mode 100644 multiversx_cross_shard_analysis/report.py diff --git a/multiversx_cross_shard_analysis/report.py b/multiversx_cross_shard_analysis/report.py deleted file mode 100644 index a0ba555..0000000 --- a/multiversx_cross_shard_analysis/report.py +++ /dev/null @@ -1,318 +0,0 @@ -from collections import defaultdict -from typing import Any -import matplotlib.colors as mcolors -import matplotlib.pyplot as plt -import json -from reportlab.lib import colors -from reportlab.lib.pagesizes import letter -from reportlab.pdfgen import canvas - -from 
multiversx_cross_shard_analysis.miniblock_data import MiniblockData - -data = { - "01f7f0f3503c62b69ff0fe22a57b97eaa2f164ac9e4a0bb852bc66e3f5c12369": { - "lanes": { - "origin": [ - [ - "origin_shard_proposed_headers", - 205 - ], - [ - "origin_shard_commited_headers", - 205 - ] - ], - "dest": [ - [ - "dest_shard_proposed_headers", - 207 - ], - [ - "dest_shard_commited_headers", - 207 - ] - ], - "meta": [ - [ - "meta_origin_shard_proposed_headers", - 206 - ], - [ - "meta_dest_shard_proposed_headers", - 208 - ], - [ - "meta_origin_shard_commited_headers", - 206 - ], - [ - "meta_dest_shard_commited_headers", - 208 - ] - ] - }, - "origin_epoch": 1 - }, - "3fce18121e1ce8e57a3d776c91468649699e8238fad7898f46b8ac88b8d2c1cb": { - "lanes": { - "origin": [ - [ - "origin_shard_proposed_headers", - 210 - ], - [ - "origin_shard_commited_headers", - 210 - ] - ], - "dest": [], - "meta": [ - [ - "meta_origin_shard_proposed_headers", - 211 - ], - [ - "meta_origin_shard_commited_headers", - 211 - ] - ] - }, - "origin_epoch": 1 - }, - "cb2bbdf01dd1a44c813ceecb58ae7404ad9016073fc707dda1be52f7dc5735fc": { - "lanes": { - "origin": [ - [ - "origin_shard_proposed_headers", - 210 - ], - [ - "origin_shard_commited_headers", - 210 - ] - ], - "dest": [ - [ - "dest_shard_proposed_headers", - 212 - ], - [ - "dest_shard_commited_headers", - 212 - ] - ], - "meta": [ - [ - "meta_origin_shard_proposed_headers", - 211 - ], - [ - "meta_dest_shard_proposed_headers", - 213 - ], - [ - "meta_origin_shard_commited_headers", - 211 - ], - [ - "meta_dest_shard_commited_headers", - 213 - ] - ] - }, - "origin_epoch": 1 - }, - "01e7fc8132cd61f6aede3231bc9c9dc36dbe7d3ffcaca59fce7b46cca0e5884e": { - "lanes": { - "origin": [ - [ - "origin_shard_proposed_headers", - 210 - ], - [ - "origin_shard_commited_headers", - 210 - ] - ], - "dest": [ - [ - "dest_shard_proposed_headers", - 212 - ], - [ - "dest_shard_commited_headers", - 212 - ] - ], - "meta": [ - [ - "meta_origin_shard_proposed_headers", - 211 - ], - [ - "meta_dest_shard_proposed_headers", - 213 - ], - [ - "meta_origin_shard_commited_headers", - 211 - ], - [ - "meta_dest_shard_commited_headers", - 213 - ] - ] - }, - "origin_epoch": 1 - }, - "f966a0d0370de9a7b0675ec153c03f61031e02864fe781ab49cca0f60439e590": { - "lanes": { - "origin": [ - [ - "origin_shard_proposed_headers", - 210 - ], - [ - "origin_shard_commited_headers", - 210 - ] - ], - "dest": [ - [ - "dest_shard_proposed_headers", - 212 - ], - [ - "dest_shard_commited_headers", - 212 - ] - ], - "meta": [ - [ - "meta_origin_shard_proposed_headers", - 211 - ], - [ - "meta_dest_shard_proposed_headers", - 213 - ], - [ - "meta_origin_shard_commited_headers", - 211 - ], - [ - "meta_dest_shard_commited_headers", - 213 - ] - ] - }, - "origin_epoch": 1 - }, - "cdac3609485d6813f79c717e48e5d6cf94b3ca6e25dd372a21f3511f0eb1e536": { - "lanes": { - "origin": [ - [ - "origin_shard_proposed_headers", - 210 - ], - [ - "origin_shard_commited_headers", - 210 - ] - ], - "dest": [ - [ - "dest_shard_proposed_headers", - 212 - ], - [ - "dest_shard_commited_headers", - 212 - ] - ], - "meta": [ - [ - "meta_origin_shard_proposed_headers", - 211 - ], - [ - "meta_dest_shard_proposed_headers", - 213 - ], - [ - "meta_origin_shard_commited_headers", - 211 - ], - [ - "meta_dest_shard_commited_headers", - 213 - ] - ] - }, - "origin_epoch": 1 - } -} - - -def draw_timeline(data: dict[str, Any]): - lane_colors = { - "origin": "#4CAF50", # green - "dest": "#FFEB3B", # yellow - "meta": "#2196F3" # blue - } - - shade_factor = {"proposed": 1.0, "commited": 0.6} - - 
plt.figure(figsize=(12, 6)) - - for i, (mb_hash, mb_data) in enumerate(data.items()): - # track number of events per round to offset them - round_counts = defaultdict(int) - for lane_type, events in mb_data["lanes"].items(): - for name, round_num in events: - shade = shade_factor["proposed"] if "proposed" in name else shade_factor["commited"] - rgb = mcolors.to_rgb(lane_colors[lane_type]) - color = tuple([c * shade for c in rgb]) - - # stack multiple events in the same round - offset = 0.15 * round_counts[round_num] - plt.scatter(round_num, -i + offset, color=color, s=200, marker="s") - round_counts[round_num] += 1 - - plt.yticks([-i for i in range(len(data))], [h[:8] + '…' for h in data.keys()]) - plt.xlabel("Round") - plt.title("Miniblock Timelines") - plt.gca().invert_yaxis() - plt.tight_layout() - plt.savefig("miniblocks.pdf") - plt.show() - - -example = {"run_name": "cross-shard-execution-anal-9afe696daf", "miniblocks": {"01f7f0f3503c62b69ff0fe22a57b97eaa2f164ac9e4a0bb852bc66e3f5c12369": {"hash": "01f7f0f3503c62b69ff0fe22a57b97eaa2f164ac9e4a0bb852bc66e3f5c12369", "receiverShardID": 0, "reserved": "", "senderShardID": 1, "txCount": 1, "type": 0, "mentioned": [["dest_shard_proposed_headers", {"nonce": 207, "round": 207, "epoch": 1, "shard_id": 0}], ["dest_shard_commited_headers", {"nonce": 207, "round": 207, "epoch": 1, "shard_id": 0}], ["origin_shard_proposed_headers", {"nonce": 205, "round": 205, "epoch": 1, "shard_id": 1}], ["origin_shard_commited_headers", {"nonce": 205, "round": 205, "epoch": 1, "shard_id": 1}], ["meta_origin_shard_proposed_headers", {"nonce": 206, "round": 206, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_proposed_headers", {"nonce": 208, "round": 208, "epoch": 1, "shard_id": 4294967295}], ["meta_origin_shard_commited_headers", {"nonce": 206, "round": 206, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_commited_headers", {"nonce": 208, "round": 208, "epoch": 1, "shard_id": 4294967295}]]}, "3fce18121e1ce8e57a3d776c91468649699e8238fad7898f46b8ac88b8d2c1cb": {"hash": "3fce18121e1ce8e57a3d776c91468649699e8238fad7898f46b8ac88b8d2c1cb", "receiverShardID": 0, "reserved": "200b", "senderShardID": 0, "txCount": 12, "type": 0, "mentioned": [["origin_shard_proposed_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 0}], ["origin_shard_commited_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 0}], ["meta_origin_shard_proposed_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_origin_shard_commited_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}]]}, "cb2bbdf01dd1a44c813ceecb58ae7404ad9016073fc707dda1be52f7dc5735fc": {"hash": "cb2bbdf01dd1a44c813ceecb58ae7404ad9016073fc707dda1be52f7dc5735fc", "receiverShardID": 1, "reserved": "201c", "senderShardID": 0, "txCount": 29, "type": 0, "mentioned": [["origin_shard_proposed_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 0}], ["origin_shard_commited_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 0}], ["dest_shard_proposed_headers", {"nonce": 212, "round": 212, "epoch": 1, "shard_id": 1}], ["dest_shard_commited_headers", {"nonce": 212, "round": 212, "epoch": 1, "shard_id": 1}], ["meta_origin_shard_proposed_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_proposed_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}], ["meta_origin_shard_commited_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], 
["meta_dest_shard_commited_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}]]}, "01e7fc8132cd61f6aede3231bc9c9dc36dbe7d3ffcaca59fce7b46cca0e5884e": {"hash": "01e7fc8132cd61f6aede3231bc9c9dc36dbe7d3ffcaca59fce7b46cca0e5884e", "receiverShardID": 2, "reserved": "2008", "senderShardID": 0, "txCount": 9, "type": 0, "mentioned": [["origin_shard_proposed_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 0}], ["origin_shard_commited_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 0}], ["meta_origin_shard_proposed_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_proposed_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}], ["meta_origin_shard_commited_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_commited_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}], ["dest_shard_proposed_headers", {"nonce": 211, "round": 212, "epoch": 1, "shard_id": 2}], ["dest_shard_commited_headers", {"nonce": 211, "round": 212, "epoch": 1, "shard_id": 2}]]}, "f966a0d0370de9a7b0675ec153c03f61031e02864fe781ab49cca0f60439e590": {"hash": "f966a0d0370de9a7b0675ec153c03f61031e02864fe781ab49cca0f60439e590", "receiverShardID": 0, "reserved": "2012", "senderShardID": 2, "txCount": 19, "type": 0, "mentioned": [["dest_shard_proposed_headers", {"nonce": 212, "round": 212, "epoch": 1, "shard_id": 0}], ["dest_shard_commited_headers", {"nonce": 212, "round": 212, "epoch": 1, "shard_id": 0}], ["meta_origin_shard_proposed_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_proposed_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}], ["meta_origin_shard_commited_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_commited_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}], ["origin_shard_proposed_headers", {"nonce": 209, "round": 210, "epoch": 1, "shard_id": 2}], ["origin_shard_commited_headers", {"nonce": 209, "round": 210, "epoch": 1, "shard_id": 2}]]}, "cdac3609485d6813f79c717e48e5d6cf94b3ca6e25dd372a21f3511f0eb1e536": {"hash": "cdac3609485d6813f79c717e48e5d6cf94b3ca6e25dd372a21f3511f0eb1e536", "receiverShardID": 0, "reserved": "200e", "senderShardID": 1, "txCount": 15, "type": 0, "mentioned": [["dest_shard_proposed_headers", {"nonce": 212, "round": 212, "epoch": 1, "shard_id": 0}], ["dest_shard_commited_headers", {"nonce": 212, "round": 212, "epoch": 1, "shard_id": 0}], ["origin_shard_proposed_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 1}], ["origin_shard_commited_headers", {"nonce": 210, "round": 210, "epoch": 1, "shard_id": 1}], ["meta_origin_shard_proposed_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_proposed_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}], ["meta_origin_shard_commited_headers", {"nonce": 211, "round": 211, "epoch": 1, "shard_id": 4294967295}], ["meta_dest_shard_commited_headers", {"nonce": 213, "round": 213, "epoch": 1, "shard_id": 4294967295}]]}}} -# usage: -# export_timeline_pdf(data, "miniblock_lifecycle.pdf") - - -def chunks(lst, size): - for i in range(0, len(lst), size): - yield lst[i:i + size] - - -if __name__ == "__main__": - with open('./Reports/cross-shard-execution-anal-9afe696daf_old/Miniblocks/miniblocks_report.json', 'r') as f: - data = json.load(f) - - mb_data = 
MiniblockData(list(data['miniblocks'].items())).get_data_for_detailed_report1() - - LIMIT = 30 # how many miniblocks per draw - - for epoch in sorted(mb_data.keys()): - print(f"Epoch: {epoch} - miniblocks: {len(mb_data[epoch])}") - - sorted_mb = sorted( - mb_data[epoch].items(), - key=lambda x: x[1]['start_round'] - ) - - for batch in chunks(sorted_mb, LIMIT): - draw_timeline(data=dict(batch)) From 76e8b766e47348d96b02117f03aac150cab71c86 Mon Sep 17 00:00:00 2001 From: Mihaela Radian Date: Thu, 4 Dec 2025 20:39:00 +0200 Subject: [PATCH 11/24] MX-17306 Cleanup --- README.md | 6 +++--- multiversx_cross_shard_analysis/constants.py | 1 + .../decode_reserved.py | 4 +++- .../gather_data.py | 2 -- .../header_analysis_archive_handler.py | 5 ++--- .../header_analysis_checker.py | 3 +-- .../header_structures.py | 7 ++++--- .../headers_timeline_report.py | 20 +++++++------------ .../miniblock_data.py | 4 ++-- .../miniblocks_round_report.py | 13 +++++------- .../miniblocks_timeline_report.py | 17 +++++----------- .../test_decode_reserved.py | 4 ++-- 12 files changed, 35 insertions(+), 51 deletions(-) diff --git a/README.md b/README.md index 3bfd58e..a19c46c 100644 --- a/README.md +++ b/README.md @@ -2,8 +2,8 @@ Logs parsing utilities and applications ## LOGS PARSER TOOLS: -The tool provides general abstract classes that can be useful for parsing logs. -In order to create an application that uses off-line parsing of logs files, these classes must be inherited and methods should be implemented for that particular case. +The tool provides general abstract classes that can be useful for parsing logs. +In order to create an application that uses off-line parsing of logs files, these classes must be inherited and methods should be implemented for that particular case. 
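For illustration, creating such an application boils down to subclassing the provided base classes. A minimal sketch, assuming the `NodeLogsChecker` interface used elsewhere in this patch series (a `node_name`/`run_name` constructor plus a `process_logs` hook); `MyLogsChecker` and `error_count` are illustrative names only:

```python
# Hypothetical subclass of the abstract NodeLogsChecker; the constructor
# signature and the process_logs() hook mirror the usage elsewhere in this
# repository, while the class name and counter are illustrative.
from multiversx_logs_parser_tools.node_logs_checker import NodeLogsChecker


class MyLogsChecker(NodeLogsChecker):
    def __init__(self, node_name: str, run_name: str):
        super().__init__(node_name, run_name)
        self.error_count = 0  # application-specific state

    def process_logs(self):
        # implement the parsing logic for this particular application here
        pass
```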
### ARCHIVE HANDLER
- General application processing class that loops through the nodes in the downloaded logs archive and calls its NodeLogsChecker instance for each of them
@@ -43,4 +43,4 @@ pip install -r ./requirements-dev.txt --upgrade
EXAMPLE USAGE
```
python -m multiversx_cross_shard_analysis.gather_data --path /home/mihaela/Downloads/cross-shard-execution-anal-9afe696daf.zip
-```
\ No newline at end of file
+```
diff --git a/multiversx_cross_shard_analysis/constants.py b/multiversx_cross_shard_analysis/constants.py
index b6a8e94..54d65a1 100644
--- a/multiversx_cross_shard_analysis/constants.py
+++ b/multiversx_cross_shard_analysis/constants.py
@@ -1,4 +1,5 @@
 from enum import Enum
+
 from reportlab.lib import colors

 origin_shard = "origin_shard"
diff --git a/multiversx_cross_shard_analysis/decode_reserved.py b/multiversx_cross_shard_analysis/decode_reserved.py
index 78c9cbe..21908d3 100644
--- a/multiversx_cross_shard_analysis/decode_reserved.py
+++ b/multiversx_cross_shard_analysis/decode_reserved.py
@@ -1,7 +1,9 @@
 from typing import Any

-from multiversx_cross_shard_analysis.constants import FIELD_NAME_MAPPING, MINIBLOCK_STATE_MAPPING, PROCESSING_TYPE_MAPPING
+from multiversx_cross_shard_analysis.constants import (FIELD_NAME_MAPPING,
+                                                       MINIBLOCK_STATE_MAPPING,
+                                                       PROCESSING_TYPE_MAPPING)


 def get_default_decoded_data(tx_count: int) -> dict[str, Any]:
diff --git a/multiversx_cross_shard_analysis/gather_data.py b/multiversx_cross_shard_analysis/gather_data.py
index c15679e..a369ab7 100644
--- a/multiversx_cross_shard_analysis/gather_data.py
+++ b/multiversx_cross_shard_analysis/gather_data.py
@@ -1,8 +1,6 @@
 from datetime import datetime, timedelta
-
 from .header_analysis_archive_handler import HeaderAnalysisArchiveHandler
-
 from .header_analysis_checker import HeaderAnalysisChecker
 from .header_analysis_parser import HeaderAnalysisParser
diff --git a/multiversx_cross_shard_analysis/header_analysis_archive_handler.py b/multiversx_cross_shard_analysis/header_analysis_archive_handler.py
index 08c007d..c0be1f4 100644
--- a/multiversx_cross_shard_analysis/header_analysis_archive_handler.py
+++ b/multiversx_cross_shard_analysis/header_analysis_archive_handler.py
@@ -1,11 +1,10 @@
 import json
 from pathlib import Path

-from multiversx_logs_parser_tools.archive_handler import ArchiveHandler
-
-from .header_structures import HeaderData, ShardData
+from multiversx_logs_parser_tools.archive_handler import ArchiveHandler

 from .header_analysis_checker import HeaderAnalysisChecker
+from .header_structures import HeaderData, ShardData


 class HeaderAnalysisArchiveHandler(ArchiveHandler):
diff --git a/multiversx_cross_shard_analysis/header_analysis_checker.py b/multiversx_cross_shard_analysis/header_analysis_checker.py
index cf910d1..02da5e3 100644
--- a/multiversx_cross_shard_analysis/header_analysis_checker.py
+++ b/multiversx_cross_shard_analysis/header_analysis_checker.py
@@ -3,9 +3,8 @@
 from multiversx_logs_parser_tools.node_logs_checker import NodeLogsChecker

-from .header_structures import HeaderData
-
 from .header_analysis_parser import HeaderAnalysisParser
+from .header_structures import HeaderData


 class HeaderAnalysisChecker(NodeLogsChecker):
diff --git a/multiversx_cross_shard_analysis/header_structures.py b/multiversx_cross_shard_analysis/header_structures.py
index 66b928b..18af9e7 100644
--- a/multiversx_cross_shard_analysis/header_structures.py
+++ b/multiversx_cross_shard_analysis/header_structures.py
@@ -1,11 +1,12 @@
 from typing import Any

-from
multiversx_cross_shard_analysis.test_decode_reserved import decode_reserved_field - from multiversx_cross_shard_analysis.miniblock_data import MiniblockData +from multiversx_cross_shard_analysis.test_decode_reserved import \ + decode_reserved_field -from .constants import COLORS_MAPPING, TYPE_NAMES, dest_shard, origin_shard, meta +from .constants import (COLORS_MAPPING, TYPE_NAMES, dest_shard, meta, + origin_shard) def get_value(variable_name: str, header: dict[str, Any]) -> Any: diff --git a/multiversx_cross_shard_analysis/headers_timeline_report.py b/multiversx_cross_shard_analysis/headers_timeline_report.py index 4ba9210..7696c4a 100644 --- a/multiversx_cross_shard_analysis/headers_timeline_report.py +++ b/multiversx_cross_shard_analysis/headers_timeline_report.py @@ -1,22 +1,16 @@ import json from typing import Any -from reportlab.lib.pagesizes import A4 + +from reportlab.graphics.shapes import Drawing, Rect, String from reportlab.lib import colors -from reportlab.platypus import ( - SimpleDocTemplate, - Paragraph, - Spacer, - Table, - TableStyle, - PageBreak, - Flowable -) +from reportlab.lib.pagesizes import A4 from reportlab.lib.styles import getSampleStyleSheet -from reportlab.graphics.shapes import Drawing, Rect, String +from reportlab.platypus import (Flowable, PageBreak, Paragraph, + SimpleDocTemplate, Spacer, Table, TableStyle) from multiversx_cross_shard_analysis.constants import COLORS_MAPPING, Colors - -from multiversx_cross_shard_analysis.header_structures import HeaderData, ShardData +from multiversx_cross_shard_analysis.header_structures import (HeaderData, + ShardData) # ----------------------------- # CONFIG (mirrors miniblock report) diff --git a/multiversx_cross_shard_analysis/miniblock_data.py b/multiversx_cross_shard_analysis/miniblock_data.py index d902c7e..a9108ec 100644 --- a/multiversx_cross_shard_analysis/miniblock_data.py +++ b/multiversx_cross_shard_analysis/miniblock_data.py @@ -1,8 +1,8 @@ from typing import Any from multiversx_cross_shard_analysis.constants import COLORS_MAPPING, Colors - -from multiversx_cross_shard_analysis.decode_reserved import get_default_decoded_data +from multiversx_cross_shard_analysis.decode_reserved import \ + get_default_decoded_data class MiniblockData: diff --git a/multiversx_cross_shard_analysis/miniblocks_round_report.py b/multiversx_cross_shard_analysis/miniblocks_round_report.py index 1cc7e41..4972a9f 100644 --- a/multiversx_cross_shard_analysis/miniblocks_round_report.py +++ b/multiversx_cross_shard_analysis/miniblocks_round_report.py @@ -1,18 +1,15 @@ import json from typing import Any -from reportlab.platypus import ( - SimpleDocTemplate, Paragraph, Spacer, - Flowable -) -from reportlab.lib.styles import getSampleStyleSheet -from reportlab.lib.pagesizes import A4 from reportlab.graphics.shapes import Drawing, Rect, String from reportlab.lib import colors -from reportlab.platypus import Table, TableStyle +from reportlab.lib.pagesizes import A4 +from reportlab.lib.styles import getSampleStyleSheet +from reportlab.platypus import (Flowable, Paragraph, SimpleDocTemplate, Spacer, + Table, TableStyle) -from multiversx_cross_shard_analysis.miniblock_data import MiniblockData from multiversx_cross_shard_analysis.constants import COLORS_MAPPING +from multiversx_cross_shard_analysis.miniblock_data import MiniblockData # ---------------------------------------- # legend diff --git a/multiversx_cross_shard_analysis/miniblocks_timeline_report.py b/multiversx_cross_shard_analysis/miniblocks_timeline_report.py index 664b0b2..85e0c19 
100644 --- a/multiversx_cross_shard_analysis/miniblocks_timeline_report.py +++ b/multiversx_cross_shard_analysis/miniblocks_timeline_report.py @@ -10,23 +10,16 @@ import json from typing import Any -from reportlab.lib.pagesizes import A4 + +from reportlab.graphics.shapes import Drawing, Rect, String from reportlab.lib import colors -from reportlab.platypus import ( - SimpleDocTemplate, - Paragraph, - Spacer, - Table, - TableStyle, - PageBreak, -) +from reportlab.lib.pagesizes import A4 from reportlab.lib.styles import getSampleStyleSheet -from reportlab.graphics.shapes import Drawing, Rect, String +from reportlab.platypus import (PageBreak, Paragraph, SimpleDocTemplate, + Spacer, Table, TableStyle) from reportlab.platypus.flowables import Flowable - from multiversx_cross_shard_analysis.constants import TYPE_NAMES - from multiversx_cross_shard_analysis.miniblock_data import MiniblockData # ----------------------------- diff --git a/multiversx_cross_shard_analysis/test_decode_reserved.py b/multiversx_cross_shard_analysis/test_decode_reserved.py index 6cd6a17..85fc9eb 100644 --- a/multiversx_cross_shard_analysis/test_decode_reserved.py +++ b/multiversx_cross_shard_analysis/test_decode_reserved.py @@ -1,5 +1,5 @@ -from multiversx_cross_shard_analysis.decode_reserved import decode_reserved_field - +from multiversx_cross_shard_analysis.decode_reserved import \ + decode_reserved_field mentioned_headers = { "origin_shard_proposed_headers": "20ec12", From 7b82bc955065571bc24257a6a2f6a03c45ad7089 Mon Sep 17 00:00:00 2001 From: Mihaela Radian Date: Thu, 4 Dec 2025 21:40:37 +0200 Subject: [PATCH 12/24] MX-17306 Added CLI args for reports run from json files --- .../headers_timeline_report.py | 69 +++++++++++++++---- .../miniblocks_round_report.py | 45 ++++++++++-- .../miniblocks_timeline_report.py | 43 ++++++++++-- 3 files changed, 133 insertions(+), 24 deletions(-) diff --git a/multiversx_cross_shard_analysis/headers_timeline_report.py b/multiversx_cross_shard_analysis/headers_timeline_report.py index 7696c4a..bb08afa 100644 --- a/multiversx_cross_shard_analysis/headers_timeline_report.py +++ b/multiversx_cross_shard_analysis/headers_timeline_report.py @@ -1,4 +1,7 @@ +import argparse import json +import os +import sys from typing import Any from reportlab.graphics.shapes import Drawing, Rect, String @@ -241,29 +244,67 @@ def build_nonce_timeline_pdf(shards_data: dict[int, dict[int, dict[int, list[Any } -# build_nonce_timeline_pdf(input_data, outname="nonce_timeline_report.pdf") -# print("Nonce timeline report generated: nonce_timeline_report.pdf") if __name__ == "__main__": - # run PDF build + parser = argparse.ArgumentParser(description="Nonce timeline report generator") + group = parser.add_mutually_exclusive_group(required=True) + group.add_argument("--path", type=str, help="Path to folder containing run output") + group.add_argument("--run-name", type=str, help="Name of the run inside ./Reports/") + + args = parser.parse_args() + + # resolve final folder path + if args.path: + base_path = args.path + else: + base_path = os.path.join("Reports", args.run_name) + + # verify base folder exists + if not os.path.isdir(base_path): + print(f"Error: folder not found: {base_path}") + sys.exit(1) + + # verify expected files exist + shard_ids = [0, 1, 2, 4294967295] + missing = [] + + for shard in shard_ids: + p = os.path.join(base_path, "Shards", f"{shard}_report.json") + if not os.path.isfile(p): + missing.append(p) + + miniblocks_path = os.path.join(base_path, "Miniblocks", "miniblocks_report.json") + if 
not os.path.isfile(miniblocks_path): + missing.append(miniblocks_path) + + if missing: + print("Error: missing required files:") + for m in missing: + print(" -", m) + sys.exit(1) + + # load JSONs headers = ShardData() - for shard in [0, 1, 2, 4294967295]: - with open(f'./Reports/cross-shard-execution-anal-9afe696daf/Shards/{shard}_report.json', 'r') as f: - data = json.load(f) + for shard in shard_ids: + with open(os.path.join(base_path, "Shards", f"{shard}_report.json")) as f: + data = json.load(f) headers.parsed_headers[shard] = HeaderData() - headers.parsed_headers[shard].header_dictionary = data['shards'] + headers.parsed_headers[shard].header_dictionary = data["shards"] - with open('./Reports/cross-shard-execution-anal-9afe696daf/Miniblocks/miniblocks_report.json', 'r') as f: + with open(miniblocks_path) as f: data = json.load(f) - headers.miniblocks = data['miniblocks'] + headers.miniblocks = data["miniblocks"] + # process input_data = headers.get_data_for_header_horizontal_report() - for epoch in sorted(input_data.keys()): - print(f"Epoch: {epoch}") - report_list = input_data[epoch] + # output path + out_folder = os.path.join(base_path, "NonceTimeline") + os.makedirs(out_folder, exist_ok=True) - build_nonce_timeline_pdf(report_list, outname=f"nonce_timeline_report_{epoch}.pdf") - print(f"Nonce timeline report generated: nonce_timeline_report_{epoch}.pdf") + for epoch in sorted(input_data.keys()): + outfile = os.path.join(out_folder, f"nonce_timeline_report_{epoch}.pdf") + build_nonce_timeline_pdf(input_data[epoch], outname=outfile) + print(f"Nonce timeline report for Epoch {epoch} generated: {outfile}") diff --git a/multiversx_cross_shard_analysis/miniblocks_round_report.py b/multiversx_cross_shard_analysis/miniblocks_round_report.py index 4972a9f..167cf23 100644 --- a/multiversx_cross_shard_analysis/miniblocks_round_report.py +++ b/multiversx_cross_shard_analysis/miniblocks_round_report.py @@ -1,5 +1,8 @@ import json from typing import Any +import argparse +import os +import sys from reportlab.graphics.shapes import Drawing, Rect, String from reportlab.lib import colors @@ -130,10 +133,10 @@ def draw(self): # build report for one epoch # ---------------------------------------- -def build_report(epoch: int, rounds_data: dict[int, Any], shards: list[int]): +def build_report(epoch: int, rounds_data: dict[int, Any], shards: list[int], outname: str): doc = SimpleDocTemplate( - f"miniblock_report_epoch_{epoch}.pdf", + outname, pagesize=A4, leftMargin=20, rightMargin=20, topMargin=20, bottomMargin=20 ) @@ -179,12 +182,44 @@ def build_report(epoch: int, rounds_data: dict[int, Any], shards: list[int]): # ---------------------------------------- if __name__ == "__main__": - with open('./Reports/cross-shard-execution-anal-9afe696daf/Miniblocks/miniblocks_report.json', 'r') as f: + + parser = argparse.ArgumentParser(description="Miniblock shards timeline report") + group = parser.add_mutually_exclusive_group(required=True) + group.add_argument("--path", type=str, help="Path to run folder") + group.add_argument("--run-name", type=str, help="Name of the folder under ./Reports/") + + args = parser.parse_args() + + # resolve base path + if args.path: + base_path = args.path + else: + base_path = os.path.join("Reports", args.run_name) + + if not os.path.isdir(base_path): + print(f"Error: folder not found: {base_path}") + sys.exit(1) + + miniblocks_path = os.path.join(base_path, "Miniblocks", "miniblocks_report.json") + if not os.path.isfile(miniblocks_path): + print("Error: missing required 
file:") + print(" -", miniblocks_path) + sys.exit(1) + + # load JSON + with open(miniblocks_path, "r") as f: data = json.load(f) - mb_data = MiniblockData(data['miniblocks']).get_data_for_round_report() + mb_data = MiniblockData(data["miniblocks"]).get_data_for_round_report() + + # output folder + out_folder = os.path.join(base_path, "MiniblocksShardTimeline") + os.makedirs(out_folder, exist_ok=True) + # generate PDFs per epoch for epoch in sorted(mb_data.keys()): print(f"Epoch: {epoch}") report_dict = mb_data[epoch] - build_report(int(epoch), report_dict, shards=[0, 1, 2, 4294967295]) + outfile = os.path.join(out_folder, f"shards_timeline_report_{epoch}.pdf") + build_report(int(epoch), report_dict, shards=[0, 1, 2, 4294967295], outname=outfile) + print("→", outfile) diff --git a/multiversx_cross_shard_analysis/miniblocks_timeline_report.py b/multiversx_cross_shard_analysis/miniblocks_timeline_report.py index 85e0c19..df85704 100644 --- a/multiversx_cross_shard_analysis/miniblocks_timeline_report.py +++ b/multiversx_cross_shard_analysis/miniblocks_timeline_report.py @@ -7,7 +7,9 @@ - timeline table: columns = rounds (including gaps), each column contains stacked colored rectangles for mentions - colors: use mention['color'] if present, otherwise derived from mention type + reserved """ - +import argparse +import os +import sys import json from typing import Any @@ -228,14 +230,45 @@ def build_pdf_from_miniblocks(epoch: int, miniblocks: list[dict[str, Any]], outn if __name__ == "__main__": - # run PDF build - with open('./Reports/cross-shard-execution-anal-9afe696daf/Miniblocks/miniblocks_report.json', 'r') as f: + parser = argparse.ArgumentParser(description="Miniblock timeline detail report (CLI)") + group = parser.add_mutually_exclusive_group(required=True) + group.add_argument("--path", type=str, help="Path to run folder") + group.add_argument("--run-name", type=str, help="Name of the folder under ./Reports/") + + args = parser.parse_args() + + # resolve base path + if args.path: + base_path = args.path + else: + base_path = os.path.join("Reports", args.run_name) + + if not os.path.isdir(base_path): + print(f"Error: folder not found: {base_path}") + sys.exit(1) + + miniblocks_path = os.path.join(base_path, "Miniblocks", "miniblocks_report.json") + if not os.path.isfile(miniblocks_path): + print("Error: missing required file:") + print(" -", miniblocks_path) + sys.exit(1) + + # load JSON + with open(miniblocks_path, "r") as f: data = json.load(f) - mb_data = MiniblockData(data['miniblocks']).get_data_for_detail_report() + # build report data the exact same way you did before + mb_data = MiniblockData(data["miniblocks"]).get_data_for_detail_report() + + # prepare output folder (keeps reports inside the run folder) + out_folder = os.path.join(base_path, "MiniblocksTimelineDetail") + os.makedirs(out_folder, exist_ok=True) + # generate PDFs per epoch (same calls as before) for epoch in sorted(mb_data.keys()): print(f"Epoch: {epoch}") report_list = mb_data[epoch] - build_pdf_from_miniblocks(int(epoch), report_list, outname=f"miniblock_timeline_report_epoch_{epoch}.pdf") + outpath = os.path.join(out_folder, f"miniblock_timeline_report_epoch_{epoch}.pdf") + build_pdf_from_miniblocks(int(epoch), report_list, outname=outpath) + print(f"Miniblock timeline report generated: {outpath}") From 5ba977b18baaae518e255834ac4694f06eaca4c5 Mon Sep 17 00:00:00 2001 From: Mihaela Radian Date: Mon, 8 Dec 2025 09:37:47 +0200 Subject: [PATCH 13/24] MX-17306 Fixes --- .../gather_data.py | 42 +++++++++++++++++++ 
.../header_structures.py | 2 - 2 files changed, 42 insertions(+), 2 deletions(-) diff --git a/multiversx_cross_shard_analysis/gather_data.py b/multiversx_cross_shard_analysis/gather_data.py index a369ab7..80d4efd 100644 --- a/multiversx_cross_shard_analysis/gather_data.py +++ b/multiversx_cross_shard_analysis/gather_data.py @@ -1,4 +1,13 @@ from datetime import datetime, timedelta +import os + +from multiversx_cross_shard_analysis.miniblock_data import MiniblockData + +from multiversx_cross_shard_analysis.miniblocks_round_report import build_report + +from multiversx_cross_shard_analysis.miniblocks_timeline_report import build_pdf_from_miniblocks + +from multiversx_cross_shard_analysis.headers_timeline_report import build_nonce_timeline_pdf from .header_analysis_archive_handler import HeaderAnalysisArchiveHandler from .header_analysis_checker import HeaderAnalysisChecker @@ -14,6 +23,39 @@ def gather_data(): handler.handle_logs() print(f'Archive checked successfully: {timedelta(seconds=(datetime.now() - time_started).total_seconds())}s') + # Generate reports + mb_data = MiniblockData(handler.shard_data.miniblocks).get_data_for_round_report() + out_folder = os.path.join(handler.run_name, "MiniblocksShardTimeline") + os.makedirs(out_folder, exist_ok=True) + + # generate PDFs per epoch + for epoch in sorted(mb_data.keys()): + print(f"Epoch: {epoch}") + report_dict = mb_data[epoch] + outfile = os.path.join(out_folder, f"shards_timeline_report_{epoch}.pdf") + build_report(int(epoch), report_dict, shards=[0, 1, 2, 4294967295], outname=outfile) + print("→", outfile) + + mb_data = MiniblockData(handler.shard_data.miniblocks).get_data_for_detail_report() + out_folder = os.path.join(handler.run_name, "MiniblocksTimelineDetail") + os.makedirs(out_folder, exist_ok=True) + + for epoch in sorted(mb_data.keys()): + print(f"Epoch: {epoch}") + outfile = os.path.join(out_folder, f"miniblock_timeline_report_epoch_{epoch}.pdf") + build_pdf_from_miniblocks(int(epoch), mb_data[epoch], outname=outfile) + print("→", outfile) + + input_data = handler.shard_data.get_data_for_header_horizontal_report() + out_folder = os.path.join(handler.run_name, "NonceTimeline") + os.makedirs(out_folder, exist_ok=True) + + for epoch in sorted(input_data.keys()): + print(f"Epoch: {epoch}") + outfile = os.path.join(out_folder, f"nonce_timeline_report_{epoch}.pdf") + build_nonce_timeline_pdf(input_data[epoch], outname=outfile) + print("→", outfile) + if __name__ == "__main__": gather_data() diff --git a/multiversx_cross_shard_analysis/header_structures.py b/multiversx_cross_shard_analysis/header_structures.py index 18af9e7..c329fc3 100644 --- a/multiversx_cross_shard_analysis/header_structures.py +++ b/multiversx_cross_shard_analysis/header_structures.py @@ -190,12 +190,10 @@ def get_data_for_header_horizontal_report(self) -> dict[str, dict[int, Any]]: nonce = get_value('nonce', header) round_num = get_value('round', header) - print(f"Processing header: epoch={epoch}, shard={shard_id}, nonce={nonce}, round={round_num}") # build result for this header (only cross-shard miniblocks) result: dict[int, list] = {} for miniblock in [mb for mb in get_value('miniBlockHeaders', header) if mb.get('senderShardID') == shard_id and mb.get('receiverShardID') != mb.get('senderShardID')]: - print(f" Processing miniblock: hash={miniblock.get('hash')}, senderShardID={miniblock.get('senderShardID')}, receiverShardID={miniblock.get('receiverShardID')}") mb_hash = miniblock.get('hash') for mention_type, metadata in self.miniblocks[mb_hash]['mentioned']: # 
skip proposed mentions From 425852b920424a1e0856a495a52209086e179f19 Mon Sep 17 00:00:00 2001 From: Mihaela Radian Date: Wed, 10 Dec 2025 02:00:36 +0200 Subject: [PATCH 14/24] MX-17306 pre commit fixes --- multiversx_cross_shard_analysis/gather_data.py | 14 +++++++------- .../miniblocks_round_report.py | 4 ++-- .../miniblocks_timeline_report.py | 2 +- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/multiversx_cross_shard_analysis/gather_data.py b/multiversx_cross_shard_analysis/gather_data.py index 80d4efd..1657454 100644 --- a/multiversx_cross_shard_analysis/gather_data.py +++ b/multiversx_cross_shard_analysis/gather_data.py @@ -1,13 +1,13 @@ -from datetime import datetime, timedelta import os +from datetime import datetime, timedelta +from multiversx_cross_shard_analysis.headers_timeline_report import \ + build_nonce_timeline_pdf from multiversx_cross_shard_analysis.miniblock_data import MiniblockData - -from multiversx_cross_shard_analysis.miniblocks_round_report import build_report - -from multiversx_cross_shard_analysis.miniblocks_timeline_report import build_pdf_from_miniblocks - -from multiversx_cross_shard_analysis.headers_timeline_report import build_nonce_timeline_pdf +from multiversx_cross_shard_analysis.miniblocks_round_report import \ + build_report +from multiversx_cross_shard_analysis.miniblocks_timeline_report import \ + build_pdf_from_miniblocks from .header_analysis_archive_handler import HeaderAnalysisArchiveHandler from .header_analysis_checker import HeaderAnalysisChecker diff --git a/multiversx_cross_shard_analysis/miniblocks_round_report.py b/multiversx_cross_shard_analysis/miniblocks_round_report.py index 167cf23..b3cbf78 100644 --- a/multiversx_cross_shard_analysis/miniblocks_round_report.py +++ b/multiversx_cross_shard_analysis/miniblocks_round_report.py @@ -1,8 +1,8 @@ -import json -from typing import Any import argparse +import json import os import sys +from typing import Any from reportlab.graphics.shapes import Drawing, Rect, String from reportlab.lib import colors diff --git a/multiversx_cross_shard_analysis/miniblocks_timeline_report.py b/multiversx_cross_shard_analysis/miniblocks_timeline_report.py index df85704..7989733 100644 --- a/multiversx_cross_shard_analysis/miniblocks_timeline_report.py +++ b/multiversx_cross_shard_analysis/miniblocks_timeline_report.py @@ -8,9 +8,9 @@ - colors: use mention['color'] if present, otherwise derived from mention type + reserved """ import argparse +import json import os import sys -import json from typing import Any from reportlab.graphics.shapes import Drawing, Rect, String From 574c5859f6de3f92c55652cc6f74400809b7d577 Mon Sep 17 00:00:00 2001 From: Mihaela Radian Date: Wed, 10 Dec 2025 17:22:53 +0200 Subject: [PATCH 15/24] MX-17306 Header V3 fixes --- multiversx_cross_shard_analysis/constants.py | 26 +++++++++++++++-- .../header_structures.py | 18 ++++++++++-- .../miniblock_data.py | 28 ++++++++++++++----- 3 files changed, 60 insertions(+), 12 deletions(-) diff --git a/multiversx_cross_shard_analysis/constants.py b/multiversx_cross_shard_analysis/constants.py index 54d65a1..df98d99 100644 --- a/multiversx_cross_shard_analysis/constants.py +++ b/multiversx_cross_shard_analysis/constants.py @@ -39,9 +39,13 @@ "dest_exec_proposed", "dest_exec_committed", - # notarization of execution results when meta includes the header containing the execution result - "meta_exec_proposed", - "meta_exec_committed", + # notarization of execution results when meta includes the header containing the execution result for 
origin shard + "meta_origin_exec_proposed", + "meta_origin_exec_committed", + + # notarization of execution results when meta includes the header containing the execution result for destination shard + "meta_dest_exec_proposed", + "meta_dest_exec_committed", ]) @@ -87,6 +91,14 @@ "dest_final", "meta_origin_committed", "meta_dest_committed", + "origin_exec_proposed", + "origin_exec_partial_executed", + "origin_exec_final", + "dest_exec_proposed", + "dest_exec_partial_executed", + "dest_exec_final", + "meta_origin_exec_committed", + "meta_dest_exec_committed", ]) COLORS_MAPPING = { @@ -98,4 +110,12 @@ Colors.dest_final: colors.pink, Colors.meta_origin_committed: colors.lightgreen, Colors.meta_dest_committed: colors.lightblue, + Colors.origin_exec_proposed: colors.khaki, + Colors.origin_exec_partial_executed: colors.gold, + Colors.origin_exec_final: colors.darkgoldenrod, + Colors.dest_exec_proposed: colors.lightcoral, + Colors.dest_exec_partial_executed: colors.crimson, + Colors.dest_exec_final: colors.firebrick, + Colors.meta_origin_exec_committed: colors.mediumseagreen, + Colors.meta_dest_exec_committed: colors.cornflowerblue, } diff --git a/multiversx_cross_shard_analysis/header_structures.py b/multiversx_cross_shard_analysis/header_structures.py index c329fc3..e48e38e 100644 --- a/multiversx_cross_shard_analysis/header_structures.py +++ b/multiversx_cross_shard_analysis/header_structures.py @@ -230,6 +230,10 @@ def __init__(self, header: dict[str, Any], status: str): self.metadata: dict[str, Any] = self.get_header_metadata(header) self.miniblocks: list[tuple[str, dict[str, Any]]] = self.get_miniblocks(header, status) + # returns 'origin' or 'dest' based on miniblock senderShardID + def get_miniblock_shard_type(self, miniblock_shard_id: int) -> str: + return 'origin_shard' if self.metadata["shard_id"] == miniblock_shard_id else "dest_shard" + def get_header_metadata(self, header: dict[str, Any]) -> dict[str, Any]: if Header.isHeaderV2(header): header = header['header'] @@ -241,12 +245,11 @@ def get_header_metadata(self, header: dict[str, Any]) -> dict[str, Any]: } def get_miniblocks(self, header: dict[str, Any], status: str) -> list[tuple[str, dict[str, Any]]]: - metadata = self.metadata miniblocks = [] if Header.isHeaderV2(header): header = header['header'] for miniblock in header.get('miniBlockHeaders', []): - miniblock_mention = f'{origin_shard if metadata['shard_id'] == miniblock['senderShardID'] else dest_shard}_{status}' + miniblock_mention = self.get_miniblock_shard_type(miniblock["senderShardID"]) + f'_{status}' miniblocks.append((miniblock_mention, miniblock)) if Header.isMetaHeader(header): for shard_header in header['shardInfo']: @@ -254,6 +257,17 @@ def get_miniblocks(self, header: dict[str, Any], status: str) -> list[tuple[str, for miniblock in shard_header.get('shardMiniBlockHeaders', []): miniblock_mention = f'{meta}_{origin_shard if shard_metadata['shard_id'] == miniblock['senderShardID'] else dest_shard}_{status}' miniblocks.append((miniblock_mention, miniblock)) + if Header.isMetaHeaderV3(header): + for exec_result in shard_header.get('executionResults', []): + for miniblock in exec_result.get('miniBlockHeaders', []): + miniblock_mention = f'{meta}_{origin_shard if shard_metadata["shard_id"] == miniblock["senderShardID"] else dest_shard}_exec_{status}' + miniblocks.append((miniblock_mention, miniblock)) + if Header.isHeaderV3(header): + for exec_result in header['executionResults']: + for miniblock in exec_result.get('miniBlockHeaders', []): + miniblock_mention = 
self.get_miniblock_shard_type(miniblock["senderShardID"]) + f'_{status}_exec' + miniblocks.append((miniblock_mention, miniblock)) + return miniblocks @staticmethod diff --git a/multiversx_cross_shard_analysis/miniblock_data.py b/multiversx_cross_shard_analysis/miniblock_data.py index a9108ec..523aa11 100644 --- a/multiversx_cross_shard_analysis/miniblock_data.py +++ b/multiversx_cross_shard_analysis/miniblock_data.py @@ -15,18 +15,32 @@ def get_color_for_state(self, mention_type: str, tx_count: int, header: dict[str if reserved == {}: reserved = get_default_decoded_data(tx_count=tx_count) if "meta" in mention_type: - color = Colors.meta_origin_committed if mention_type.startswith('meta_origin') else Colors.meta_dest_committed + if 'exec' in mention_type: + color = Colors.meta_origin_exec_committed if mention_type.startswith('meta_origin') else Colors.meta_dest_exec_committed + else: + color = Colors.meta_origin_committed if mention_type.startswith('meta_origin') else Colors.meta_dest_committed else: - color = Colors.origin_final if mention_type.startswith('origin') else Colors.dest_final + if 'exec' in mention_type: + color = Colors.origin_exec_final if mention_type.startswith('origin') else Colors.dest_exec_final + else: + color = Colors.origin_final if mention_type.startswith('origin') else Colors.dest_final else: # execution_type = header.get('reserved', {}).get('ExecutionType', '') state = header.get('reserved', {}).get('State', '') - if state == 'Proposed': - color = Colors.origin_proposed if mention_type.startswith('origin') else Colors.dest_proposed - elif state == 'PartialExecuted': - color = Colors.origin_partial_executed if mention_type.startswith('origin') else Colors.dest_partial_executed + if 'exec' in mention_type: + if state == 'Proposed': + color = Colors.origin_exec_proposed if mention_type.startswith('origin') else Colors.dest_exec_proposed + elif state == 'PartialExecuted': + color = Colors.origin_exec_partial_executed if mention_type.startswith('origin') else Colors.dest_exec_partial_executed + else: + color = Colors.origin_exec_final if mention_type.startswith('origin') else Colors.dest_exec_final else: - color = Colors.origin_final if mention_type.startswith('origin') else Colors.dest_final + if state == 'Proposed': + color = Colors.origin_proposed if mention_type.startswith('origin') else Colors.dest_proposed + elif state == 'PartialExecuted': + color = Colors.origin_partial_executed if mention_type.startswith('origin') else Colors.dest_partial_executed + else: + color = Colors.origin_final if mention_type.startswith('origin') else Colors.dest_final return color def get_data_for_round_report(self) -> dict[str, Any]: From 0a5a8902226d00f126edf69d4d176ffc884b5abb Mon Sep 17 00:00:00 2001 From: Mihaela Radian Date: Fri, 19 Dec 2025 04:27:38 +0200 Subject: [PATCH 16/24] MX-17306 Header V3 fixes --- README.md | 1 + .../gather_data.py | 3 + .../header_structures.py | 111 ++------- .../headers_timeline_report.py | 86 ++++--- .../miniblock_data.py | 61 ++++- .../miniblocks_round_report.py | 8 +- .../miniblocks_timeline_report.py | 6 +- .../test_decode_reserved.py | 9 +- .../test_miniblocks.py | 234 ++++++++++++++++++ 9 files changed, 380 insertions(+), 139 deletions(-) create mode 100644 multiversx_cross_shard_analysis/test_miniblocks.py diff --git a/README.md b/README.md index a19c46c..bc33392 100644 --- a/README.md +++ b/README.md @@ -43,4 +43,5 @@ pip install -r ./requirements-dev.txt --upgrade EXAMPLE USAGE ``` python -m multiversx_cross_shard_analysis.gather_data --path 
/home/mihaela/Downloads/cross-shard-execution-anal-9afe696daf.zip +python -m multiversx_cross_shard_analysis.headers_timeline_report --run-name cross-shard-execution-anal-6cc663f7af ``` diff --git a/multiversx_cross_shard_analysis/gather_data.py b/multiversx_cross_shard_analysis/gather_data.py index 1657454..6e26986 100644 --- a/multiversx_cross_shard_analysis/gather_data.py +++ b/multiversx_cross_shard_analysis/gather_data.py @@ -26,6 +26,7 @@ def gather_data(): # Generate reports mb_data = MiniblockData(handler.shard_data.miniblocks).get_data_for_round_report() out_folder = os.path.join(handler.run_name, "MiniblocksShardTimeline") + out_folder = os.path.join('Reports', out_folder) os.makedirs(out_folder, exist_ok=True) # generate PDFs per epoch @@ -38,6 +39,7 @@ def gather_data(): mb_data = MiniblockData(handler.shard_data.miniblocks).get_data_for_detail_report() out_folder = os.path.join(handler.run_name, "MiniblocksTimelineDetail") + out_folder = os.path.join('Reports', out_folder) os.makedirs(out_folder, exist_ok=True) for epoch in sorted(mb_data.keys()): @@ -48,6 +50,7 @@ def gather_data(): input_data = handler.shard_data.get_data_for_header_horizontal_report() out_folder = os.path.join(handler.run_name, "NonceTimeline") + out_folder = os.path.join('Reports', out_folder) os.makedirs(out_folder, exist_ok=True) for epoch in sorted(input_data.keys()): diff --git a/multiversx_cross_shard_analysis/header_structures.py b/multiversx_cross_shard_analysis/header_structures.py index e48e38e..dc2d3dd 100644 --- a/multiversx_cross_shard_analysis/header_structures.py +++ b/multiversx_cross_shard_analysis/header_structures.py @@ -58,6 +58,8 @@ def __init__(self): self.seen_miniblock_hashes = set() def add_node(self, node_data: HeaderData): + if node_data.header_dictionary['commited_headers'] == []: + node_data.header_dictionary['commited_headers'] = node_data.header_dictionary['proposed_headers'].copy() for header_status in node_data.header_dictionary.keys(): for header in node_data.header_dictionary[header_status]: shard_id = get_shard_id(header) @@ -74,102 +76,16 @@ def add_node(self, node_data: HeaderData): def add_miniblocks(self, header: dict[str, Any], status: str): header_struct = Header(header, status) - for mention_type, mb in header_struct.miniblocks: + for mention_type, mb, metadata in header_struct.miniblocks: mb_hash = mb.get('hash') if mb_hash not in self.seen_miniblock_hashes: self.seen_miniblock_hashes.add(mb_hash) self.miniblocks[mb_hash] = mb.copy() self.miniblocks[mb_hash]['mentioned'] = [] - metadata = header_struct.metadata.copy() + # metadata = header_struct.metadata.copy() metadata["reserved"] = decode_reserved_field(mb.get("reserved", ""), mb.get("txCount", 0)) self.miniblocks[mb_hash]['mentioned'].append((mention_type, metadata)) - def get_data_for_header_vertical_report(self) -> dict[str, dict[int, Any]]: - miniblocks = MiniblockData(self.miniblocks) - report: dict[str, dict[int, Any]] = {} - last_epoch = None - - for shard_id, header_data in self.parsed_headers.items(): - header_group_count = 0 - header_count = 0 - header_group_name = "" - last_epoch = None - - for header in sorted(header_data.header_dictionary['commited_headers'], - key=lambda x: get_value('nonce', x)): - - epoch = get_value('epoch', header) - - # reset counters when epoch changes - if epoch != last_epoch: - header_group_count = 0 - header_count = 0 - header_group_name = "" - last_epoch = epoch - - # ensure epoch entry exists and contains all shards as keys - if epoch not in report: - report[epoch] = 
{sid: {} for sid in self.parsed_headers.keys()} - - if get_value('miniBlockHeaders', header) == []: - continue - - nonce = get_value('nonce', header) - round_num = get_value('round', header) - print(f"Processing header: epoch={epoch}, shard={shard_id}, nonce={nonce}, round={round_num}") - - # build result for this header (only cross-shard miniblocks) - result: dict[int, list] = {} - for miniblock in [mb for mb in get_value('miniBlockHeaders', header) if mb.get('senderShardID') == shard_id and mb.get('receiverShardID') != mb.get('senderShardID')]: - print(f" Processing miniblock: hash={miniblock.get('hash')}, senderShardID={miniblock.get('senderShardID')}, receiverShardID={miniblock.get('receiverShardID')}") - mb_hash = miniblock.get('hash') - for mention_type, metadata in self.miniblocks[mb_hash]['mentioned']: - # skip proposed mentions - if 'proposed' in mention_type: - continue - - rn = metadata['round'] - color = miniblocks.get_color_for_state(mention_type, miniblock['txCount'], metadata) - shard_name = f'Shard {metadata["shard_id"]}' if metadata["shard_id"] != 4294967295 else "MetaShard" - # append tuple (label, info, color) - result.setdefault(rn, []).append((shard_name, mb_hash[:15] + '...', COLORS_MAPPING[color])) - - # if result empty -> we don't include this nonce at all, don't count it - if not result: - continue - - # --- Add this nonce to the report and handle grouping --- - # if group start (every 5 actual added nonces) - group_size = 5 - if header_count % group_size == 0: - header_group_count += 1 - header_group_name = f"Nonces {nonce}" - # initialize structure for this group - report[epoch][shard_id][header_group_count] = { - 'group_name': header_group_name, - 'rounds': (round_num, round_num), - 'nonces': {} - } - print(f"Creating new header group: {header_group_name}") - else: - # extend existing group's name - header_group_name += f" - {nonce}" - report[epoch][shard_id][header_group_count]['group_name'] = header_group_name - - # store the nonce's data - report[epoch][shard_id][header_group_count]['nonces'][nonce] = result - - # update group's rounds min/max based on result keys - min_r, max_r = report[epoch][shard_id][header_group_count]['rounds'] - actual_min = min(result.keys()) - actual_max = max(result.keys()) - report[epoch][shard_id][header_group_count]['rounds'] = (min(min_r, actual_min), max(max_r, actual_max)) - - # increment header_count because we added this nonce - header_count += 1 - - return report - def get_data_for_header_horizontal_report(self) -> dict[str, dict[int, Any]]: miniblocks = MiniblockData(self.miniblocks) report: dict[str, dict[int, Any]] = {} @@ -193,7 +109,7 @@ def get_data_for_header_horizontal_report(self) -> dict[str, dict[int, Any]]: # build result for this header (only cross-shard miniblocks) result: dict[int, list] = {} - for miniblock in [mb for mb in get_value('miniBlockHeaders', header) if mb.get('senderShardID') == shard_id and mb.get('receiverShardID') != mb.get('senderShardID')]: + for miniblock in [mb for mb in get_value('miniBlockHeaders', header) if mb.get('senderShardID') == shard_id]: mb_hash = miniblock.get('hash') for mention_type, metadata in self.miniblocks[mb_hash]['mentioned']: # skip proposed mentions @@ -228,7 +144,7 @@ def get_data_for_header_horizontal_report(self) -> dict[str, dict[int, Any]]: class Header: def __init__(self, header: dict[str, Any], status: str): self.metadata: dict[str, Any] = self.get_header_metadata(header) - self.miniblocks: list[tuple[str, dict[str, Any]]] = self.get_miniblocks(header, status) 
+ self.miniblocks: list[tuple[str, dict[str, Any], dict[str, Any]]] = self.get_miniblocks(header, status) # returns 'origin' or 'dest' based on miniblock senderShardID def get_miniblock_shard_type(self, miniblock_shard_id: int) -> str: @@ -244,29 +160,34 @@ def get_header_metadata(self, header: dict[str, Any]) -> dict[str, Any]: "shard_id": header.get('shardID', 4294967295), } - def get_miniblocks(self, header: dict[str, Any], status: str) -> list[tuple[str, dict[str, Any]]]: + def get_miniblocks(self, header: dict[str, Any], status: str) -> list[tuple[str, dict[str, Any], dict[str, Any]]]: miniblocks = [] if Header.isHeaderV2(header): header = header['header'] for miniblock in header.get('miniBlockHeaders', []): miniblock_mention = self.get_miniblock_shard_type(miniblock["senderShardID"]) + f'_{status}' - miniblocks.append((miniblock_mention, miniblock)) + miniblocks.append((miniblock_mention, miniblock, self.metadata.copy())) if Header.isMetaHeader(header): for shard_header in header['shardInfo']: shard_metadata = self.get_header_metadata(shard_header) for miniblock in shard_header.get('shardMiniBlockHeaders', []): miniblock_mention = f'{meta}_{origin_shard if shard_metadata['shard_id'] == miniblock['senderShardID'] else dest_shard}_{status}' - miniblocks.append((miniblock_mention, miniblock)) + miniblocks.append((miniblock_mention, miniblock, self.metadata.copy())) if Header.isMetaHeaderV3(header): for exec_result in shard_header.get('executionResults', []): for miniblock in exec_result.get('miniBlockHeaders', []): miniblock_mention = f'{meta}_{origin_shard if shard_metadata["shard_id"] == miniblock["senderShardID"] else dest_shard}_exec_{status}' - miniblocks.append((miniblock_mention, miniblock)) + miniblocks.append((miniblock_mention, miniblock, self.metadata.copy())) if Header.isHeaderV3(header): for exec_result in header['executionResults']: + base_exec_result = exec_result.get('baseExecutionResult', {}) + exec_result_metadata = self.metadata.copy() + exec_result_metadata['nonce'] = base_exec_result.get('headerNonce', 0) + for miniblock in exec_result.get('miniBlockHeaders', []): miniblock_mention = self.get_miniblock_shard_type(miniblock["senderShardID"]) + f'_{status}_exec' - miniblocks.append((miniblock_mention, miniblock)) + exec_result_metadata['exec_result_hash'] = miniblock.get('hash', '') + miniblocks.append((miniblock_mention, miniblock, exec_result_metadata.copy())) return miniblocks diff --git a/multiversx_cross_shard_analysis/headers_timeline_report.py b/multiversx_cross_shard_analysis/headers_timeline_report.py index bb08afa..b4f16ef 100644 --- a/multiversx_cross_shard_analysis/headers_timeline_report.py +++ b/multiversx_cross_shard_analysis/headers_timeline_report.py @@ -9,12 +9,14 @@ from reportlab.lib.pagesizes import A4 from reportlab.lib.styles import getSampleStyleSheet from reportlab.platypus import (Flowable, PageBreak, Paragraph, - SimpleDocTemplate, Spacer, Table, TableStyle) + SimpleDocTemplate, Spacer, LongTable, TableStyle) from multiversx_cross_shard_analysis.constants import COLORS_MAPPING, Colors from multiversx_cross_shard_analysis.header_structures import (HeaderData, ShardData) +from multiversx_cross_shard_analysis.miniblock_data import MiniblockData + # ----------------------------- # CONFIG (mirrors miniblock report) # ----------------------------- @@ -38,32 +40,35 @@ # build stacked rectangles (same as miniblock version) # ----------------------------- -def build_stack_for_round(items: list[tuple[str, str, colors.Color]], col_width: float) -> 
Drawing: - rows = max(1, len(items)) - total_h = rows * RECT_H - d = Drawing(col_width, total_h) +def build_stack_rows(items: list[tuple[str, str, colors.Color]], col_width: float) -> list[Drawing]: + """ + Instead of one giant Drawing, we return a list of small ones. + Each drawing represents one row in the vertical stack. + """ + row_drawings = [] + + if len(items) == 0: + # Create a single "no data" row + d = Drawing(col_width, RECT_H) + rect_w = max(2, col_width - RECT_PADDING_X * 2) - 4 + d.add(Rect(0, 2, rect_w, 12, fillColor=colors.whitesmoke, strokeColor=colors.grey)) # type: ignore + d.add(String(RECT_PADDING_X + 2, 6, "no data", fontSize=RECT_LABEL_FONT)) + row_drawings.append(d) + return row_drawings - y = total_h - RECT_H for label, info, col in items: + # Create a small drawing for just this one item + d = Drawing(col_width, RECT_H) rect_w = max(2, col_width - RECT_PADDING_X * 2) - 4 - d.add(Rect(0, y + 2, rect_w, RECT_H - 4, fillColor=col, strokeColor=colors.black)) # type: ignore + d.add(Rect(0, 2, rect_w, RECT_H - 4, fillColor=col, strokeColor=colors.black)) # type: ignore text_x = RECT_PADDING_X + 3 - base_y = y + 4 + d.add(String(text_x, 12, label, fontSize=RECT_LABEL_FONT)) + d.add(String(text_x, 4, info, fontSize=RECT_INFO_FONT)) + row_drawings.append(d) - d.add(String(text_x, base_y + 8, label, fontSize=RECT_LABEL_FONT)) - d.add(String(text_x, base_y, info, fontSize=RECT_INFO_FONT)) - - y -= RECT_H - - if len(items) == 0: - rect_w = max(2, col_width - RECT_PADDING_X * 2) - 4 - mid = total_h / 2 - d.add(Rect(0, mid - 6, rect_w, 12, fillColor=colors.whitesmoke, strokeColor=colors.grey)) # type: ignore - d.add(String(RECT_PADDING_X + 2, mid - 2, "no data", fontSize=RECT_LABEL_FONT)) - - return d + return row_drawings # ----------------------------- # check for round gaps @@ -96,18 +101,31 @@ def build_nonce_section(shard_id: int, nonce: int, rounds: list[int], data: dict num_cols = len(rounds) col_width = usable_width / max(1, num_cols) + # 1. Build the Header Row header = [Paragraph(f"{r}", styles["BodyText"]) for r in rounds] - cells = [] - for r in rounds: - items = data.get(r, []) - drawing = build_stack_for_round(items, col_width) - cells.append(drawing) - - tbl = Table( - [header, cells], + # 2. 
Transpose the stacks into rows + # We need to find the max height among all columns to normalize the row count + column_stacks = [build_stack_rows(data.get(r, []), col_width) for r in rounds] + max_rows = max(len(stack) for stack in column_stacks) + + table_data = [header] + + # Fill the table row by row + for i in range(max_rows): + row = [] + for stack in column_stacks: + if i < len(stack): + row.append(stack[i]) + else: + row.append("") # Empty cell if this column has fewer items + table_data.append(row) + + tbl = LongTable( + table_data, colWidths=[col_width] * num_cols, hAlign="LEFT", + splitByRow=True, # This allows the table to break across pages between rows ) tbl_style = [ @@ -115,10 +133,11 @@ def build_nonce_section(shard_id: int, nonce: int, rounds: list[int], data: dict ("BACKGROUND", (0, 0), (-1, 0), colors.whitesmoke), ("ALIGN", (0, 0), (-1, 0), "CENTER"), ("VALIGN", (0, 1), (-1, -1), "TOP"), + ("TOPPADDING", (0, 0), (-1, -1), 0), # Tighten padding for large lists + ("BOTTOMPADDING", (0, 0), (-1, -1), 0), ("FONTSIZE", (0, 0), (-1, 0), ROUND_HEADER_FONT), ] - # add red border if highlighted if highlight: tbl_style.append(("BOX", (0, 0), (-1, -1), 2, colors.red)) @@ -245,7 +264,8 @@ def build_nonce_timeline_pdf(shards_data: dict[int, dict[int, dict[int, list[Any } -if __name__ == "__main__": +def main(): + parser = argparse.ArgumentParser(description="Nonce timeline report generator") group = parser.add_mutually_exclusive_group(required=True) @@ -298,7 +318,7 @@ def build_nonce_timeline_pdf(shards_data: dict[int, dict[int, dict[int, list[Any headers.miniblocks = data["miniblocks"] # process - input_data = headers.get_data_for_header_horizontal_report() + input_data = MiniblockData(headers.miniblocks).get_data_for_header_report() # output path out_folder = os.path.join(base_path, "NonceTimeline") @@ -308,3 +328,7 @@ def build_nonce_timeline_pdf(shards_data: dict[int, dict[int, dict[int, list[Any outfile = os.path.join(out_folder, f"nonce_timeline_report_{epoch}.pdf") build_nonce_timeline_pdf(input_data[epoch], outname=outfile) print(f"Nonce timeline report for Epoch {epoch} generated: {outfile}") + + +if __name__ == "__main__": + main() diff --git a/multiversx_cross_shard_analysis/miniblock_data.py b/multiversx_cross_shard_analysis/miniblock_data.py index 523aa11..56fef53 100644 --- a/multiversx_cross_shard_analysis/miniblock_data.py +++ b/multiversx_cross_shard_analysis/miniblock_data.py @@ -1,6 +1,7 @@ +from enum import Enum from typing import Any -from multiversx_cross_shard_analysis.constants import COLORS_MAPPING, Colors +from multiversx_cross_shard_analysis.constants import COLORS_MAPPING, TYPE_NAMES, Colors from multiversx_cross_shard_analysis.decode_reserved import \ get_default_decoded_data @@ -9,6 +10,24 @@ class MiniblockData: def __init__(self, miniblocks: dict[str, dict[str, Any]]): self.miniblocks = miniblocks + self.verify_miniblocks() + + def verify_miniblocks(self) -> None: + for mb_hash, mb_info in self.miniblocks.items(): + mb_info['hasAlarm'] = False + mb_info['mentioned'] = sorted(mb_info.get('mentioned', []), key=lambda x: (x[1].get('epoch', 0), x[1].get('round', 0))) + last_round = -1 + for mention_type, header in mb_info.get('mentioned', []): + if last_round == -1: + mb_info['first_seen_round'] = header.get('round') + mb_info['last_seen_round'] = header.get('round') + mb_info['first_seen_epoch'] = header.get('epoch') + mb_info['nonce'] = header.get('nonce') + mb_info['senderShardID'] = header.get('shard_id') + elif header.get('round') - last_round > 1: + 
mb_info['hasAlarm'] = True + last_round = header.get('round') + mb_info['last_seen_round'] = last_round def get_color_for_state(self, mention_type: str, tx_count: int, header: dict[str, Any]) -> Colors: reserved = header.get('reserved', {}) @@ -67,8 +86,6 @@ def get_data_for_round_report(self) -> dict[str, Any]: def get_data_for_detail_report(self) -> dict[str, list[dict[str, Any]]]: report = {} for mb_hash, mb_info in self.miniblocks.items(): - if mb_info['senderShardID'] == mb_info['receiverShardID']: - continue # Skip same-shard miniblocks origin_epoch = None mb_data = { @@ -109,3 +126,41 @@ def get_data_for_detail_report(self) -> dict[str, list[dict[str, Any]]]: for epoch, mb_list in report.items(): mb_list.sort(key=lambda x: x['first_seen_round']) return report + + def get_data_for_header_report(self) -> dict[int, dict[int, Any]]: + report: dict[int, dict[int, Any]] = {} + + for mb_hash, mb_info in self.miniblocks.items(): + nonce = mb_info['nonce'] + shard_id = mb_info['senderShardID'] + epoch = mb_info['first_seen_epoch'] + for mention_type, header in mb_info.get('mentioned', []): + if "proposed" in mention_type: + continue + + print(f"Processing miniblock {mb_hash} mentioned in header nonce {header.get('nonce')} round {header.get('round')} epoch {header.get('epoch')} shard {header.get('shard_id')}") + # epoch = header.get('epoch') + if epoch not in report: + report[epoch] = {} + + if shard_id not in report[epoch]: + report[epoch][shard_id] = {} + + if nonce not in report[epoch][shard_id]: + report[epoch][shard_id][nonce] = {} + + round_number = header.get('round') + if round_number not in report[epoch][shard_id][nonce]: + report[epoch][shard_id][nonce][round_number] = [] + + color = COLORS_MAPPING[self.get_color_for_state(mention_type, mb_info['txCount'], header)] + label = f'Shard {header["shard_id"]}' if header["shard_id"] != 4294967295 else "MetaShard" + if mb_info['type'] != 0: + label += f' ({TYPE_NAMES[mb_info["type"]]})' + + report[epoch][shard_id][nonce][round_number].append((label, mb_hash[:15] + '...', color)) + + with open('debug_miniblock_header_report.json', 'w') as f: + import json + json.dump(report, f, indent=4, default=lambda o: o.name if isinstance(o, Enum) else str(o)) + return report diff --git a/multiversx_cross_shard_analysis/miniblocks_round_report.py b/multiversx_cross_shard_analysis/miniblocks_round_report.py index b3cbf78..b08dca7 100644 --- a/multiversx_cross_shard_analysis/miniblocks_round_report.py +++ b/multiversx_cross_shard_analysis/miniblocks_round_report.py @@ -180,9 +180,7 @@ def build_report(epoch: int, rounds_data: dict[int, Any], shards: list[int], out # ---------------------------------------- # main # ---------------------------------------- - -if __name__ == "__main__": - +def main(): parser = argparse.ArgumentParser(description="Miniblock shards timeline report") group = parser.add_mutually_exclusive_group(required=True) group.add_argument("--path", type=str, help="Path to run folder") @@ -223,3 +221,7 @@ def build_report(epoch: int, rounds_data: dict[int, Any], shards: list[int], out outfile = os.path.join(out_folder, f"shards_timeline_report_{epoch}.pdf") build_report(int(epoch), report_dict, shards=[0, 1, 2, 4294967295], outname=outfile) print("→", outfile) + + +if __name__ == "__main__": + main() diff --git a/multiversx_cross_shard_analysis/miniblocks_timeline_report.py b/multiversx_cross_shard_analysis/miniblocks_timeline_report.py index 7989733..fff8b84 100644 --- a/multiversx_cross_shard_analysis/miniblocks_timeline_report.py +++ 
b/multiversx_cross_shard_analysis/miniblocks_timeline_report.py @@ -229,7 +229,7 @@ def build_pdf_from_miniblocks(epoch: int, miniblocks: list[dict[str, Any]], outn doc.build(story) -if __name__ == "__main__": +def main(): parser = argparse.ArgumentParser(description="Miniblock timeline detail report (CLI)") group = parser.add_mutually_exclusive_group(required=True) @@ -272,3 +272,7 @@ def build_pdf_from_miniblocks(epoch: int, miniblocks: list[dict[str, Any]], outn outpath = os.path.join(out_folder, f"miniblock_timeline_report_epoch_{epoch}.pdf") build_pdf_from_miniblocks(int(epoch), report_list, outname=outpath) print(f"Miniblock timeline report generated: {outpath}") + + +if __name__ == "__main__": + main() diff --git a/multiversx_cross_shard_analysis/test_decode_reserved.py b/multiversx_cross_shard_analysis/test_decode_reserved.py index 85fc9eb..e7eed86 100644 --- a/multiversx_cross_shard_analysis/test_decode_reserved.py +++ b/multiversx_cross_shard_analysis/test_decode_reserved.py @@ -62,16 +62,13 @@ "IndexOfFirstTxProcessed": 10, "IndexOfLastTxProcessed": 2305 }, - "meta_dest_shard_commited_headers": { - "ExecutionType": "Normal", - "State": "Final", - "IndexOfFirstTxProcessed": 0, - "IndexOfLastTxProcessed": 2412 - } + "meta_dest_shard_commited_headers": {} } class TestMiniBlockHeader: + def test_get_processing_type1(self): for name, hex_str in mentioned_headers.items(): + print(f"Testing decoding for: {name}") assert decode_reserved_field(hex_str, 2413) == expected[name], f"Decoding failed for {name}" diff --git a/multiversx_cross_shard_analysis/test_miniblocks.py b/multiversx_cross_shard_analysis/test_miniblocks.py new file mode 100644 index 0000000..b69989a --- /dev/null +++ b/multiversx_cross_shard_analysis/test_miniblocks.py @@ -0,0 +1,234 @@ +from enum import Enum +import json +from multiversx_cross_shard_analysis.header_structures import Header, HeaderData, ShardData + +from multiversx_cross_shard_analysis.miniblock_data import MiniblockData + +header = { + "blockBodyType": 0, + "chainID": "31", + "epoch": 2, + "epochStartMetaHash": "", + "executionResults": [ + { + "accumulatedFees": "0", + "baseExecutionResult": { + "gasUsed": 0, + "headerEpoch": 2, + "headerHash": "be0081efbafed4be3738cdd02ab358a20e8e16f83bfe4b4d7858cfb868366f6b", + "headerNonce": 1647, + "headerRound": 1647, + "rootHash": "a55990c083e7868a6f567bfa3fa6ec9fa017f5eda34ee2b667ef7e55290d8259" + }, + "developerFees": "0", + "executedTxCount": 0, + "miniBlockHeaders": [], + "receiptsHash": "0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8" + } + ], + "gasLimit": 0, + "lastExecutionResult": { + "executionResult": { + "gasUsed": 0, + "headerEpoch": 0, + "headerHash": "be0081efbafed4be3738cdd02ab358a20e8e16f83bfe4b4d7858cfb868366f6b", + "headerNonce": 1647, + "headerRound": 1647, + "rootHash": "a55990c083e7868a6f567bfa3fa6ec9fa017f5eda34ee2b667ef7e55290d8259" + }, + "notarizedInRound": 1648 + }, + "leaderSignature": "f25f5ffa015cb16b4173a17742142e6e7435999b3477cec0320c8892c92e72015453331a8c9c707a03d922f3514c7f0a", + "metaBlockHashes": [ + "ff0c0da960b7a41b7e3b4c6f702b427bcb493fb963dcce35074fa8ecb1391608" + ], + "miniBlockHeaders": [ + { + "hash": "52af8b3c899198e823ef94c80fc12cc4ba301e005d8e67f615ba872226a4963c", + "receiverShardID": 0, + "reserved": "1001", + "senderShardID": 0, + "txCount": 809, + "type": 0 + } + ], + "nonce": 1648, + "peerChanges": [], + "prevHash": "be0081efbafed4be3738cdd02ab358a20e8e16f83bfe4b4d7858cfb868366f6b", + "prevRandSeed": 
"b6e86481e0751eaf68c6505382ba783c028b837dbcb7c76db19ef14ec7df3d4a6268d161e7fe743c71473ccb89095691", + "randSeed": "018187ce3f41e8f126f8f2f3c336e98417faac23fc96a0d856ab04db20d4fcc58c632f94a6d8ff349eef42de47b82792", + "receiptsHash": "", + "reserved": "", + "round": 1648, + "shardID": 0, + "softwareVersion": "33", + "timestampMs": 1765297076800, + "txCount": 809 +} + +header_exec_result = { + "blockBodyType": 0, + "chainID": "31", + "epoch": 2, + "epochStartMetaHash": "", + "executionResults": [ + { + "accumulatedFees": "46517500000000000", + "baseExecutionResult": { + "gasUsed": 46517500, + "headerEpoch": 2, + "headerHash": "afbd732a8d4842a2bd6fc1edd466b1f8d8b67cbf8737301c83d9cde03f0e7cf0", + "headerNonce": 1648, + "headerRound": 1648, + "rootHash": "ca61fd6e23ff56a5c58016afd83d810b0fa77b1e39b945b2432696c73458ebf3" + }, + "developerFees": "0", + "executedTxCount": 809, + "miniBlockHeaders": [ + { + "hash": "4df428a4f8c34e62382d7bdbec08749188049959131c2acbd514edff1890b28e", + "receiverShardID": 1, + "reserved": "20a806", + "senderShardID": 0, + "txCount": 809, + "type": 0 + } + ], + "receiptsHash": "0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8" + } + ], + "gasLimit": 0, + "lastExecutionResult": { + "executionResult": { + "gasUsed": 46517500, + "headerEpoch": 0, + "headerHash": "afbd732a8d4842a2bd6fc1edd466b1f8d8b67cbf8737301c83d9cde03f0e7cf0", + "headerNonce": 1648, + "headerRound": 1648, + "rootHash": "ca61fd6e23ff56a5c58016afd83d810b0fa77b1e39b945b2432696c73458ebf3" + }, + "notarizedInRound": 1649 + }, + "leaderSignature": "929e5b34dd6ae016deeab2667b67b41baccda435c01d5ad89c9b56b92db6fc526fe3f7d30ba043878ab091bfe7836392", + "metaBlockHashes": [ + "b782177d39d7495558992faed007536df77f576ed790d7117162a732ebcabd6a" + ], + "miniBlockHeaders": [ + { + "hash": "994ceb37eb426a123501928c8c5b67e59f607557fb5f332d5e55fd297ab5d870", + "receiverShardID": 0, + "reserved": "1001", + "senderShardID": 0, + "txCount": 1610, + "type": 0 + } + ], + "nonce": 1649, + "peerChanges": [], + "prevHash": "afbd732a8d4842a2bd6fc1edd466b1f8d8b67cbf8737301c83d9cde03f0e7cf0", + "prevRandSeed": "018187ce3f41e8f126f8f2f3c336e98417faac23fc96a0d856ab04db20d4fcc58c632f94a6d8ff349eef42de47b82792", + "randSeed": "1cefd79bab2fafda3ad83f665fd9aef5840b0736d3cf61dc2e5bc227dec81d3122b847ccf13289484bb15a2141e1ff08", + "receiptsHash": "", + "reserved": "", + "round": 1649, + "shardID": 0, + "softwareVersion": "33", + "timestampMs": 1765297077400, + "txCount": 1610 +} + + +class TestMiniBlockHeader: + def test_header_data(self): + header_data = HeaderData() + header_data.add_commited_header(header_exec_result) + assert header_data.header_dictionary['commited_headers'][0] == header_exec_result + + header_data.add_proposed_header(header_exec_result) + assert header_data.header_dictionary['proposed_headers'][0] == header_exec_result + + def test_header(self): + header_instance = Header(header_exec_result, 'commited') + assert header_instance.metadata['epoch'] == 2 + assert header_instance.metadata['round'] == 1649 + assert header_instance.metadata['shard_id'] == 0 + assert header_instance.metadata['nonce'] == 1649 + assert header_instance.isHeaderV3(header_exec_result) is True + assert len(header_instance.miniblocks) == 2 + + for mention_type, miniblock, metadata in header_instance.miniblocks: + assert mention_type in ["origin_shard_commited", "origin_shard_commited_exec"] + if mention_type == "origin_shard_commited": + assert miniblock['hash'] == "994ceb37eb426a123501928c8c5b67e59f607557fb5f332d5e55fd297ab5d870" + 
assert metadata['nonce'] == 1649 + elif mention_type == "origin_shard_commited_exec": + assert miniblock['hash'] == "4df428a4f8c34e62382d7bdbec08749188049959131c2acbd514edff1890b28e" + assert metadata['nonce'] == 1648 + + def test_shard_data(self): + header_data = HeaderData() + header_data.add_commited_header(header_exec_result) + header_data.add_proposed_header(header_exec_result) + shard_data = ShardData() + shard_data.add_node(header_data) + assert shard_data.parsed_headers[0].header_dictionary['commited_headers'][0] == header_exec_result + assert shard_data.parsed_headers[0].header_dictionary['proposed_headers'][0] == header_exec_result + assert len(shard_data.miniblocks) == 2 # two miniblocks in the header + + def test_nonce_timeline(self): + header_data = HeaderData() + + header_data.add_commited_header(header) + header_data.add_proposed_header(header) + + header_data.add_commited_header(header_exec_result) + header_data.add_proposed_header(header_exec_result) + + shard_data = ShardData() + shard_data.add_node(header_data) + + print("Miniblocks data:") + print(json.dumps(shard_data.miniblocks, indent=4)) + + timeline = shard_data.get_data_for_header_horizontal_report() + assert len(timeline) == 1 # one epoch + + print("Timeline data:") + print(json.dumps(timeline, indent=4, default=lambda o: o.name if isinstance(o, Enum) else str(o))) + + def test_nonce_timeline_new(self): + header_data = HeaderData() + + header_data.add_commited_header(header) + header_data.add_proposed_header(header) + + header_data.add_commited_header(header_exec_result) + header_data.add_proposed_header(header_exec_result) + + shard_data = ShardData() + shard_data.add_node(header_data) + + miniblock_data = MiniblockData(shard_data.miniblocks) + print("Miniblocks data:") + print(json.dumps(miniblock_data.miniblocks, indent=4)) + + timeline = miniblock_data.get_data_for_header_report() + + print("Timeline data:") + print(json.dumps(timeline, indent=4, default=lambda o: o.name if isinstance(o, Enum) else str(o))) + + def test_miniblock_data_verify(self): + header_data = HeaderData() + + header_data.add_commited_header(header) + header_data.add_proposed_header(header) + + header_data.add_commited_header(header_exec_result) + header_data.add_proposed_header(header_exec_result) + + shard_data = ShardData() + shard_data.add_node(header_data) + + miniblock_data = MiniblockData(shard_data.miniblocks) + print("Miniblocks data:") + print(json.dumps(miniblock_data.miniblocks, indent=4)) From 87acb823c5dfb7ba91c41906e2bf5c4c1640554f Mon Sep 17 00:00:00 2001 From: Mihaela Radian Date: Fri, 19 Dec 2025 05:05:34 +0200 Subject: [PATCH 17/24] MX-17306 Cleanup, added miniblock timeline gap indicator --- .../miniblock_data.py | 33 +++++++++++++--- .../miniblocks_timeline_report.py | 38 ++++++++++++------- 2 files changed, 51 insertions(+), 20 deletions(-) diff --git a/multiversx_cross_shard_analysis/miniblock_data.py b/multiversx_cross_shard_analysis/miniblock_data.py index 56fef53..5354bea 100644 --- a/multiversx_cross_shard_analysis/miniblock_data.py +++ b/multiversx_cross_shard_analysis/miniblock_data.py @@ -138,8 +138,6 @@ def get_data_for_header_report(self) -> dict[int, dict[int, Any]]: if "proposed" in mention_type: continue - print(f"Processing miniblock {mb_hash} mentioned in header nonce {header.get('nonce')} round {header.get('round')} epoch {header.get('epoch')} shard {header.get('shard_id')}") - # epoch = header.get('epoch') if epoch not in report: report[epoch] = {} @@ -160,7 +158,30 @@ def 
get_data_for_header_report(self) -> dict[int, dict[int, Any]]:
         report[epoch][shard_id][nonce][round_number].append((label, mb_hash[:15] + '...', color))

-        with open('debug_miniblock_header_report.json', 'w') as f:
-            import json
-            json.dump(report, f, indent=4, default=lambda o: o.name if isinstance(o, Enum) else str(o))
-        return report
+        return sort_report(report)
+
+
+def sort_report(report: dict[int, dict[int, Any]]) -> dict[int, dict[int, Any]]:
+    out: dict[int, dict[int, Any]] = {}
+
+    for epoch in sorted(report.keys()):
+        out[epoch] = {}
+
+        # meta shard (4294967295) last
+        shard_ids = sorted(
+            report[epoch].keys(),
+            key=lambda s: (s == 4294967295, s),
+        )
+
+        for shard_id in shard_ids:
+            out[epoch][shard_id] = {}
+
+            for nonce in sorted(report[epoch][shard_id].keys()):
+                rounds = report[epoch][shard_id][nonce]
+
+                out[epoch][shard_id][nonce] = {
+                    r: rounds[r]
+                    for r in sorted(rounds.keys())
+                }
+
+    return out
diff --git a/multiversx_cross_shard_analysis/miniblocks_timeline_report.py b/multiversx_cross_shard_analysis/miniblocks_timeline_report.py
index fff8b84..4d2c2d4 100644
--- a/multiversx_cross_shard_analysis/miniblocks_timeline_report.py
+++ b/multiversx_cross_shard_analysis/miniblocks_timeline_report.py
@@ -123,6 +123,15 @@ def build_stack_for_round(items: list[tuple[str, str, colors.Color]], col_width:
 # -----------------------------
 # miniblock section
 # -----------------------------
+def has_round_gap(rounds: list[int]) -> bool:
+    if len(rounds) < 2:
+        return False
+    for a, b in zip(rounds, rounds[1:]):
+        if b != a + 1:
+            return True
+    return False
+
+
 def build_miniblock_section(miniblock: dict[str, Any], page_usable_width: float) -> list[Flowable]:
     flow = []
     styles = getSampleStyleSheet()
@@ -143,9 +152,8 @@ def build_miniblock_section(miniblock: dict[str, Any], page_usable_width: float)
         flow.append(Spacer(1, 6))
         return flow

-    first_r = miniblock.get("first_seen_round", 0)
-    last_r = miniblock.get("last_seen_round", 0)
-    rounds = list(range(first_r, last_r + 1))
+    mentioned = miniblock.get("mentioned", {})
+    rounds = sorted(mentioned.keys())

     num_cols = max(1, len(rounds))
     col_width = page_usable_width / num_cols
@@ -160,6 +168,7 @@ def build_miniblock_section(miniblock: dict[str, Any], page_usable_width: float)
         items = mentioned.get(r, [])
         drawing = build_stack_for_round(items, col_width)
         cells.append(drawing)
+    gap = has_round_gap(rounds)

     tbl = Table(
         [header, cells],
@@ -167,17 +176,18 @@ def build_miniblock_section(miniblock: dict[str, Any], page_usable_width: float)
         hAlign="LEFT",
     )

-    tbl.setStyle(
-        TableStyle(
-            [
-                ("GRID", (0, 0), (-1, -1), 0.25, colors.grey),
-                ("BACKGROUND", (0, 0), (-1, 0), colors.whitesmoke),
-                ("ALIGN", (0, 0), (-1, 0), "CENTER"),
-                ("FONTSIZE", (0, 0), (-1, 0), ROUND_HEADER_FONT),
-                ("VALIGN", (0, 1), (-1, -1), "TOP"),
-            ]
-        )
-    )
+    style = [
+        ("GRID", (0, 0), (-1, -1), 0.25, colors.grey),
+        ("BACKGROUND", (0, 0), (-1, 0), colors.whitesmoke),
+        ("ALIGN", (0, 0), (-1, 0), "CENTER"),
+        ("FONTSIZE", (0, 0), (-1, 0), ROUND_HEADER_FONT),
+        ("VALIGN", (0, 1), (-1, -1), "TOP"),
+    ]
+
+    if gap:
+        style.append(("BOX", (0, 0), (-1, -1), 2, colors.red))
+
+    tbl.setStyle(TableStyle(style))

     flow.append(tbl)
     flow.append(Spacer(1, 8))
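Note: the gap indicator added above flags a miniblock whenever two consecutive mentions are more than one round apart, and marks the section with a red BOX. A quick self-contained check of that rule (illustrative only; the function body is copied from the hunk above):

    def has_round_gap(rounds: list[int]) -> bool:
        # rounds is expected ascending, as produced by
        # sorted(mentioned.keys()) in build_miniblock_section
        if len(rounds) < 2:
            return False
        for a, b in zip(rounds, rounds[1:]):
            if b != a + 1:
                return True
        return False

    assert has_round_gap([441, 442, 443]) is False  # consecutive mentions, no alarm
    assert has_round_gap([441, 443]) is True        # round 442 missing -> red box
    assert has_round_gap([441]) is False            # a single mention cannot gap

From 25c3890275b8b540b29ea5c2f9942d6c9916abc6 Mon Sep 17 00:00:00 2001
From: Mihaela Radian
Date: Sun, 21 Dec 2025 18:18:58 +0200
Subject: [PATCH 18/24] MX-17306 Added issues report

---
 multiversx_cross_shard_analysis/constants.py | 20 +-
 .../gather_data.py | 4 +-
 .../header_analysis_parser.py | 2 +-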
.../header_structures.py | 26 +- .../headers_alarms_report.py | 338 ++++++++++++++++++ .../headers_timeline_report.py | 14 +- multiversx_cross_shard_analysis/issues.py | 163 +++++++++ .../miniblock_data.py | 206 ++++++++++- .../miniblocks_timeline_report.py | 3 +- .../test_decode_reserved.py | 16 +- .../test_miniblocks.py | 40 ++- 11 files changed, 753 insertions(+), 79 deletions(-) create mode 100644 multiversx_cross_shard_analysis/headers_alarms_report.py create mode 100644 multiversx_cross_shard_analysis/issues.py diff --git a/multiversx_cross_shard_analysis/constants.py b/multiversx_cross_shard_analysis/constants.py index df98d99..a34da72 100644 --- a/multiversx_cross_shard_analysis/constants.py +++ b/multiversx_cross_shard_analysis/constants.py @@ -19,6 +19,10 @@ "origin_shard_proposed", "origin_shard_committed", + # miniblock is mentioned in an execution result, either on origin or destination shard + "origin_exec_proposed", + "origin_exec_committed", + # notarization of shard miniblock when meta includes the shard header "meta_origin_shard_proposed", "meta_origin_shard_committed", @@ -27,18 +31,14 @@ "dest_shard_proposed", "dest_shard_committed", - # notarization of shard miniblock when meta includes the shard header - "meta_dest_shard_proposed", - "meta_dest_shard_committed", - - # miniblock is mentioned in an execution result, either on origin or destination shard - "origin_exec_proposed", - "origin_exec_committed", - # miniblock is mentioned in an execution result, either on origin or destination shard "dest_exec_proposed", "dest_exec_committed", + # notarization of shard miniblock when meta includes the shard header + "meta_dest_shard_proposed", + "meta_dest_shard_committed", + # notarization of execution results when meta includes the header containing the execution result for origin shard "meta_origin_exec_proposed", "meta_origin_exec_committed", @@ -112,10 +112,10 @@ Colors.meta_dest_committed: colors.lightblue, Colors.origin_exec_proposed: colors.khaki, Colors.origin_exec_partial_executed: colors.gold, - Colors.origin_exec_final: colors.darkgoldenrod, + Colors.origin_exec_final: colors.yellow, Colors.dest_exec_proposed: colors.lightcoral, Colors.dest_exec_partial_executed: colors.crimson, - Colors.dest_exec_final: colors.firebrick, + Colors.dest_exec_final: colors.pink, Colors.meta_origin_exec_committed: colors.mediumseagreen, Colors.meta_dest_exec_committed: colors.cornflowerblue, } diff --git a/multiversx_cross_shard_analysis/gather_data.py b/multiversx_cross_shard_analysis/gather_data.py index 6e26986..1f8d305 100644 --- a/multiversx_cross_shard_analysis/gather_data.py +++ b/multiversx_cross_shard_analysis/gather_data.py @@ -48,7 +48,7 @@ def gather_data(): build_pdf_from_miniblocks(int(epoch), mb_data[epoch], outname=outfile) print("→", outfile) - input_data = handler.shard_data.get_data_for_header_horizontal_report() + input_data, nonce_alarms = MiniblockData(handler.shard_data.miniblocks).get_data_for_header_report() out_folder = os.path.join(handler.run_name, "NonceTimeline") out_folder = os.path.join('Reports', out_folder) os.makedirs(out_folder, exist_ok=True) @@ -56,7 +56,7 @@ def gather_data(): for epoch in sorted(input_data.keys()): print(f"Epoch: {epoch}") outfile = os.path.join(out_folder, f"nonce_timeline_report_{epoch}.pdf") - build_nonce_timeline_pdf(input_data[epoch], outname=outfile) + build_nonce_timeline_pdf(input_data[epoch], nonce_alarms, outname=outfile) print("→", outfile) diff --git a/multiversx_cross_shard_analysis/header_analysis_parser.py 
b/multiversx_cross_shard_analysis/header_analysis_parser.py index dba0e2c..a529189 100644 --- a/multiversx_cross_shard_analysis/header_analysis_parser.py +++ b/multiversx_cross_shard_analysis/header_analysis_parser.py @@ -32,7 +32,7 @@ def process_match(self, line: str, end_index: int, pattern_idx: int, args: dict[ if pattern_idx < 2: self.parsed_headers.add_proposed_header(header) elif pattern_idx == 2: - self.parsed_headers.add_commited_header(header) + self.parsed_headers.add_committed_header(header) return {} diff --git a/multiversx_cross_shard_analysis/header_structures.py b/multiversx_cross_shard_analysis/header_structures.py index dc2d3dd..fae85b8 100644 --- a/multiversx_cross_shard_analysis/header_structures.py +++ b/multiversx_cross_shard_analysis/header_structures.py @@ -21,18 +21,18 @@ class HeaderData: def __init__(self): self.header_dictionary = { 'proposed_headers': [], - 'commited_headers': [] + 'committed_headers': [] } self.seen_headers: dict[str, set[str]] = {'proposed_headers': set(), - 'commited_headers': set()} + 'committed_headers': set()} def reset(self): self.header_dictionary = { 'proposed_headers': [], - 'commited_headers': [] + 'committed_headers': [] } self.seen_headers: dict[str, set[str]] = {'proposed_headers': set(), - 'commited_headers': set()} + 'committed_headers': set()} def add_proposed_header(self, header: dict[str, Any]) -> bool: nonce = get_value('nonce', header) @@ -42,12 +42,12 @@ def add_proposed_header(self, header: dict[str, Any]) -> bool: self.seen_headers['proposed_headers'].add(nonce) return True - def add_commited_header(self, header: dict[str, Any]) -> bool: + def add_committed_header(self, header: dict[str, Any]) -> bool: nonce = get_value('nonce', header) - if nonce in self.seen_headers['commited_headers']: + if nonce in self.seen_headers['committed_headers']: return False - self.header_dictionary['commited_headers'].append(header) - self.seen_headers['commited_headers'].add(nonce) + self.header_dictionary['committed_headers'].append(header) + self.seen_headers['committed_headers'].add(nonce) return True @@ -58,14 +58,14 @@ def __init__(self): self.seen_miniblock_hashes = set() def add_node(self, node_data: HeaderData): - if node_data.header_dictionary['commited_headers'] == []: - node_data.header_dictionary['commited_headers'] = node_data.header_dictionary['proposed_headers'].copy() + if node_data.header_dictionary['committed_headers'] == []: + node_data.header_dictionary['committed_headers'] = node_data.header_dictionary['proposed_headers'].copy() for header_status in node_data.header_dictionary.keys(): for header in node_data.header_dictionary[header_status]: shard_id = get_shard_id(header) added = False - if header_status == 'commited_headers': - added = self.parsed_headers[shard_id].add_commited_header(header) + if header_status == 'committed_headers': + added = self.parsed_headers[shard_id].add_committed_header(header) elif header_status == 'proposed_headers': added = self.parsed_headers[shard_id].add_proposed_header(header) else: @@ -92,7 +92,7 @@ def get_data_for_header_horizontal_report(self) -> dict[str, dict[int, Any]]: for shard_id, header_data in self.parsed_headers.items(): - for header in sorted(header_data.header_dictionary['commited_headers'], + for header in sorted(header_data.header_dictionary['committed_headers'], key=lambda x: get_value('nonce', x)): epoch = get_value('epoch', header) diff --git a/multiversx_cross_shard_analysis/headers_alarms_report.py b/multiversx_cross_shard_analysis/headers_alarms_report.py new 
file mode 100644 index 0000000..a583e36 --- /dev/null +++ b/multiversx_cross_shard_analysis/headers_alarms_report.py @@ -0,0 +1,338 @@ +import argparse +import json +import os +import sys +from typing import Any + +from reportlab.graphics.shapes import Drawing, Rect, String +from reportlab.lib import colors +from reportlab.lib.pagesizes import A4 +from reportlab.lib.styles import getSampleStyleSheet +from reportlab.platypus import (Flowable, LongTable, PageBreak, Paragraph, + SimpleDocTemplate, Spacer, TableStyle) + +from multiversx_cross_shard_analysis.constants import COLORS_MAPPING, Colors +from multiversx_cross_shard_analysis.header_structures import (HeaderData, + ShardData) +from multiversx_cross_shard_analysis.miniblock_data import MiniblockData + +# ----------------------------- +# CONFIG (mirrors miniblock report) +# ----------------------------- + +PAGE_WIDTH, PAGE_HEIGHT = A4 +LEFT_MARGIN = RIGHT_MARGIN = 20 +TOP_MARGIN = BOTTOM_MARGIN = 20 + +RECT_H = 20 +RECT_PADDING_X = 4 +ROUND_HEADER_FONT = 7 +RECT_LABEL_FONT = 8 +RECT_INFO_FONT = 8 + +SECTION_BASE_HEIGHT = 110 # same idea as miniblock +EXTRA_LINE_HEIGHT = 18 # additional rows per stack +TITLE_HEIGHT = 60 + + +# ----------------------------- +# build stacked rectangles (same as miniblock version) +# ----------------------------- + +def build_stack_rows(items: list[tuple[str, str, colors.Color]], col_width: float) -> list[Drawing]: + """ + Instead of one giant Drawing, we return a list of small ones. + Each drawing represents one row in the vertical stack. + """ + row_drawings = [] + + if len(items) == 0: + # Create a single "no data" row + d = Drawing(col_width, RECT_H) + rect_w = max(2, col_width - RECT_PADDING_X * 2) - 4 + d.add(Rect(0, 2, rect_w, 12, fillColor=colors.whitesmoke, strokeColor=colors.grey)) # type: ignore + d.add(String(RECT_PADDING_X + 2, 6, "no data", fontSize=RECT_LABEL_FONT)) + row_drawings.append(d) + return row_drawings + + for label, info, col in items: + # Create a small drawing for just this one item + d = Drawing(col_width, RECT_H) + rect_w = max(2, col_width - RECT_PADDING_X * 2) - 4 + + d.add(Rect(0, 2, rect_w, RECT_H - 4, fillColor=col, strokeColor=colors.black)) # type: ignore + + text_x = RECT_PADDING_X + 3 + d.add(String(text_x, 12, label, fontSize=RECT_LABEL_FONT)) + d.add(String(text_x, 4, info, fontSize=RECT_INFO_FONT)) + row_drawings.append(d) + + return row_drawings + +# ----------------------------- +# check for round gaps +# ----------------------------- + + +def has_round_gap(rounds: list[int]) -> bool: + if len(rounds) < 2: + return False + rounds_sorted = sorted(rounds) + for a, b in zip(rounds_sorted, rounds_sorted[1:]): + if b != a + 1: + return True + return False + + +# ----------------------------- +# build section for one nonce +# ----------------------------- + +def build_nonce_section(shard_id: int, nonce: int, rounds: list[int], data: dict[int, list[Any]], + usable_width: float, highlight: bool = False) -> list[Flowable]: + + flow = [] + styles = getSampleStyleSheet() + + flow.append(Paragraph(f"Shard {shard_id} — Nonce {nonce}", styles["Heading3"])) + flow.append(Spacer(1, 4)) + + num_cols = len(rounds) + col_width = usable_width / max(1, num_cols) + + # 1. Build the Header Row + header = [Paragraph(f"{r}", styles["BodyText"]) for r in rounds] + + # 2. 
Transpose the stacks into rows + # We need to find the max height among all columns to normalize the row count + column_stacks = [build_stack_rows(data.get(r, []), col_width) for r in rounds] + max_rows = max(len(stack) for stack in column_stacks) + + table_data = [header] + + # Fill the table row by row + for i in range(max_rows): + row = [] + for stack in column_stacks: + if i < len(stack): + row.append(stack[i]) + else: + row.append("") # Empty cell if this column has fewer items + table_data.append(row) + + tbl = LongTable( + table_data, + colWidths=[col_width] * num_cols, + hAlign="LEFT", + splitByRow=True, # This allows the table to break across pages between rows + ) + + tbl_style = [ + ("GRID", (0, 0), (-1, -1), 0.25, colors.grey), + ("BACKGROUND", (0, 0), (-1, 0), colors.whitesmoke), + ("ALIGN", (0, 0), (-1, 0), "CENTER"), + ("VALIGN", (0, 1), (-1, -1), "TOP"), + ("TOPPADDING", (0, 0), (-1, -1), 0), # Tighten padding for large lists + ("BOTTOMPADDING", (0, 0), (-1, -1), 0), + ("FONTSIZE", (0, 0), (-1, 0), ROUND_HEADER_FONT), + ] + + if highlight: + tbl_style.append(("BOX", (0, 0), (-1, -1), 2, colors.red)) + + tbl.setStyle(TableStyle(tbl_style)) + + flow.append(tbl) + flow.append(Spacer(1, 8)) + return flow + + +# ----------------------------- +# PDF builder +# ----------------------------- + +def build_nonce_timeline_pdf(alarm_data: dict[str, dict[int, dict[int, dict[int, list[Any]]]]], + outname="nonce_alarms.pdf"): + doc = SimpleDocTemplate( + outname, + pagesize=A4, + leftMargin=LEFT_MARGIN, + rightMargin=RIGHT_MARGIN, + topMargin=TOP_MARGIN, + bottomMargin=BOTTOM_MARGIN, + ) + + usable_width = PAGE_WIDTH - LEFT_MARGIN - RIGHT_MARGIN + MAX_H = PAGE_HEIGHT - TOP_MARGIN - BOTTOM_MARGIN + + styles = getSampleStyleSheet() + story = [] + story.append(Paragraph("Nonce Alarms Report", styles["Title"])) + story.append(Spacer(1, 10)) + + current_h = 0 + first_page = True + for alarm, shards_data in alarm_data.items(): + if not shards_data: + continue + story.append(Paragraph(f"Alarm: {alarm}", styles["Heading2"])) + story.append(Spacer(1, 6)) + current_h += 36 # approx height of heading + spacer + + for shard_id, shard_dict in shards_data.items(): + for nonce, rdata in sorted(shard_dict.items()): + # height estimate based on max stack height + max_stack = max((len(v) for v in rdata.values()), default=1) + h_needed = SECTION_BASE_HEIGHT + max(0, max_stack - 2) * EXTRA_LINE_HEIGHT + + effective_page_height = MAX_H - (TITLE_HEIGHT if first_page else 0) + + if current_h + h_needed > effective_page_height: + story.append(PageBreak()) + current_h = 0 + first_page = False + + round_list = list(rdata.keys()) + story.extend(build_nonce_section(shard_id, nonce, round_list, rdata, usable_width)) + current_h += h_needed + + doc.build(story) + + +# ----------------------------- Example input data ------------------------------ +input_data = { + 0: { + 1: { + 100: [('origin_final', 'Shard 0', COLORS_MAPPING[Colors.origin_final])], + 101: [('origin_notarized', 'Shard 0', COLORS_MAPPING[Colors.meta_origin_committed])], + 102: [('dest_proposed', 'Shard 1', COLORS_MAPPING[Colors.dest_proposed]), ('dest_final', 'Shard 2', COLORS_MAPPING[Colors.dest_final])], + 103: [('dest_partial', 'Shard 1', COLORS_MAPPING[Colors.dest_partial_executed]), ('dest_notarized', 'Shard 2', COLORS_MAPPING[Colors.meta_dest_committed])], + 104: [('dest_final', 'Shard 1', COLORS_MAPPING[Colors.dest_final])], + 105: [('dest_notarized', 'Shard 1', COLORS_MAPPING[Colors.meta_dest_committed])], + }, + 2: { + 101: 
[('origin_proposed', 'Shard 0', COLORS_MAPPING[Colors.origin_proposed])], + 103: [('origin_final', 'Shard 0', COLORS_MAPPING[Colors.origin_final])], + 104: [('dest_final', 'Shard 2', COLORS_MAPPING[Colors.dest_final]), ('origin_notarized', 'Shard 0', COLORS_MAPPING[Colors.meta_origin_committed])], + 105: [('dest_notarized', 'Shard 2', COLORS_MAPPING[Colors.meta_dest_committed])], + } + }, + 1: { + 1: { + 101: [('N1', 'S1', COLORS_MAPPING[Colors.origin_final])], + 102: [('N1', 'S1', COLORS_MAPPING[Colors.meta_origin_committed])], + 103: [('N1', 'S0', COLORS_MAPPING[Colors.dest_proposed]), ('N1', 'S2', COLORS_MAPPING[Colors.dest_final])], + 104: [('N1', 'S0', COLORS_MAPPING[Colors.dest_partial_executed]), ('N1', 'S2', COLORS_MAPPING[Colors.meta_dest_committed])], + 105: [('N1', 'S0', COLORS_MAPPING[Colors.dest_final])], + 106: [('N1', 'S0', COLORS_MAPPING[Colors.meta_dest_committed])], + }, + 2: { + 102: [('N2', 'S1', COLORS_MAPPING[Colors.origin_partial_executed])], + 104: [('N2', 'S1', COLORS_MAPPING[Colors.origin_final])], + 105: [('N2', 'S2', COLORS_MAPPING[Colors.dest_final]), ('N2', 'S1', COLORS_MAPPING[Colors.meta_origin_committed])], + 106: [('N2', 'S2', COLORS_MAPPING[Colors.meta_dest_committed])], + }, + }, + 2: { + 1: { + 100: [('N1', 'S2', COLORS_MAPPING[Colors.origin_final])], + 101: [('N1', 'S2', COLORS_MAPPING[Colors.meta_origin_committed])], + 102: [('N1', 'S0', COLORS_MAPPING[Colors.dest_final]), ('N1', 'S1', COLORS_MAPPING[Colors.dest_final])], + 103: [('N1', 'S0', COLORS_MAPPING[Colors.meta_dest_committed]), ('N1', 'S1', COLORS_MAPPING[Colors.meta_dest_committed])], + }, + 2: { + 101: [('N2', 'S2', COLORS_MAPPING[Colors.origin_final])], + 102: [('N2', 'S2', COLORS_MAPPING[Colors.meta_origin_committed])], + 103: [('N2', 'S0', COLORS_MAPPING[Colors.dest_final])], + 104: [('N2', 'S0', COLORS_MAPPING[Colors.meta_dest_committed]), ('N2', 'S1', COLORS_MAPPING[Colors.dest_final])], + 105: [('N2', 'S1', COLORS_MAPPING[Colors.meta_dest_committed])], + }, + 3: { + 103: [('N3', 'S2', COLORS_MAPPING[Colors.origin_final])], + 104: [('N3', 'S2', COLORS_MAPPING[Colors.meta_origin_committed])], + 105: [('N3', 'S1', COLORS_MAPPING[Colors.dest_final])], + 106: [('N3', 'S1', COLORS_MAPPING[Colors.dest_final])], + 107: [('N3', 'S1', COLORS_MAPPING[Colors.meta_dest_committed])], + }, + }, + 4294967295: { + 1: { + 100: [('N1', 'M', COLORS_MAPPING[Colors.origin_final])], + 103: [('N1', 'M', COLORS_MAPPING[Colors.meta_origin_committed])], + 104: [('N1', 'S0', COLORS_MAPPING[Colors.dest_final]), ('N1', 'S1', COLORS_MAPPING[Colors.dest_final])], + 105: [('N1', 'S0', COLORS_MAPPING[Colors.meta_dest_committed]), ('N1', 'S1', COLORS_MAPPING[Colors.meta_dest_committed]), ('N1', 'S2', COLORS_MAPPING[Colors.dest_final])], + 106: [('N1', 'S2', COLORS_MAPPING[Colors.meta_dest_committed])], + } + } + +} + + +def main(): + + parser = argparse.ArgumentParser(description="Nonce timeline alarms report generator") + + group = parser.add_mutually_exclusive_group(required=True) + group.add_argument("--path", type=str, help="Path to folder containing run output") + group.add_argument("--run-name", type=str, help="Name of the run inside ./Reports/") + + args = parser.parse_args() + + # resolve final folder path + if args.path: + base_path = args.path + else: + base_path = os.path.join("Reports", args.run_name) + + # verify base folder exists + if not os.path.isdir(base_path): + print(f"Error: folder not found: {base_path}") + sys.exit(1) + + # verify expected files exist + shard_ids = [0, 1, 2, 4294967295] + missing 
= [] + + for shard in shard_ids: + p = os.path.join(base_path, "Shards", f"{shard}_report.json") + if not os.path.isfile(p): + missing.append(p) + + miniblocks_path = os.path.join(base_path, "Miniblocks", "miniblocks_report.json") + if not os.path.isfile(miniblocks_path): + missing.append(miniblocks_path) + + if missing: + print("Error: missing required files:") + for m in missing: + print(" -", m) + sys.exit(1) + + # load JSONs + headers = ShardData() + + for shard in shard_ids: + with open(os.path.join(base_path, "Shards", f"{shard}_report.json")) as f: + data = json.load(f) + headers.parsed_headers[shard] = HeaderData() + headers.parsed_headers[shard].header_dictionary = data["shards"] + + with open(miniblocks_path) as f: + data = json.load(f) + headers.miniblocks = data["miniblocks"] + + # process + input_data = MiniblockData(headers.miniblocks).get_data_for_header_alarms_report() + + # output path + out_folder = os.path.join(base_path, "NonceAlarms") + os.makedirs(out_folder, exist_ok=True) + + for epoch in sorted(input_data.keys()): + outfile = os.path.join(out_folder, f"nonce_alarms_report_{epoch}.pdf") + build_nonce_timeline_pdf(input_data[epoch], outname=outfile) + print(f"Nonce alarms report for Epoch {epoch} generated: {outfile}") + + +if __name__ == "__main__": + main() diff --git a/multiversx_cross_shard_analysis/headers_timeline_report.py b/multiversx_cross_shard_analysis/headers_timeline_report.py index b4f16ef..61779de 100644 --- a/multiversx_cross_shard_analysis/headers_timeline_report.py +++ b/multiversx_cross_shard_analysis/headers_timeline_report.py @@ -8,13 +8,12 @@ from reportlab.lib import colors from reportlab.lib.pagesizes import A4 from reportlab.lib.styles import getSampleStyleSheet -from reportlab.platypus import (Flowable, PageBreak, Paragraph, - SimpleDocTemplate, Spacer, LongTable, TableStyle) +from reportlab.platypus import (Flowable, LongTable, PageBreak, Paragraph, + SimpleDocTemplate, Spacer, TableStyle) from multiversx_cross_shard_analysis.constants import COLORS_MAPPING, Colors from multiversx_cross_shard_analysis.header_structures import (HeaderData, ShardData) - from multiversx_cross_shard_analysis.miniblock_data import MiniblockData # ----------------------------- @@ -153,6 +152,7 @@ def build_nonce_section(shard_id: int, nonce: int, rounds: list[int], data: dict # ----------------------------- def build_nonce_timeline_pdf(shards_data: dict[int, dict[int, dict[int, list[Any]]]], + nonce_alarms: dict[int, set[int]], outname="nonce_timeline.pdf"): doc = SimpleDocTemplate( outname, @@ -188,8 +188,8 @@ def build_nonce_timeline_pdf(shards_data: dict[int, dict[int, dict[int, list[Any first_page = False round_list = list(rdata.keys()) - gap = has_round_gap(round_list) - story.extend(build_nonce_section(shard_id, nonce, round_list, rdata, usable_width, gap)) + alarm = nonce in nonce_alarms[shard_id] + story.extend(build_nonce_section(shard_id, nonce, round_list, rdata, usable_width, alarm)) current_h += h_needed doc.build(story) @@ -318,7 +318,7 @@ def main(): headers.miniblocks = data["miniblocks"] # process - input_data = MiniblockData(headers.miniblocks).get_data_for_header_report() + input_data, nonce_alarms = MiniblockData(headers.miniblocks).get_data_for_header_report() # output path out_folder = os.path.join(base_path, "NonceTimeline") @@ -326,7 +326,7 @@ def main(): for epoch in sorted(input_data.keys()): outfile = os.path.join(out_folder, f"nonce_timeline_report_{epoch}.pdf") - build_nonce_timeline_pdf(input_data[epoch], outname=outfile) + 
build_nonce_timeline_pdf(input_data[epoch], nonce_alarms, outname=outfile) print(f"Nonce timeline report for Epoch {epoch} generated: {outfile}") diff --git a/multiversx_cross_shard_analysis/issues.py b/multiversx_cross_shard_analysis/issues.py new file mode 100644 index 0000000..2c847f5 --- /dev/null +++ b/multiversx_cross_shard_analysis/issues.py @@ -0,0 +1,163 @@ +from enum import Enum +from typing import Any, Callable + +MAX_ROUND_GAP_ALLOWED = 1 +SUPERNOVA_ACTIVATION_EPOCH = 2 + + +class Issues(Enum): + MISSING_OR_DUPLICATE_DESTINATION = 'missing_or_duplicate_destination' + WRONG_PROCESSING_ORDER = 'wrong_processing_order' + GAP_BETWEEN_ROUNDS = 'gap_between_rounds' + + # Logic for: GAP_BETWEEN_ROUNDS + def check_gap_between_rounds(self, mb_info: dict[str, Any]) -> bool: + last_round = -1 + for _, mentioning_header in mb_info.get('mentioned', []): + if last_round == -1: + last_round = mentioning_header.get('round') + elif mentioning_header.get('round') - last_round > MAX_ROUND_GAP_ALLOWED: + return True + last_round = mentioning_header.get('round') + return False + + # Logic for: MISSING_DESTINATION + def check_missing_or_duplicate_destination(self, mb_info: dict[str, Any]) -> bool: + receiver = mb_info.get("receiverShardID") + count = 0 + + for _, header in mb_info.get("mentioned", []): + if header.get("shard_id") == receiver and mb_info.get("type") in [0, 90]: + count += 1 + + is_dest_missing = count == 0 and mb_info.get("type") in [0, 90] + is_dest_duplicate = count > 2 and mb_info.get("type") in [0, 90] and mb_info.get("first_seen_epoch", 0) >= SUPERNOVA_ACTIVATION_EPOCH + + return is_dest_missing or is_dest_duplicate + + # Logic for: WRONG_PROCESSING_ORDER + def check_wrong_order(self, mb_info: dict[str, Any]) -> bool: + max_phase = -1 + + for mtype, data in sorted(mb_info.get('mentioned', []), key=lambda x: x[1].get('round', 0)): + if 'exec' in mtype: + phase = 1 if 'origin' in mtype else 4 + elif 'meta' in mtype: + phase = 2 if 'origin' in mtype else 5 + else: + phase = 0 if 'origin' in mtype else 3 + + if phase < max_phase: + return True + max_phase = phase + + return False + + def run_check(self, issue_type: 'Issues', mb_info: dict[str, Any]) -> bool: + """Helper to route to the correct method.""" + check_map: dict[Issues, Callable] = { + Issues.MISSING_OR_DUPLICATE_DESTINATION: self.check_missing_or_duplicate_destination, + Issues.WRONG_PROCESSING_ORDER: self.check_wrong_order, + Issues.GAP_BETWEEN_ROUNDS: self.check_gap_between_rounds, + } + return check_map[issue_type](mb_info) + + +''' +Example miniblock structure after being processed and enriched: + { + "hash": "5db8a831cad452a5d85aa1b7aa033f864827d083f54fc5133f83e8e5d16a2dac", + "receiverShardID": 1, + "reserved": "209208", + "senderShardID": 0, + "txCount": 1043, + "type": 0, + "first_seen_round": 441, + "last_seen_round": 443, + "first_seen_epoch": 2, + "nonce": 440, + "mentioned": [ + [ + "origin_shard_proposed_headers_exec", + { + "nonce": 440, + "round": 441, + "epoch": 2, + "shard_id": 0, + "reserved": { + "ExecutionType": "Normal", + "State": "Final", + "IndexOfFirstTxProcessed": 0, + "IndexOfLastTxProcessed": 26 + } + } + ], + [ + "origin_shard_committed_headers_exec", + { + "nonce": 440, + "round": 441, + "epoch": 2, + "shard_id": 0, + "reserved": { + "ExecutionType": "Normal", + "State": "Final", + "IndexOfFirstTxProcessed": 0, + "IndexOfLastTxProcessed": 26 + } + } + ], + [ + "dest_shard_proposed_headers", + { + "nonce": 443, + "round": 443, + "epoch": 2, + "shard_id": 1, + "reserved": { + "ExecutionType": 
"Normal", + "State": "Proposed", + "IndexOfFirstTxProcessed": 0, + "IndexOfLastTxProcessed": 1042 + } + } + ], + [ + "dest_shard_committed_headers", + { + "nonce": 443, + "round": 443, + "epoch": 2, + "shard_id": 1, + "reserved": { + "ExecutionType": "Normal", + "State": "Proposed", + "IndexOfFirstTxProcessed": 0, + "IndexOfLastTxProcessed": 1042 + } + } + ], + [ + "meta_origin_shard_proposed_headers", + { + "nonce": 442, + "round": 442, + "epoch": 2, + "shard_id": 4294967295, + "reserved": {} + } + ], + [ + "meta_origin_shard_committed_headers", + { + "nonce": 442, + "round": 442, + "epoch": 2, + "shard_id": 4294967295, + "reserved": {} + } + ] + ] + } + +''' diff --git a/multiversx_cross_shard_analysis/miniblock_data.py b/multiversx_cross_shard_analysis/miniblock_data.py index 5354bea..10bb3d8 100644 --- a/multiversx_cross_shard_analysis/miniblock_data.py +++ b/multiversx_cross_shard_analysis/miniblock_data.py @@ -1,9 +1,12 @@ -from enum import Enum + +import json from typing import Any -from multiversx_cross_shard_analysis.constants import COLORS_MAPPING, TYPE_NAMES, Colors +from multiversx_cross_shard_analysis.constants import (COLORS_MAPPING, + TYPE_NAMES, Colors) from multiversx_cross_shard_analysis.decode_reserved import \ get_default_decoded_data +from multiversx_cross_shard_analysis.issues import Issues class MiniblockData: @@ -14,20 +17,24 @@ def __init__(self, miniblocks: dict[str, dict[str, Any]]): def verify_miniblocks(self) -> None: for mb_hash, mb_info in self.miniblocks.items(): - mb_info['hasAlarm'] = False mb_info['mentioned'] = sorted(mb_info.get('mentioned', []), key=lambda x: (x[1].get('epoch', 0), x[1].get('round', 0))) - last_round = -1 - for mention_type, header in mb_info.get('mentioned', []): - if last_round == -1: - mb_info['first_seen_round'] = header.get('round') - mb_info['last_seen_round'] = header.get('round') - mb_info['first_seen_epoch'] = header.get('epoch') - mb_info['nonce'] = header.get('nonce') - mb_info['senderShardID'] = header.get('shard_id') - elif header.get('round') - last_round > 1: - mb_info['hasAlarm'] = True - last_round = header.get('round') - mb_info['last_seen_round'] = last_round + mentioning_header = mb_info['mentioned'][0][1] if mb_info['mentioned'] else None + if mentioning_header: + mb_info['first_seen_round'] = mentioning_header.get('round') + mb_info['first_seen_epoch'] = mentioning_header.get('epoch') + mb_info['nonce'] = mentioning_header.get('nonce') + mb_info['senderShardID'] = mentioning_header.get('shard_id') + mb_info['alarms'] = [] + + # Perform the configurable checks + for issue in Issues: + if issue.run_check(issue, mb_info): + mb_info['alarms'].append(issue.name) + + # Set the general alarm flag if any issues were found + mb_info['hasAlarm'] = len(mb_info['alarms']) > 0 + if mb_hash.startswith("66bc97b69075f10"): + print(json.dumps(mb_info, indent=4)) def get_color_for_state(self, mention_type: str, tx_count: int, header: dict[str, Any]) -> Colors: reserved = header.get('reserved', {}) @@ -96,6 +103,7 @@ def get_data_for_detail_report(self) -> dict[str, list[dict[str, Any]]]: "senderShardID": mb_info['senderShardID'], "txCount": mb_info['txCount'], "type": mb_info['type'], + "hasAlarm": mb_info['hasAlarm'], "mentioned": {}, } for mention_type, header in mb_info.get('mentioned', []): @@ -127,13 +135,18 @@ def get_data_for_detail_report(self) -> dict[str, list[dict[str, Any]]]: mb_list.sort(key=lambda x: x['first_seen_round']) return report - def get_data_for_header_report(self) -> dict[int, dict[int, Any]]: + def 
get_data_for_header_report(self) -> tuple[dict[int, dict[int, Any]], dict[int, set[int]]]: report: dict[int, dict[int, Any]] = {} + nonce_alarms = dict[int, set[int]]() for mb_hash, mb_info in self.miniblocks.items(): nonce = mb_info['nonce'] shard_id = mb_info['senderShardID'] epoch = mb_info['first_seen_epoch'] + hasAlarm = mb_info['hasAlarm'] + if hasAlarm: + nonce_alarms.setdefault(shard_id, set()).add(nonce) + for mention_type, header in mb_info.get('mentioned', []): if "proposed" in mention_type: continue @@ -153,12 +166,103 @@ def get_data_for_header_report(self) -> dict[int, dict[int, Any]]: color = COLORS_MAPPING[self.get_color_for_state(mention_type, mb_info['txCount'], header)] label = f'Shard {header["shard_id"]}' if header["shard_id"] != 4294967295 else "MetaShard" + if mb_info['type'] != 0: label += f' ({TYPE_NAMES[mb_info["type"]]})' + else: + label += f' ({mb_info["senderShardID"]} -> {mb_info["receiverShardID"]})' report[epoch][shard_id][nonce][round_number].append((label, mb_hash[:15] + '...', color)) - return sort_report(report) + return sort_any(report), nonce_alarms + + def get_data_for_header_alarms_report(self) -> dict[int, Any]: + report: dict[int, Any] = {} + nonce_alarms: dict[int, dict[int, set]] = {} + + seen_miniblocks = set[str]() + + for mb_hash, mb_info in [(hash, miniblock) for hash, miniblock in self.miniblocks.items() if miniblock['hasAlarm']]: + nonce = mb_info['nonce'] + shard_id = mb_info['senderShardID'] + epoch = mb_info['first_seen_epoch'] + + hasAlarm = mb_info['hasAlarm'] + alarms = mb_info['alarms'] + if hasAlarm: + shard_map = nonce_alarms.setdefault(shard_id, {}) + shard_map.setdefault(nonce, set()).update(alarms) + + for mention_type, header in mb_info.get('mentioned', []): + if "proposed" in mention_type: + continue + + # prepare epoch level + if epoch not in report: + report[epoch] = {} + for issue in Issues: + report[epoch][issue.name] = {} + + color = COLORS_MAPPING[self.get_color_for_state(mention_type, mb_info['txCount'], header)] + label = f'Shard {header["shard_id"]}' if header["shard_id"] != 4294967295 else "MetaShard" + + if mb_info['type'] != 0: + label += f' ({TYPE_NAMES[mb_info["type"]]})' + else: + label += f' ({mb_info["senderShardID"]} -> {mb_info["receiverShardID"]})' + + for issue in mb_info['alarms']: + if shard_id not in report[epoch][issue]: + report[epoch][issue][shard_id] = {} + + if nonce not in report[epoch][issue][shard_id]: + report[epoch][issue][shard_id][nonce] = {} + + round_number = header.get('round') + if round_number not in report[epoch][issue][shard_id][nonce]: + report[epoch][issue][shard_id][nonce][round_number] = [] + + report[epoch][issue][shard_id][nonce][round_number].append((label, mb_hash[:15] + '...', color)) + seen_miniblocks.add(mb_hash) + + for mb_hash in [item for item in self.miniblocks.keys() if item not in seen_miniblocks and self.miniblocks[item]['nonce'] in nonce_alarms.get(self.miniblocks[item]['senderShardID'], set())]: + mb_info = self.miniblocks[mb_hash] + nonce = mb_info['nonce'] + shard_id = mb_info['senderShardID'] + epoch = mb_info['first_seen_epoch'] + + for mention_type, header in mb_info.get('mentioned', []): + if "proposed" in mention_type: + continue + + # prepare epoch level + if epoch not in report: + report[epoch] = {} + for issue in Issues: + report[epoch][issue.name] = {} + + color = COLORS_MAPPING[self.get_color_for_state(mention_type, mb_info['txCount'], header)] + label = f'Shard {header["shard_id"]}' if header["shard_id"] != 4294967295 else "MetaShard" + + if 
mb_info['type'] != 0:
+                    label += f' ({TYPE_NAMES[mb_info["type"]]})'
+                else:
+                    label += f' ({mb_info["senderShardID"]} -> {mb_info["receiverShardID"]})'
+
+                for issue in nonce_alarms[shard_id][nonce]:
+                    if shard_id not in report[epoch][issue]:
+                        report[epoch][issue][shard_id] = {}
+
+                    if nonce not in report[epoch][issue][shard_id]:
+                        report[epoch][issue][shard_id][nonce] = {}
+
+                    round_number = header.get('round')
+                    if round_number not in report[epoch][issue][shard_id][nonce]:
+                        report[epoch][issue][shard_id][nonce][round_number] = []
+
+                    report[epoch][issue][shard_id][nonce][round_number].append((label, mb_hash[:15] + '...', color))
+
+        return sort_any(report)


 def sort_report(report: dict[int, dict[int, Any]]) -> dict[int, dict[int, Any]]:
@@ -185,3 +289,75 @@ def sort_report(report: dict[int, dict[int, Any]]) -> dict[int, dict[int, Any]]:
     }

     return out
+
+
+def sort_report1(report: dict[int, dict[int, Any]]) -> dict[int, dict[int, Any]]:
+    out: dict[int, dict[int, Any]] = {}
+
+    for epoch in sorted(report.keys()):
+        out[epoch] = {}
+
+        for issue in sorted(report[epoch].keys()):
+            out[epoch][issue] = {}
+            # meta shard (4294967295) last
+            shard_ids = sorted(
+                report[epoch][issue].keys(),
+                key=lambda s: (s == 4294967295, s),
+            )
+
+            for shard_id in shard_ids:
+                out[epoch][issue][shard_id] = {}
+
+                for nonce in sorted(report[epoch][issue][shard_id].keys()):
+                    rounds = report[epoch][issue][shard_id][nonce]
+
+                    out[epoch][issue][shard_id][nonce] = {
+                        r: rounds[r]
+                        for r in sorted(rounds.keys())
+                    }
+
+    return out
+
+
+META_SHARD_ID = 4294967295
+
+
+def sort_any(data: Any) -> Any:
+    """
+    Recursively sorts dictionaries and lists.
+    - Dictionaries: Sorted by keys (Meta Shard always last).
+    - Lists: Elements are recursively sorted.
+    - Others: Returned as is.
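+
+    Example (illustrative; the meta shard key sorts last):
+        >>> sort_any({4294967295: 'meta', 1: 'b', 0: 'a'})
+        {0: 'a', 1: 'b', 4294967295: 'meta'}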
+ """ + # Case 1: It's a Dictionary + if isinstance(data, dict): + # Determine the sorted order of keys + sorted_keys = sorted( + data.keys(), + key=lambda k: ( + # Rule: If key is the Meta Shard ID, put it last + (k == META_SHARD_ID) if isinstance(k, int) else False, + # Otherwise, sort naturally by value/string + k + ) + ) + # Rebuild dictionary recursively + return {k: sort_any(data[k]) for k in sorted_keys} + + # Case 2: It's a List/Array + elif isinstance(data, list): + # Sort each item inside the list first + processed_list = [sort_any(item) for item in data] + try: + # Try to sort the list itself if elements are comparable + return sorted(processed_list) + except TypeError: + # If elements are non-comparable (e.g., list of dicts), return as is + return processed_list + + # Case 3: Primitive types (int, str, bool, None) + return data diff --git a/multiversx_cross_shard_analysis/miniblocks_timeline_report.py b/multiversx_cross_shard_analysis/miniblocks_timeline_report.py index 4d2c2d4..9f12086 100644 --- a/multiversx_cross_shard_analysis/miniblocks_timeline_report.py +++ b/multiversx_cross_shard_analysis/miniblocks_timeline_report.py @@ -168,7 +168,6 @@ def build_miniblock_section(miniblock: dict[str, Any], page_usable_width: float) items = mentioned.get(r, []) drawing = build_stack_for_round(items, col_width) cells.append(drawing) - gap = has_round_gap(rounds) tbl = Table( [header, cells], @@ -184,7 +183,7 @@ def build_miniblock_section(miniblock: dict[str, Any], page_usable_width: float) ("VALIGN", (0, 1), (-1, -1), "TOP"), ] - if gap: + if miniblock.get("hasAlarm", False): style.append(("BOX", (0, 0), (-1, -1), 2, colors.red)) tbl.setStyle(TableStyle(style)) diff --git a/multiversx_cross_shard_analysis/test_decode_reserved.py b/multiversx_cross_shard_analysis/test_decode_reserved.py index e7eed86..4c01d9d 100644 --- a/multiversx_cross_shard_analysis/test_decode_reserved.py +++ b/multiversx_cross_shard_analysis/test_decode_reserved.py @@ -3,14 +3,14 @@ mentioned_headers = { "origin_shard_proposed_headers": "20ec12", - "origin_shard_commited_headers": "20ec12", + "origin_shard_committed_headers": "20ec12", "dest_shard_proposed_headers_1": "1002208112", "dest_shard_proposed_headers_2": "18821220ec12", - "dest_shard_commited_headers_1": "1002208112", - "dest_shard_commited_headers_2": "18821220ec12", + "dest_shard_committed_headers_1": "1002208112", + "dest_shard_committed_headers_2": "18821220ec12", "meta_origin_shard_proposed_headers": "08011002208112", "meta_dest_shard_proposed_headers": "08011002180a208112", - "meta_dest_shard_commited_headers": "", + "meta_dest_shard_committed_headers": "", } expected = { @@ -20,7 +20,7 @@ "IndexOfFirstTxProcessed": 0, "IndexOfLastTxProcessed": 2412 }, - "origin_shard_commited_headers": { + "origin_shard_committed_headers": { "ExecutionType": "Normal", "State": "Final", "IndexOfFirstTxProcessed": 0, @@ -38,13 +38,13 @@ "IndexOfFirstTxProcessed": 2306, "IndexOfLastTxProcessed": 2412 }, - "dest_shard_commited_headers_1": { + "dest_shard_committed_headers_1": { "ExecutionType": "Normal", "State": "PartialExecuted", "IndexOfFirstTxProcessed": 0, "IndexOfLastTxProcessed": 2305 }, - "dest_shard_commited_headers_2": { + "dest_shard_committed_headers_2": { "ExecutionType": "Normal", "State": "Final", "IndexOfFirstTxProcessed": 2306, @@ -62,7 +62,7 @@ "IndexOfFirstTxProcessed": 10, "IndexOfLastTxProcessed": 2305 }, - "meta_dest_shard_commited_headers": {} + "meta_dest_shard_committed_headers": {} } diff --git 
a/multiversx_cross_shard_analysis/test_miniblocks.py b/multiversx_cross_shard_analysis/test_miniblocks.py index b69989a..80b26e9 100644 --- a/multiversx_cross_shard_analysis/test_miniblocks.py +++ b/multiversx_cross_shard_analysis/test_miniblocks.py @@ -1,7 +1,9 @@ -from enum import Enum import json -from multiversx_cross_shard_analysis.header_structures import Header, HeaderData, ShardData +from enum import Enum +from multiversx_cross_shard_analysis.header_structures import (Header, + HeaderData, + ShardData) from multiversx_cross_shard_analysis.miniblock_data import MiniblockData header = { @@ -47,9 +49,9 @@ "hash": "52af8b3c899198e823ef94c80fc12cc4ba301e005d8e67f615ba872226a4963c", "receiverShardID": 0, "reserved": "1001", - "senderShardID": 0, - "txCount": 809, - "type": 0 + "senderShardID": 0, + "txCount": 809, + "type": 0 } ], "nonce": 1648, @@ -141,14 +143,14 @@ class TestMiniBlockHeader: def test_header_data(self): header_data = HeaderData() - header_data.add_commited_header(header_exec_result) - assert header_data.header_dictionary['commited_headers'][0] == header_exec_result + header_data.add_committed_header(header_exec_result) + assert header_data.header_dictionary['committed_headers'][0] == header_exec_result header_data.add_proposed_header(header_exec_result) assert header_data.header_dictionary['proposed_headers'][0] == header_exec_result def test_header(self): - header_instance = Header(header_exec_result, 'commited') + header_instance = Header(header_exec_result, 'committed') assert header_instance.metadata['epoch'] == 2 assert header_instance.metadata['round'] == 1649 assert header_instance.metadata['shard_id'] == 0 @@ -157,31 +159,31 @@ def test_header(self): assert len(header_instance.miniblocks) == 2 for mention_type, miniblock, metadata in header_instance.miniblocks: - assert mention_type in ["origin_shard_commited", "origin_shard_commited_exec"] - if mention_type == "origin_shard_commited": + assert mention_type in ["origin_shard_committed", "origin_shard_committed_exec"] + if mention_type == "origin_shard_committed": assert miniblock['hash'] == "994ceb37eb426a123501928c8c5b67e59f607557fb5f332d5e55fd297ab5d870" assert metadata['nonce'] == 1649 - elif mention_type == "origin_shard_commited_exec": + elif mention_type == "origin_shard_committed_exec": assert miniblock['hash'] == "4df428a4f8c34e62382d7bdbec08749188049959131c2acbd514edff1890b28e" assert metadata['nonce'] == 1648 def test_shard_data(self): header_data = HeaderData() - header_data.add_commited_header(header_exec_result) + header_data.add_committed_header(header_exec_result) header_data.add_proposed_header(header_exec_result) shard_data = ShardData() shard_data.add_node(header_data) - assert shard_data.parsed_headers[0].header_dictionary['commited_headers'][0] == header_exec_result + assert shard_data.parsed_headers[0].header_dictionary['committed_headers'][0] == header_exec_result assert shard_data.parsed_headers[0].header_dictionary['proposed_headers'][0] == header_exec_result assert len(shard_data.miniblocks) == 2 # two miniblocks in the header def test_nonce_timeline(self): header_data = HeaderData() - header_data.add_commited_header(header) + header_data.add_committed_header(header) header_data.add_proposed_header(header) - header_data.add_commited_header(header_exec_result) + header_data.add_committed_header(header_exec_result) header_data.add_proposed_header(header_exec_result) shard_data = ShardData() @@ -199,10 +201,10 @@ def test_nonce_timeline(self): def test_nonce_timeline_new(self): header_data 
= HeaderData() - header_data.add_commited_header(header) + header_data.add_committed_header(header) header_data.add_proposed_header(header) - header_data.add_commited_header(header_exec_result) + header_data.add_committed_header(header_exec_result) header_data.add_proposed_header(header_exec_result) shard_data = ShardData() @@ -220,10 +222,10 @@ def test_nonce_timeline_new(self): def test_miniblock_data_verify(self): header_data = HeaderData() - header_data.add_commited_header(header) + header_data.add_committed_header(header) header_data.add_proposed_header(header) - header_data.add_commited_header(header_exec_result) + header_data.add_committed_header(header_exec_result) header_data.add_proposed_header(header_exec_result) shard_data = ShardData() From 0d27082f688ca2ac82220b26aa4c531656b2d7aa Mon Sep 17 00:00:00 2001 From: Mihaela Radian Date: Sun, 21 Dec 2025 18:31:06 +0200 Subject: [PATCH 19/24] MX-17306 Fixes to issues report --- multiversx_cross_shard_analysis/issues.py | 6 ++++-- multiversx_cross_shard_analysis/miniblock_data.py | 3 --- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/multiversx_cross_shard_analysis/issues.py b/multiversx_cross_shard_analysis/issues.py index 2c847f5..012342f 100644 --- a/multiversx_cross_shard_analysis/issues.py +++ b/multiversx_cross_shard_analysis/issues.py @@ -24,6 +24,7 @@ def check_gap_between_rounds(self, mb_info: dict[str, Any]) -> bool: # Logic for: MISSING_DESTINATION def check_missing_or_duplicate_destination(self, mb_info: dict[str, Any]) -> bool: receiver = mb_info.get("receiverShardID") + sender = mb_info.get("senderShardID") count = 0 for _, header in mb_info.get("mentioned", []): @@ -31,8 +32,9 @@ def check_missing_or_duplicate_destination(self, mb_info: dict[str, Any]) -> boo count += 1 is_dest_missing = count == 0 and mb_info.get("type") in [0, 90] - is_dest_duplicate = count > 2 and mb_info.get("type") in [0, 90] and mb_info.get("first_seen_epoch", 0) >= SUPERNOVA_ACTIVATION_EPOCH - + is_dest_duplicate = count > 2 and mb_info.get("type") in [0, 90] and receiver != sender and mb_info.get("first_seen_epoch", 0) >= SUPERNOVA_ACTIVATION_EPOCH + if is_dest_missing or is_dest_duplicate: + print(f"Miniblock {mb_info.get('hash')} nonce {mb_info.get('nonce')} has issue: Count={count}, Type={mb_info.get('type')}, SenderShardID={mb_info.get('senderShardID')}, ReceiverShardID={receiver}") return is_dest_missing or is_dest_duplicate # Logic for: WRONG_PROCESSING_ORDER diff --git a/multiversx_cross_shard_analysis/miniblock_data.py b/multiversx_cross_shard_analysis/miniblock_data.py index 10bb3d8..7113d6a 100644 --- a/multiversx_cross_shard_analysis/miniblock_data.py +++ b/multiversx_cross_shard_analysis/miniblock_data.py @@ -1,5 +1,4 @@ -import json from typing import Any from multiversx_cross_shard_analysis.constants import (COLORS_MAPPING, @@ -33,8 +32,6 @@ def verify_miniblocks(self) -> None: # Set the general alarm flag if any issues were found mb_info['hasAlarm'] = len(mb_info['alarms']) > 0 - if mb_hash.startswith("66bc97b69075f10"): - print(json.dumps(mb_info, indent=4)) def get_color_for_state(self, mention_type: str, tx_count: int, header: dict[str, Any]) -> Colors: reserved = header.get('reserved', {}) From 1fcf437baa24e1849ed3eb0883a009dd0b1accd4 Mon Sep 17 00:00:00 2001 From: Mihaela Radian Date: Sun, 21 Dec 2025 19:26:39 +0200 Subject: [PATCH 20/24] MX-17306 Added alarms report to gather data --- multiversx_cross_shard_analysis/gather_data.py | 12 ++++++++++++ .../headers_alarms_report.py | 6 +++--- 2 files changed, 15 
insertions(+), 3 deletions(-) diff --git a/multiversx_cross_shard_analysis/gather_data.py b/multiversx_cross_shard_analysis/gather_data.py index 1f8d305..5f46226 100644 --- a/multiversx_cross_shard_analysis/gather_data.py +++ b/multiversx_cross_shard_analysis/gather_data.py @@ -9,6 +9,7 @@ from multiversx_cross_shard_analysis.miniblocks_timeline_report import \ build_pdf_from_miniblocks +from .headers_alarms_report import build_nonce_alarms_timeline_pdf from .header_analysis_archive_handler import HeaderAnalysisArchiveHandler from .header_analysis_checker import HeaderAnalysisChecker from .header_analysis_parser import HeaderAnalysisParser @@ -59,6 +60,17 @@ def gather_data(): build_nonce_timeline_pdf(input_data[epoch], nonce_alarms, outname=outfile) print("→", outfile) + input_data = MiniblockData(handler.shard_data.miniblocks).get_data_for_header_alarms_report() + out_folder = os.path.join(handler.run_name, "NonceAlarms") + out_folder = os.path.join('Reports', out_folder) + os.makedirs(out_folder, exist_ok=True) + + for epoch in sorted(input_data.keys()): + print(f"Epoch: {epoch}") + outfile = os.path.join(out_folder, f"nonce_alarms_report_{epoch}.pdf") + build_nonce_alarms_timeline_pdf(input_data[epoch], outname=outfile) + print("→", outfile) + if __name__ == "__main__": gather_data() diff --git a/multiversx_cross_shard_analysis/headers_alarms_report.py b/multiversx_cross_shard_analysis/headers_alarms_report.py index a583e36..06b34e9 100644 --- a/multiversx_cross_shard_analysis/headers_alarms_report.py +++ b/multiversx_cross_shard_analysis/headers_alarms_report.py @@ -151,8 +151,8 @@ def build_nonce_section(shard_id: int, nonce: int, rounds: list[int], data: dict # PDF builder # ----------------------------- -def build_nonce_timeline_pdf(alarm_data: dict[str, dict[int, dict[int, dict[int, list[Any]]]]], - outname="nonce_alarms.pdf"): +def build_nonce_alarms_timeline_pdf(alarm_data: dict[str, dict[int, dict[int, dict[int, list[Any]]]]], + outname="nonce_alarms.pdf"): doc = SimpleDocTemplate( outname, pagesize=A4, @@ -330,7 +330,7 @@ def main(): for epoch in sorted(input_data.keys()): outfile = os.path.join(out_folder, f"nonce_alarms_report_{epoch}.pdf") - build_nonce_timeline_pdf(input_data[epoch], outname=outfile) + build_nonce_alarms_timeline_pdf(input_data[epoch], outname=outfile) print(f"Nonce alarms report for Epoch {epoch} generated: {outfile}") From 28ec8c4c32f4fad983750704ba941dfbc0090c48 Mon Sep 17 00:00:00 2001 From: Mihaela Radian Date: Sun, 21 Dec 2025 19:55:06 +0200 Subject: [PATCH 21/24] MX-17306 edited readme.md --- README.md | 22 ++++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index bc33392..79f81b4 100644 --- a/README.md +++ b/README.md @@ -14,7 +14,7 @@ In order to create an application that uses off-line parsing of logs files, thes - node level methods should be implemented in inheriting classes ### AHO-CORASIK PARSER -- Log level processing implementing the aho-corasik algorithm that searches for a list of given keywords simultaneously. It uses an EntryParser to extract information from the entries of interest +- Log level processing implementing the Aho-Corasick algorithm that searches for a list of given keywords simultaneously. 
It uses an *EntryParser* to extract information from the entries of interest

 ### ENTRY PARSER
 - Entry level processing, divides the log entry into its basic components: log level, context, message, parameters
@@ -22,8 +22,15 @@ In order to create an application that uses off-line parsing of logs files, thes

 ## CROSS SHARD ANALYSIS TOOL

-Tool that validates that cross shard miniblocks are executed (and proposed) in strict order, without gaps or duplications.
+This tool validates that cross-shard mini-blocks are executed (and proposed) in strict order, without gaps or duplications.
+It uses color-coded data to illustrate each state of processing. A configuration file (issues.py) is provided to flag certain issues with mini-block production.
+The tool creates a run-specific folder under *Reports* that holds the parsed headers in the *Shards* subfolder and the mini-blocks in the *Miniblocks* subfolder.
+The generated reports are also placed in this folder, in individual sub-folders named after the respective report:
+- **MiniblocksShardTimeline** contains a report that goes through the rounds and shows which mini-blocks were proposed, executed or notarized for each shard; an individual PDF file is generated for each epoch;
+- **MiniblocksTimelineDetails** produces a timeline of mini-blocks for each shard; the type of each mini-block and other details are included;
+- **NonceTimeline** produces a timeline of the processed headers originating from each shard; alarms, such as round gaps or missing headers, are represented by colored borders;
+- **NonceAlarms** is similar to the NonceTimeline report, but only includes headers that have issues; the report is divided into chapters, one per type of alarm, and a header may appear in more than one such category, depending on its characteristics.

 INSTALL

 Create a virtual environment and install the dependencies:
@@ -42,6 +49,13 @@ pip install -r ./requirements-dev.txt --upgrade

 EXAMPLE USAGE

 ```
-python -m multiversx_cross_shard_analysis.gather_data --path /home/mihaela/Downloads/cross-shard-execution-anal-9afe696daf.zip
-python -m multiversx_cross_shard_analysis.headers_timeline_report --run-name cross-shard-execution-anal-6cc663f7af
+python -m multiversx_cross_shard_analysis.gather_data --path ~/Downloads/cross-shard-execution-anal-9afe696daf.zip
+```
+where the mandatory --path argument is the path to the zip file containing the logs.
+The command also generates all available reports, saving them inside a subfolder of *Reports* with the same name as the provided zip file.
+
+To run a specific report for an existing run folder:
 ```
+python -m multiversx_cross_shard_analysis.headers_timeline_report --run-name cross-shard-execution-anal-6cc663f7af
+```
+where --run-name is the name of the subfolder where the run's files reside.
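+
+RUN TESTS
+
+The unit tests (for instance *test_miniblocks.py* and *test_decode_reserved.py*) are written in pytest style; assuming pytest is installed in the environment, they can be run with:
+```
+python -m pytest multiversx_cross_shard_analysis
+```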
\ No newline at end of file From 1ea5aa6c1df4adffc9f1f24ef3e4f296111dce70 Mon Sep 17 00:00:00 2001 From: Mihaela Radian Date: Sun, 21 Dec 2025 21:45:00 +0200 Subject: [PATCH 22/24] MX-17306 Fixes after copilot review --- multiversx_cross_shard_analysis/header_analysis_checker.py | 1 - multiversx_cross_shard_analysis/header_structures.py | 3 +-- multiversx_cross_shard_analysis/issues.py | 3 +-- .../miniblocks_timeline_report.py | 1 - multiversx_cross_shard_analysis/test_miniblocks.py | 6 +++--- 5 files changed, 5 insertions(+), 9 deletions(-) diff --git a/multiversx_cross_shard_analysis/header_analysis_checker.py b/multiversx_cross_shard_analysis/header_analysis_checker.py index 02da5e3..ac3e606 100644 --- a/multiversx_cross_shard_analysis/header_analysis_checker.py +++ b/multiversx_cross_shard_analysis/header_analysis_checker.py @@ -22,7 +22,6 @@ def process_parsed_result(self): def post_process_node_logs(self): # Implement post-processing logic here self.write_node_json() - pass def create_json_for_node(self) -> dict[str, Any]: return { diff --git a/multiversx_cross_shard_analysis/header_structures.py b/multiversx_cross_shard_analysis/header_structures.py index fae85b8..b89e1df 100644 --- a/multiversx_cross_shard_analysis/header_structures.py +++ b/multiversx_cross_shard_analysis/header_structures.py @@ -2,8 +2,7 @@ from typing import Any from multiversx_cross_shard_analysis.miniblock_data import MiniblockData -from multiversx_cross_shard_analysis.test_decode_reserved import \ - decode_reserved_field +from multiversx_cross_shard_analysis.decode_reserved import decode_reserved_field from .constants import (COLORS_MAPPING, TYPE_NAMES, dest_shard, meta, origin_shard) diff --git a/multiversx_cross_shard_analysis/issues.py b/multiversx_cross_shard_analysis/issues.py index 012342f..85509b3 100644 --- a/multiversx_cross_shard_analysis/issues.py +++ b/multiversx_cross_shard_analysis/issues.py @@ -33,8 +33,7 @@ def check_missing_or_duplicate_destination(self, mb_info: dict[str, Any]) -> boo is_dest_missing = count == 0 and mb_info.get("type") in [0, 90] is_dest_duplicate = count > 2 and mb_info.get("type") in [0, 90] and receiver != sender and mb_info.get("first_seen_epoch", 0) >= SUPERNOVA_ACTIVATION_EPOCH - if is_dest_missing or is_dest_duplicate: - print(f"Miniblock {mb_info.get('hash')} nonce {mb_info.get('nonce')} has issue: Count={count}, Type={mb_info.get('type')}, SenderShardID={mb_info.get('senderShardID')}, ReceiverShardID={receiver}") + return is_dest_missing or is_dest_duplicate # Logic for: WRONG_PROCESSING_ORDER diff --git a/multiversx_cross_shard_analysis/miniblocks_timeline_report.py b/multiversx_cross_shard_analysis/miniblocks_timeline_report.py index 9f12086..96da12f 100644 --- a/multiversx_cross_shard_analysis/miniblocks_timeline_report.py +++ b/multiversx_cross_shard_analysis/miniblocks_timeline_report.py @@ -152,7 +152,6 @@ def build_miniblock_section(miniblock: dict[str, Any], page_usable_width: float) flow.append(Spacer(1, 6)) return flow - mentioned = miniblock.get("mentioned", {}) rounds = sorted(mentioned.keys()) num_cols = max(1, len(rounds)) diff --git a/multiversx_cross_shard_analysis/test_miniblocks.py b/multiversx_cross_shard_analysis/test_miniblocks.py index 80b26e9..df1f6f3 100644 --- a/multiversx_cross_shard_analysis/test_miniblocks.py +++ b/multiversx_cross_shard_analysis/test_miniblocks.py @@ -120,9 +120,9 @@ "hash": "994ceb37eb426a123501928c8c5b67e59f607557fb5f332d5e55fd297ab5d870", "receiverShardID": 0, "reserved": "1001", - "senderShardID": 0, - "txCount": 
1610, - "type": 0 + "senderShardID": 0, + "txCount": 1610, + "type": 0 } ], "nonce": 1649, From 85afa07997e91186ec594e56e4d5442bf92d391a Mon Sep 17 00:00:00 2001 From: Mihaela Radian Date: Tue, 23 Dec 2025 22:45:55 +0200 Subject: [PATCH 23/24] MX-17306 Fixes --- multiversx_cross_shard_analysis/headers_timeline_report.py | 2 +- multiversx_cross_shard_analysis/issues.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/multiversx_cross_shard_analysis/headers_timeline_report.py b/multiversx_cross_shard_analysis/headers_timeline_report.py index 61779de..b35dddc 100644 --- a/multiversx_cross_shard_analysis/headers_timeline_report.py +++ b/multiversx_cross_shard_analysis/headers_timeline_report.py @@ -188,7 +188,7 @@ def build_nonce_timeline_pdf(shards_data: dict[int, dict[int, dict[int, list[Any first_page = False round_list = list(rdata.keys()) - alarm = nonce in nonce_alarms[shard_id] + alarm = nonce in nonce_alarms.get(shard_id, set()) story.extend(build_nonce_section(shard_id, nonce, round_list, rdata, usable_width, alarm)) current_h += h_needed diff --git a/multiversx_cross_shard_analysis/issues.py b/multiversx_cross_shard_analysis/issues.py index 85509b3..0d0c082 100644 --- a/multiversx_cross_shard_analysis/issues.py +++ b/multiversx_cross_shard_analysis/issues.py @@ -32,7 +32,7 @@ def check_missing_or_duplicate_destination(self, mb_info: dict[str, Any]) -> boo count += 1 is_dest_missing = count == 0 and mb_info.get("type") in [0, 90] - is_dest_duplicate = count > 2 and mb_info.get("type") in [0, 90] and receiver != sender and mb_info.get("first_seen_epoch", 0) >= SUPERNOVA_ACTIVATION_EPOCH + is_dest_duplicate = count > 4 and mb_info.get("type") in [0, 90] and receiver != sender and mb_info.get("first_seen_epoch", 0) >= SUPERNOVA_ACTIVATION_EPOCH return is_dest_missing or is_dest_duplicate From af20b3f2bd1d6e4558dc07d9b7a553f99b4c7c7d Mon Sep 17 00:00:00 2001 From: Mihaela Radian Date: Mon, 12 Jan 2026 11:09:25 +0200 Subject: [PATCH 24/24] MX-17306 Fix for rewards and peers blocks --- multiversx_cross_shard_analysis/header_structures.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/multiversx_cross_shard_analysis/header_structures.py b/multiversx_cross_shard_analysis/header_structures.py index b89e1df..1255da3 100644 --- a/multiversx_cross_shard_analysis/header_structures.py +++ b/multiversx_cross_shard_analysis/header_structures.py @@ -177,7 +177,7 @@ def get_miniblocks(self, header: dict[str, Any], status: str) -> list[tuple[str, for miniblock in exec_result.get('miniBlockHeaders', []): miniblock_mention = f'{meta}_{origin_shard if shard_metadata["shard_id"] == miniblock["senderShardID"] else dest_shard}_exec_{status}' miniblocks.append((miniblock_mention, miniblock, self.metadata.copy())) - if Header.isHeaderV3(header): + if Header.isHeaderV3(header) or Header.isMetaHeaderV3(header): for exec_result in header['executionResults']: base_exec_result = exec_result.get('baseExecutionResult', {}) exec_result_metadata = self.metadata.copy()
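The `nonce_alarms.get(shard_id, set())` change in PATCH 23 deserves a quick illustration; below is a minimal sketch with toy data (not the tool's real structures, where the inner values are dictionaries keyed by nonce):

```
# Toy data: alarms were recorded only for shard 0.
nonce_alarms = {0: {1649}}
shard_id, nonce = 1, 1649

# The previous lookup raised KeyError for shards without recorded alarms:
#   alarm = nonce in nonce_alarms[shard_id]

# The fixed lookup falls back to an empty set, so the membership test is safe:
alarm = nonce in nonce_alarms.get(shard_id, set())
assert alarm is False
```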