From d43efffa130b549cfabf30ed6290048d330b539c Mon Sep 17 00:00:00 2001 From: Piotr Korkus Date: Fri, 27 Feb 2026 10:56:35 +0100 Subject: [PATCH 01/10] bazel: add starlark, formatting, copyright checks --- .vscode/rustfmt.sh | 3 + .vscode/settings.json | 3 + BUILD | 38 ++++++++ MODULE.bazel | 5 ++ bazel_common/score_basic_bazel.MODULE.bazel | 2 +- bazel_common/score_python.MODULE.bazel | 2 +- pyproject.toml | 98 +++++++++++++++++++++ 7 files changed, 149 insertions(+), 2 deletions(-) create mode 100755 .vscode/rustfmt.sh create mode 100644 pyproject.toml diff --git a/.vscode/rustfmt.sh b/.vscode/rustfmt.sh new file mode 100755 index 0000000000..594c3c4ac1 --- /dev/null +++ b/.vscode/rustfmt.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +bazel run @score_tooling//format_checker:rustfmt_with_policies \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json index c64d045842..63fd298329 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -2,6 +2,9 @@ "rust-analyzer.linkedProjects": [ "rust-project.json" ], + "rust-analyzer.rustfmt.overrideCommand": [ + "${workspaceFolder}/.vscode/rustfmt.sh" + ], "python.testing.pytestArgs": [ "feature_integration_tests" ], diff --git a/BUILD b/BUILD index e54b90d0e0..7b66a32842 100644 --- a/BUILD +++ b/BUILD @@ -12,7 +12,10 @@ # ******************************************************************************* load("@score_docs_as_code//:docs.bzl", "docs") +load("@score_tooling//:defs.bzl", "copyright_checker", "setup_starpls", "use_format_targets") + +# Docs-as-code docs( data = [ # Software components @@ -32,3 +35,38 @@ docs( ], source_dir = "docs", ) + +# Bazel formatting +setup_starpls( + name = "starpls_server", + visibility = ["//visibility:public"], +) + +# Copyright check +copyright_checker( + name = "copyright", + srcs = [ + ".github", + "bazel_common", + "docs", + "scripts", + "tests", + "feature_integration_tests", + "images", + "runners", + "rust_coverage", + "showcases", + "//:BUILD", + 
"//:MODULE.bazel", + ], + config = "@score_tooling//cr_checker/resources:config", + template = "@score_tooling//cr_checker/resources:templates", + visibility = ["//visibility:public"], +) + +# Add target for formatting checks +use_format_targets() + +exports_files([ + "MODULE.bazel", +]) diff --git a/MODULE.bazel b/MODULE.bazel index bce23a9907..c3c0e9c96b 100644 --- a/MODULE.bazel +++ b/MODULE.bazel @@ -44,6 +44,11 @@ archive_override( strip_prefix = "rules_boost-master", ) +# Implicit dependencies for score_tooling +bazel_dep(name = "aspect_rules_lint", version = "2.0.0") +bazel_dep(name = "buildifier_prebuilt", version = "8.2.0.2") +bazel_dep(name = "score_rust_policies", version = "0.0.3") + # TRLC dependency for requirements traceability bazel_dep(name = "trlc") git_override( diff --git a/bazel_common/score_basic_bazel.MODULE.bazel b/bazel_common/score_basic_bazel.MODULE.bazel index d7f3c8088f..fbc133ba1d 100644 --- a/bazel_common/score_basic_bazel.MODULE.bazel +++ b/bazel_common/score_basic_bazel.MODULE.bazel @@ -1,5 +1,5 @@ bazel_dep(name = "rules_shell", version = "0.6.0") -bazel_dep(name = "rules_cc", version = "0.1.1") +bazel_dep(name = "rules_cc", version = "0.2.16") bazel_dep(name = "rules_pkg", version = "1.2.0") bazel_dep(name = "rules_rpm", version = "0.1.0") diff --git a/bazel_common/score_python.MODULE.bazel b/bazel_common/score_python.MODULE.bazel index 7c3453ff45..a2e613fe2b 100644 --- a/bazel_common/score_python.MODULE.bazel +++ b/bazel_common/score_python.MODULE.bazel @@ -1,5 +1,5 @@ -bazel_dep(name = "rules_python", version = "1.0.0") +bazel_dep(name = "rules_python", version = "1.8.3") PYTHON_VERSION = "3.12" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000000..2317e7d70e --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,98 @@ +[tool.ruff] +# Exclude a variety of commonly ignored directories. 
+exclude = [ + ".bzr", + ".direnv", + ".eggs", + ".git", + ".git-rewrite", + ".hg", + ".mypy_cache", + ".nox", + ".pants.d", + ".pytype", + ".ruff_cache", + ".svn", + ".tox", + ".venv", + "__pypackages__", + "_build", + "buck-out", + "build", + "dist", + "node_modules", + "venv", +] + +line-length = 120 +indent-width = 4 + +[tool.ruff.lint] +select = [ + # flake8-boolean-trap + "FBT", + # flake8-bugbear + "B", + # flake8-builtins + "A", + # flake8-comprehensions + "C4", + # flake8-fixme + "FIX", + # flake8-implicit-str-concat + "ISC", + # flake8-pie + "PIE", + # flake8-print + "T20", + # flake8-pytest-style + "PT", + # flake8-raise + "RSE", + # flake8-return + "RET501", + "RET502", + "RET503", + "RET504", + # flake8-self + "SLF", + # flake8-simplify + "SIM", + # flake8-type-checking + "TC", + # flake8-unused-arguments + "ARG", + # flake8-use-pathlib + "PTH", + + # isort + "I", + + # pycodestyle error + "E", + # Pyflakes + "F", + # pyupgrade + "UP", +] +ignore = ["F401", "PTH123", "ARG002", "T201"] + +# Allow fix for all enabled rules (when `--fix`) is provided. +fixable = ["ALL"] +unfixable = [] + +# Allow unused variables when underscore-prefixed. +dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" + +[tool.ruff.format] +# Like Black, use double quotes for strings. +quote-style = "double" + +# Like Black, indent with spaces, rather than tabs. +indent-style = "space" + +# Like Black, respect magic trailing commas. +skip-magic-trailing-comma = false + +# Like Black, automatically detect the appropriate line ending. 
+line-ending = "auto" From 84e8fd943646752a0caaa697744d51e60591f8d2 Mon Sep 17 00:00:00 2001 From: Piotr Korkus Date: Fri, 27 Feb 2026 11:05:36 +0100 Subject: [PATCH 02/10] format: apply for python --- .github/tools/qnx_credential_helper.py | 6 +- .../itf/test_showcases.py | 4 +- feature_integration_tests/itf/test_ssh.py | 16 +- scripts/integration_test.py | 215 ++++++++------- .../known_good_to_workspace_metadata.py | 32 ++- scripts/known_good/models/known_good.py | 206 +++++++------- scripts/known_good/models/module.py | 4 +- .../known_good/override_known_good_repo.py | 102 +++---- .../update_module_from_known_good.py | 41 +-- scripts/known_good/update_module_latest.py | 256 +++++++++--------- scripts/models/build_config.py | 17 +- scripts/quality_runners.py | 30 +- 12 files changed, 431 insertions(+), 498 deletions(-) diff --git a/.github/tools/qnx_credential_helper.py b/.github/tools/qnx_credential_helper.py index 06ae795160..ed2673ee13 100755 --- a/.github/tools/qnx_credential_helper.py +++ b/.github/tools/qnx_credential_helper.py @@ -49,9 +49,7 @@ def eprint(*args, **kwargs): eprint("Failed getting credentials from .netrc") sys.exit(1) - data = urllib.parse.urlencode( - {"userlogin": login, "password": password, "UseCookie": "1"} - ) + data = urllib.parse.urlencode({"userlogin": login, "password": password, "UseCookie": "1"}) data = data.encode("ascii") cookie_jar = http.cookiejar.CookieJar() @@ -65,7 +63,7 @@ def eprint(*args, **kwargs): sys.exit(1) cookies = {c.name: c.value for c in list(cookie_jar)} - if not "myQNX" in cookies: + if "myQNX" not in cookies: eprint("Failed to get myQNX cookie from login page") sys.exit(1) diff --git a/feature_integration_tests/itf/test_showcases.py b/feature_integration_tests/itf/test_showcases.py index 1f925dbd16..4b4d60f804 100644 --- a/feature_integration_tests/itf/test_showcases.py +++ b/feature_integration_tests/itf/test_showcases.py @@ -31,8 +31,6 @@ def test_ipc_bridge_cpp_app_is_running(target): def 
test_run_all_showcases(target): - exit_code, out = target.execute( - "/showcases/bin/cli --examples=all" - ) + exit_code, out = target.execute("/showcases/bin/cli --examples=all") logger.info(out) assert exit_code == 0 diff --git a/feature_integration_tests/itf/test_ssh.py b/feature_integration_tests/itf/test_ssh.py index d778393544..a7574fee6c 100644 --- a/feature_integration_tests/itf/test_ssh.py +++ b/feature_integration_tests/itf/test_ssh.py @@ -17,14 +17,10 @@ @score.itf.plugins.core.requires_capabilities("ssh") def test_ssh_with_default_user(target): with target.ssh() as ssh: - exit_code, stdout, stderr = ssh.execute_command_output( - "echo 'Username:' $USER && uname -a" - ) + exit_code, stdout, stderr = ssh.execute_command_output("echo 'Username:' $USER && uname -a") assert exit_code == 0, "SSH command failed" assert "Username: root" in stdout[0], "Expected username not found in output" - assert "QNX Qnx_S-core 8.0.0" in stdout[1], ( - "Expected QNX kernel information not found in output" - ) + assert "QNX Qnx_S-core 8.0.0" in stdout[1], "Expected QNX kernel information not found in output" assert stderr == [], "Expected no error output" @@ -32,12 +28,8 @@ def test_ssh_with_default_user(target): def test_ssh_with_qnx_user(target): user = "qnxuser" with target.ssh(username=user) as ssh: - exit_code, stdout, stderr = ssh.execute_command_output( - "echo 'Username:' $USER && uname -a" - ) + exit_code, stdout, stderr = ssh.execute_command_output("echo 'Username:' $USER && uname -a") assert exit_code == 0, "SSH command failed" assert f"Username: {user}" in stdout[0], "Expected username not found in output" - assert "QNX Qnx_S-core 8.0.0" in stdout[1], ( - "Expected QNX kernel information not found in output" - ) + assert "QNX Qnx_S-core 8.0.0" in stdout[1], "Expected QNX kernel information not found in output" assert stderr == [], "Expected no error output" diff --git a/scripts/integration_test.py b/scripts/integration_test.py index 6ec2aca861..1ba3381e72 100755 
--- a/scripts/integration_test.py +++ b/scripts/integration_test.py @@ -24,43 +24,48 @@ def get_module_version_gh(repo_url: str, commit_hash: str) -> Optional[str]: """Get version tag from GitHub API for a commit hash. - + Args: repo_url: GitHub repository URL commit_hash: Commit hash to look up - + Returns: Tag name if found, None otherwise """ # Check if gh CLI is installed - if not subprocess.run(['which', 'gh'], capture_output=True).returncode == 0: + if not subprocess.run(["which", "gh"], capture_output=True).returncode == 0: print("::warning::gh CLI not found. Install it to resolve commit hashes to tags.") return None - + # Extract owner/repo from GitHub URL - match = re.search(r'github\.com[/:]([^/]+)/([^/.]+)(\.git)?$', repo_url) + match = re.search(r"github\.com[/:]([^/]+)/([^/.]+)(\.git)?$", repo_url) if not match: print(f"::warning::Invalid repo URL format: {repo_url}") return None - + owner, repo = match.group(1), match.group(2) - + print(f"::debug::Querying GitHub API: repos/{owner}/{repo}/tags for commit {commit_hash}") - + try: result = subprocess.run( - ['gh', 'api', f'repos/{owner}/{repo}/tags', '--jq', - f'.[] | select(.commit.sha == "{commit_hash}") | .name'], + [ + "gh", + "api", + f"repos/{owner}/{repo}/tags", + "--jq", + f'.[] | select(.commit.sha == "{commit_hash}") | .name', + ], capture_output=True, text=True, - timeout=10 + timeout=10, ) - + if result.returncode == 0 and result.stdout.strip(): - tag = result.stdout.strip().split('\n')[0] + tag = result.stdout.strip().split("\n")[0] print(f"::debug::Found tag: {tag}") return tag - + print(f"::debug::No tag found for commit {commit_hash}") return None except Exception as e: @@ -70,11 +75,11 @@ def get_module_version_gh(repo_url: str, commit_hash: str) -> Optional[str]: def truncate_hash(hash_str: str, length: int = 8) -> str: """Truncate hash to specified length. 
- + Args: hash_str: Full hash string length: Maximum length - + Returns: Truncated hash """ @@ -85,19 +90,19 @@ def truncate_hash(hash_str: str, length: int = 8) -> str: def count_pattern(log_file: Path, pattern: str) -> int: """Count lines matching pattern in log file. - + Args: log_file: Path to log file pattern: Pattern to search for (case-insensitive) - + Returns: Number of matching lines found """ if not log_file.exists(): return 0 - + count = 0 - with open(log_file, 'r') as f: + with open(log_file, "r") as f: for line in f: if pattern in line.lower(): count += 1 @@ -106,16 +111,16 @@ def count_pattern(log_file: Path, pattern: str) -> int: def get_identifier_and_link(module: Optional[Module]) -> Tuple[Optional[str], str]: """Get display identifier and link for a module. - + Args: module: Module instance or None - + Returns: Tuple of (identifier, link_url) """ if not module or not module.hash: return None, "" - + if module.version: identifier = module.version link = f"{module.repo}/releases/tag/{module.version}" if module.repo else "" @@ -132,98 +137,91 @@ def get_identifier_and_link(module: Optional[Module]) -> Tuple[Optional[str], st else: identifier = truncate_hash(module.hash) link = "" - + return identifier, link def build_group(group_name: str, targets: str, config: str, log_file: Path) -> Tuple[int, int]: """Build a group of Bazel targets. 
- + Args: group_name: Name of the build group targets: Bazel targets to build config: Bazel config to use log_file: Path to log file - + Returns: Tuple of (exit_code, duration_seconds) """ print(f"--- Building group: {group_name} ---") - + # Build command - cmd = ['bazel', 'build', '--verbose_failures', f'--config={config}'] + targets.split() + cmd = ["bazel", "build", "--verbose_failures", f"--config={config}"] + targets.split() print(f"bazel build --verbose_failures --config {config} {targets}") print(f"::group::Bazel build ({group_name})") - + start_time = time.time() - + # Run build and capture output - with open(log_file, 'w') as f: + with open(log_file, "w") as f: # Write command to log file f.write(f"Command: {' '.join(cmd)}\n") f.write("-" * 80 + "\n\n") - - process = subprocess.Popen( - cmd, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - text=True - ) - + + process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True) + # Stream output to both terminal and file if process.stdout: for line in process.stdout: - print(line, end='') + print(line, end="") f.write(line) - + process.wait() - + end_time = time.time() duration = int(end_time - start_time) - + print("::endgroup::") - + return process.returncode, duration -def format_commit_version_cell( - group_name: str, - old_modules: Dict[str, Module], - new_modules: Dict[str, Module] -) -> str: +def format_commit_version_cell(group_name: str, old_modules: Dict[str, Module], new_modules: Dict[str, Module]) -> str: """Format the commit/version cell for the summary table. 
- + Args: group_name: Name of the module group old_modules: Modules from old known_good.json new_modules: Modules from new known_good.json - + Returns: Formatted markdown cell content """ # Get module info or defaults old_module = old_modules.get(group_name) new_module = new_modules.get(group_name) - + if new_module is None or new_module.hash is None: return "N/A" - - print(f"::debug::Module={group_name}, old_version={old_module.version if old_module else 'None'}, " - f"old_hash={old_module.hash if old_module else 'None'}, " - f"new_version={new_module.version}, " - f"new_hash={new_module.hash}, " - f"repo={new_module.repo}") - + + print( + f"::debug::Module={group_name}, old_version={old_module.version if old_module else 'None'}, " + f"old_hash={old_module.hash if old_module else 'None'}, " + f"new_version={new_module.version}, " + f"new_hash={new_module.hash}, " + f"repo={new_module.repo}" + ) + # Get identifiers and links old_identifier, old_link = get_identifier_and_link(old_module) - + # Check if hash changed hash_changed = old_module is None or old_module.hash != new_module.hash - + # Determine new identifier only if hash changed new_identifier, new_link = (None, "") if not hash_changed else get_identifier_and_link(new_module) - + # Format output if hash_changed: # Hash changed - show old -> new @@ -248,63 +246,62 @@ def format_commit_version_cell( def main(): """Main entry point.""" parser = argparse.ArgumentParser( - description='Integration build script for SCORE modules', - formatter_class=argparse.RawDescriptionHelpFormatter + description="Integration build script for SCORE modules", formatter_class=argparse.RawDescriptionHelpFormatter ) parser.add_argument( - '--known-good', + "--known-good", type=Path, default=None, - help='Path to known_good.json file (default: known_good.json in repo root)' + help="Path to known_good.json file (default: known_good.json in repo root)", ) parser.add_argument( - '--build-config', + "--build-config", type=Path, 
default=None, - help='Path to build_config.json file (default: build_config.json in repo root)' + help="Path to build_config.json file (default: build_config.json in repo root)", ) parser.add_argument( - '--config', - default=os.environ.get('CONFIG', 'x86_64-linux'), - help='Bazel config to use (default: x86_64-linux, or from CONFIG env var)' + "--config", + default=os.environ.get("CONFIG", "x86_64-linux"), + help="Bazel config to use (default: x86_64-linux, or from CONFIG env var)", ) - + args = parser.parse_args() - + # Configuration config = args.config - log_dir = Path(os.environ.get('LOG_DIR', '_logs/logs')) - summary_file = Path(os.environ.get('SUMMARY_FILE', f'_logs/build_summary-{config}.md')) - + log_dir = Path(os.environ.get("LOG_DIR", "_logs/logs")) + summary_file = Path(os.environ.get("SUMMARY_FILE", f"_logs/build_summary-{config}.md")) + known_good_file = args.known_good if not known_good_file: - known_good_file = repo_root / 'known_good.json' - + known_good_file = repo_root / "known_good.json" + build_config_file = args.build_config if not build_config_file: - build_config_file = repo_root / 'build_config.json' - + build_config_file = repo_root / "build_config.json" + # Load build configuration BUILD_TARGET_GROUPS = load_build_config(build_config_file) - + # Create log directory log_dir.mkdir(parents=True, exist_ok=True) summary_file.parent.mkdir(parents=True, exist_ok=True) - + # Load modules from known_good files try: - old_modules = load_known_good(Path('known_good.json')).modules if Path('known_good.json').exists() else {} + old_modules = load_known_good(Path("known_good.json")).modules if Path("known_good.json").exists() else {} except FileNotFoundError: old_modules = {} - + try: new_modules = load_known_good(known_good_file).modules if known_good_file else {} except FileNotFoundError as e: raise SystemExit(f"ERROR: {e}") - + # Start summary - timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S') - with open(summary_file, 'w') as f: + 
timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S") + with open(summary_file, "w") as f: f.write(f"=== Integration Build Started {timestamp} ===\n") f.write(f"Config: {config}\n") if known_good_file: @@ -314,60 +311,60 @@ def main(): f.write("\n") f.write("| Group | Status | Duration (s) | Warnings | Deprecated refs | Commit/Version |\n") f.write("|-------|--------|--------------|----------|-----------------|----------------|\n") - + print(f"=== Integration Build Started {timestamp} ===") print(f"Config: {config}") if known_good_file: print(f"Known Good File: {known_good_file}") - + overall_warn_total = 0 overall_depr_total = 0 any_failed = False - + # Build each group for group_name, module_config in BUILD_TARGET_GROUPS.items(): log_file = log_dir / f"{group_name}-{config}.log" - + exit_code, duration = build_group(group_name, module_config.build_targets, config, log_file) - + if exit_code != 0: any_failed = True - + # Count warnings and deprecated - warn_count = count_pattern(log_file, 'warning:') - depr_count = count_pattern(log_file, 'deprecated') + warn_count = count_pattern(log_file, "warning:") + depr_count = count_pattern(log_file, "deprecated") overall_warn_total += warn_count overall_depr_total += depr_count - + # Format status status_symbol = "✅" if exit_code == 0 else f"❌({exit_code})" - + # Format commit/version cell commit_version_cell = format_commit_version_cell(group_name, old_modules, new_modules) - + # Append row to summary row = f"| {group_name} | {status_symbol} | {duration} | {warn_count} | {depr_count} | {commit_version_cell} |\n" - with open(summary_file, 'a') as f: + with open(summary_file, "a") as f: f.write(row) print(row.strip()) - + # Append totals - with open(summary_file, 'a') as f: + with open(summary_file, "a") as f: f.write(f"| TOTAL | | | {overall_warn_total} | {overall_depr_total} | |\n") - + # Print summary - print('::group::Build Summary') - print('=== Build Summary ===') - with open(summary_file, 'r') as f: + 
print("::group::Build Summary") + print("=== Build Summary ===") + with open(summary_file, "r") as f: for line in f: - print(line, end='') - print('::endgroup::') - + print(line, end="") + print("::endgroup::") + # Exit with error if any build failed if any_failed: print("::error::One or more build groups failed. See summary above.") sys.exit(1) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/scripts/known_good/known_good_to_workspace_metadata.py b/scripts/known_good/known_good_to_workspace_metadata.py index 6c4cb93cea..c61ce03391 100644 --- a/scripts/known_good/known_good_to_workspace_metadata.py +++ b/scripts/known_good/known_good_to_workspace_metadata.py @@ -5,20 +5,21 @@ from models.known_good import load_known_good -MODULES_CSV_HEADER = [ - "repo_url", - "name", - "workspace_path", - "version", - "hash", - "branch" -] +MODULES_CSV_HEADER = ["repo_url", "name", "workspace_path", "version", "hash", "branch"] + def main(): - parser = argparse.ArgumentParser(description="Convert known_good.json to workspace metadata files for gita and git submodules.") + parser = argparse.ArgumentParser( + description="Convert known_good.json to workspace metadata files for gita and git submodules." 
+ ) parser.add_argument("--known-good", dest="known_good", default="known_good.json", help="Path to known_good.json") - parser.add_argument("--gita-workspace", dest="gita_workspace", default=".gita-workspace.csv", help="File to output gita workspace metadata") + parser.add_argument( + "--gita-workspace", + dest="gita_workspace", + default=".gita-workspace.csv", + help="File to output gita workspace metadata", + ) args = parser.parse_args() # Load known_good using KnownGood dataclass @@ -28,20 +29,20 @@ def main(): raise SystemExit(f"ERROR: {e}") except ValueError as e: raise SystemExit(f"ERROR: {e}") - + modules = list(known_good.modules.values()) - + gita_metadata = [] for module in modules: if not module.repo: raise RuntimeError(f"Module {module.name}: repo must not be empty") - + # if no hash is given, use branch hash_value = module.hash if module.hash else module.branch - + # workspace_path is not available in known_good.json, default to name of repository workspace_path = module.name - + # gita format: {url},{name},{path},{prop['type']},{repo_flags},{branch} row = [module.repo, module.name, workspace_path, "", "", hash_value] gita_metadata.append(row) @@ -51,5 +52,6 @@ def main(): for row in gita_metadata: writer.writerow(row) + if __name__ == "__main__": main() diff --git a/scripts/known_good/models/known_good.py b/scripts/known_good/models/known_good.py index a0ea976655..2cda4b84be 100644 --- a/scripts/known_good/models/known_good.py +++ b/scripts/known_good/models/known_good.py @@ -2,120 +2,114 @@ from __future__ import annotations +import datetime as dt +import json from dataclasses import dataclass -from typing import Any, Dict from pathlib import Path -import json -import datetime as dt +from typing import Any, Dict from .module import Module @dataclass class KnownGood: - """Known good configuration with modules and metadata. 
- - Module structure: modules = {"group1": {"module1": Module}, "group2": {"module2": Module}} - """ - modules: Dict[str, Dict[str, Module]] - timestamp: str - - @classmethod - def from_dict(cls, data: Dict[str, Any]) -> KnownGood: - """Create a KnownGood instance from a dictionary. - - Expected structure: - {"modules": {"group1": {"score_baselibs": {...}}, "group2": {"score_logging": {...}}}} - - Args: - data: Dictionary containing known_good.json data - - Returns: - KnownGood instance - """ - modules_dict = data.get('modules', {}) - timestamp = data.get('timestamp', '') - - parsed_modules: Dict[str, Dict[str, Module]] = {} - for group_name, group_modules in modules_dict.items(): - if isinstance(group_modules, dict): - modules_list = Module.parse_modules(group_modules) - parsed_modules[group_name] = {m.name: m for m in modules_list} - - return cls(modules=parsed_modules, timestamp=timestamp) - - def to_dict(self) -> Dict[str, Any]: - """Convert KnownGood instance to dictionary for JSON output. - - Returns: - Dictionary with known_good configuration - """ - modules_output = { - group_name: {name: module.to_dict() for name, module in group_modules.items()} - for group_name, group_modules in self.modules.items() - } - - return { - "modules": modules_output, - "timestamp": self.timestamp - } - - def write(self, output_path: Path, dry_run: bool = False) -> None: - """Write known_good data to file or print for dry-run. 
- - Args: - output_path: Path to output file - dry_run: If True, print instead of writing - """ - - # Update timestamp before writing - self.timestamp = dt.datetime.now(dt.timezone.utc).replace(microsecond=0).isoformat() + "Z" - - output_json = json.dumps(self.to_dict(), indent=4, sort_keys=False) + "\n" - - if dry_run: - print(f"\nDry run: would write to {output_path}\n") - print("---- BEGIN UPDATED JSON ----") - print(output_json, end="") - print("---- END UPDATED JSON ----") - else: - with open(output_path, "w", encoding="utf-8") as f: - f.write(output_json) - print(f"Successfully wrote updated known_good.json to {output_path}") + """Known good configuration with modules and metadata. + + Module structure: modules = {"group1": {"module1": Module}, "group2": {"module2": Module}} + """ + + modules: Dict[str, Dict[str, Module]] + timestamp: str + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> KnownGood: + """Create a KnownGood instance from a dictionary. + + Expected structure: + {"modules": {"group1": {"score_baselibs": {...}}, "group2": {"score_logging": {...}}}} + + Args: + data: Dictionary containing known_good.json data + + Returns: + KnownGood instance + """ + modules_dict = data.get("modules", {}) + timestamp = data.get("timestamp", "") + + parsed_modules: Dict[str, Dict[str, Module]] = {} + for group_name, group_modules in modules_dict.items(): + if isinstance(group_modules, dict): + modules_list = Module.parse_modules(group_modules) + parsed_modules[group_name] = {m.name: m for m in modules_list} + + return cls(modules=parsed_modules, timestamp=timestamp) + + def to_dict(self) -> Dict[str, Any]: + """Convert KnownGood instance to dictionary for JSON output. 
+ + Returns: + Dictionary with known_good configuration + """ + modules_output = { + group_name: {name: module.to_dict() for name, module in group_modules.items()} + for group_name, group_modules in self.modules.items() + } + + return {"modules": modules_output, "timestamp": self.timestamp} + + def write(self, output_path: Path, dry_run: bool = False) -> None: + """Write known_good data to file or print for dry-run. + + Args: + output_path: Path to output file + dry_run: If True, print instead of writing + """ + + # Update timestamp before writing + self.timestamp = dt.datetime.now(dt.timezone.utc).replace(microsecond=0).isoformat() + "Z" + + output_json = json.dumps(self.to_dict(), indent=4, sort_keys=False) + "\n" + + if dry_run: + print(f"\nDry run: would write to {output_path}\n") + print("---- BEGIN UPDATED JSON ----") + print(output_json, end="") + print("---- END UPDATED JSON ----") + else: + with open(output_path, "w", encoding="utf-8") as f: + f.write(output_json) + print(f"Successfully wrote updated known_good.json to {output_path}") def load_known_good(path: Path) -> KnownGood: - """Load and parse the known_good.json file. - - Args: - path: Path to known_good.json file - - Returns: - KnownGood instance with parsed modules - """ - - with open(path, "r", encoding="utf-8") as f: - text = f.read() - try: - data = json.loads(text) - except json.JSONDecodeError as e: - lines = text.splitlines() - line = lines[e.lineno - 1] if 0 <= e.lineno - 1 < len(lines) else "" - pointer = " " * (e.colno - 1) + "^" - - hint = "" - if "Expecting value" in e.msg: - hint = "Possible causes: trailing comma, missing value, or extra comma." - - raise ValueError( - f"Invalid JSON at line {e.lineno}, column {e.colno}\n" - f"{line}\n{pointer}\n" - f"{e.msg}. 
{hint}" - ) from None - - if not isinstance(data, dict) or not isinstance(data.get("modules"), dict): - raise ValueError( - f"Invalid known_good.json at {path} (expected object with 'modules' dict)" - ) - - return KnownGood.from_dict(data) + """Load and parse the known_good.json file. + + Args: + path: Path to known_good.json file + + Returns: + KnownGood instance with parsed modules + """ + + with open(path, "r", encoding="utf-8") as f: + text = f.read() + try: + data = json.loads(text) + except json.JSONDecodeError as e: + lines = text.splitlines() + line = lines[e.lineno - 1] if 0 <= e.lineno - 1 < len(lines) else "" + pointer = " " * (e.colno - 1) + "^" + + hint = "" + if "Expecting value" in e.msg: + hint = "Possible causes: trailing comma, missing value, or extra comma." + + raise ValueError( + f"Invalid JSON at line {e.lineno}, column {e.colno}\n{line}\n{pointer}\n{e.msg}. {hint}" + ) from None + + if not isinstance(data, dict) or not isinstance(data.get("modules"), dict): + raise ValueError(f"Invalid known_good.json at {path} (expected object with 'modules' dict)") + + return KnownGood.from_dict(data) diff --git a/scripts/known_good/models/module.py b/scripts/known_good/models/module.py index 0ba65add53..6f439ff9a8 100644 --- a/scripts/known_good/models/module.py +++ b/scripts/known_good/models/module.py @@ -103,9 +103,7 @@ def from_dict(cls, name: str, module_data: Dict[str, Any]) -> Module: "Use either 'hash' (git_override) or 'version' (single_version_override), not both." 
) # Support both 'bazel_patches' and legacy 'patches' keys - bazel_patches = module_data.get("bazel_patches") or module_data.get( - "patches", [] - ) + bazel_patches = module_data.get("bazel_patches") or module_data.get("patches", []) # Parse metadata - if not present or is None/empty dict, use defaults metadata_data = module_data.get("metadata") diff --git a/scripts/known_good/override_known_good_repo.py b/scripts/known_good/override_known_good_repo.py index 70a61ca06a..5b541fd0e5 100755 --- a/scripts/known_good/override_known_good_repo.py +++ b/scripts/known_good/override_known_good_repo.py @@ -12,6 +12,7 @@ module commit pins. The output can then be used with update_module_from_known_good.py to generate the MODULE.bazel file. """ + import argparse import os import re @@ -24,43 +25,40 @@ from models.known_good import KnownGood, load_known_good # Configure logging -logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s') +logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s") def parse_and_apply_overrides(modules: Dict[str, Module], repo_overrides: List[str]) -> int: """ Parse repo override arguments and apply them to modules. - + Supports two formats: 1. module_name@hash (find repo from module) 2. module_name@repo_url@hash (explicit repo with module validation) - + Args: modules: Dictionary mapping module names to Module instances repo_overrides: List of override strings - + Returns: The number of overrides applied. 
""" - repo_url_pattern = re.compile(r'^https://[a-zA-Z0-9.-]+/[a-zA-Z0-9._/-]+\.git$') - hash_pattern = re.compile(r'^[a-fA-F0-9]{7,40}$') + repo_url_pattern = re.compile(r"^https://[a-zA-Z0-9.-]+/[a-zA-Z0-9._/-]+\.git$") + hash_pattern = re.compile(r"^[a-fA-F0-9]{7,40}$") overrides_applied = 0 - + # Parse and validate overrides for entry in repo_overrides: logging.info(f"Override registered: {entry}") parts = entry.split("@") - + if len(parts) == 2: # module_name@hash module_name, commit_hash = parts - + if not hash_pattern.match(commit_hash): - raise SystemExit( - f"Invalid commit hash in '{entry}': {commit_hash}\n" - "Expected 7-40 hex characters" - ) - + raise SystemExit(f"Invalid commit hash in '{entry}': {commit_hash}\nExpected 7-40 hex characters") + # Validate module exists if module_name not in modules: logging.warning( @@ -68,36 +66,30 @@ def parse_and_apply_overrides(modules: Dict[str, Module], repo_overrides: List[s f"Available modules: {', '.join(sorted(modules.keys()))}" ) continue - + module = modules[module_name] old_value = module.version or module.hash - + if commit_hash == module.hash: - logging.info( - f"Module '{module_name}' already at specified commit {commit_hash}, no change needed" - ) + logging.info(f"Module '{module_name}' already at specified commit {commit_hash}, no change needed") else: module.hash = commit_hash module.version = None # Clear version when overriding hash logging.info(f"Applied override to {module_name}: {old_value} -> {commit_hash}") overrides_applied += 1 - + elif len(parts) == 3: # Format: module_name@repo_url@hash module_name, repo_url, commit_hash = parts - + if not hash_pattern.match(commit_hash): - raise SystemExit( - f"Invalid commit hash in '{entry}': {commit_hash}\n" - "Expected 7-40 hex characters" - ) - + raise SystemExit(f"Invalid commit hash in '{entry}': {commit_hash}\nExpected 7-40 hex characters") + if not repo_url_pattern.match(repo_url): raise SystemExit( - f"Invalid repo URL in '{entry}': 
{repo_url}\n" - "Expected format: https://github.com/org/repo.git" + f"Invalid repo URL in '{entry}': {repo_url}\nExpected format: https://github.com/org/repo.git" ) - + # Validate module exists if module_name not in modules: logging.warning( @@ -105,18 +97,18 @@ def parse_and_apply_overrides(modules: Dict[str, Module], repo_overrides: List[s f"Available modules: {', '.join(sorted(modules.keys()))}" ) continue - + module = modules[module_name] old_value = module.version or module.hash - + if module.hash != commit_hash: module.hash = commit_hash module.version = None # Clear version when overriding hash - + module.repo = repo_url logging.info(f"Applied override to {module_name}: {old_value} -> {commit_hash} (repo: {repo_url})") overrides_applied += 1 - + else: raise SystemExit( f"Invalid override spec: {entry}\n" @@ -124,31 +116,31 @@ def parse_and_apply_overrides(modules: Dict[str, Module], repo_overrides: List[s " 1. module_name@commit_hash\n" " 2. module_name@repo_url@commit_hash\n" ) - + return overrides_applied def apply_overrides(known_good: KnownGood, repo_overrides: List[str]) -> KnownGood: """Apply repository commit overrides to the known_good data. 
- + Args: known_good: KnownGood instance to modify repo_overrides: List of override strings - + Returns: Updated KnownGood instance """ # Parse and apply overrides overrides_applied = parse_and_apply_overrides(known_good.modules, repo_overrides) - + if overrides_applied == 0: logging.warning("No overrides were applied to any modules") else: logging.info(f"Successfully applied {overrides_applied} override(s)") - + # Update timestamp known_good.timestamp = dt.datetime.now(dt.timezone.utc).replace(microsecond=0).isoformat() + "Z" - + return known_good @@ -176,18 +168,16 @@ def main() -> None: --output known_good.updated.json \ --module-override score_baselibs@abc123 \ --module-override score_communication@def456 - """ + """, ) - + parser.add_argument( - "--known", - default="known_good.json", - help="Path to input known_good.json file (default: known_good.json)" + "--known", default="known_good.json", help="Path to input known_good.json file (default: known_good.json)" ) parser.add_argument( "--output", default="known_good.updated.json", - help="Path to output JSON file (default: known_good.updated.json)" + help="Path to output JSON file (default: known_good.updated.json)", ) parser.add_argument( "--module-override", @@ -199,25 +189,17 @@ def main() -> None: "module_name@repo_url@hash. Can be specified multiple times." 
), ) - parser.add_argument( - "--dry-run", - action="store_true", - help="Print the result instead of writing to file" - ) - parser.add_argument( - "-v", "--verbose", - action="store_true", - help="Enable verbose logging" - ) - + parser.add_argument("--dry-run", action="store_true", help="Print the result instead of writing to file") + parser.add_argument("-v", "--verbose", action="store_true", help="Enable verbose logging") + args = parser.parse_args() - + if args.verbose: logging.getLogger().setLevel(logging.DEBUG) - + known_path = os.path.abspath(args.known) output_path = os.path.abspath(args.output) - + # Load, update, and output logging.info(f"Loading {known_path}") try: @@ -226,7 +208,7 @@ def main() -> None: raise SystemExit(f"ERROR: {e}") except ValueError as e: raise SystemExit(f"ERROR: {e}") - + if not args.module_overrides: parser.error("at least one --module-override is required") diff --git a/scripts/known_good/update_module_from_known_good.py b/scripts/known_good/update_module_from_known_good.py index 7812347cd2..70cd2e1333 100755 --- a/scripts/known_good/update_module_from_known_good.py +++ b/scripts/known_good/update_module_from_known_good.py @@ -30,9 +30,7 @@ logging.basicConfig(level=logging.WARNING, format="%(levelname)s: %(message)s") -def generate_git_override_blocks( - modules: List[Module], repo_commit_dict: Dict[str, str] -) -> List[str]: +def generate_git_override_blocks(modules: List[Module], repo_commit_dict: Dict[str, str]) -> List[str]: """Generate bazel_dep and git_override blocks for each module.""" blocks = [] @@ -123,7 +121,9 @@ def generate_coverage_blocks(modules: List[Module]) -> List[str]: continue if module.metadata.exclude_test_targets: - excluded_tests = f" {' '.join([f'-@{module.name}{target}' for target in module.metadata.exclude_test_targets])}" + excluded_tests = ( + f" {' '.join([f'-@{module.name}{target}' for target in module.metadata.exclude_test_targets])}" + ) else: excluded_tests = "" @@ -179,10 +179,7 @@ def 
generate_file_content( if args.override_type == "git": blocks = generate_git_override_blocks(modules, repo_commit_dict) else: - header += ( - "# Note: This file uses local_path overrides. Ensure that local paths are set up correctly.\n" - "\n" - ) + header += "# Note: This file uses local_path overrides. Ensure that local paths are set up correctly.\n\n" blocks = generate_local_override_blocks(modules) elif file_type == "build": blocks = generate_coverage_blocks(modules) @@ -240,9 +237,7 @@ def main() -> None: action="store_true", help="Print generated content instead of writing to file", ) - parser.add_argument( - "-v", "--verbose", action="store_true", help="Enable verbose logging" - ) + parser.add_argument("-v", "--verbose", action="store_true", help="Enable verbose logging") parser.add_argument( "--repo-override", action="append", @@ -268,9 +263,7 @@ def main() -> None: # Parse repo overrides repo_commit_dict = {} if args.repo_override: - repo_pattern = re.compile( - r"https://[a-zA-Z0-9.-]+/[a-zA-Z0-9._/-]+\.git@[a-fA-F0-9]{7,40}$" - ) + repo_pattern = re.compile(r"https://[a-zA-Z0-9.-]+/[a-zA-Z0-9._/-]+\.git@[a-fA-F0-9]{7,40}$") for entry in args.repo_override: if not repo_pattern.match(entry): raise SystemExit( @@ -321,34 +314,26 @@ def main() -> None: print("---- BEGIN GENERATED CONTENT FOR MODULE ----") print(content_module) print("---- END GENERATED CONTENT FOR MODULE ----") - print( - f"\nGenerated {len(modules)} {args.override_type}_override entries for group '{group_name}'" - ) + print(f"\nGenerated {len(modules)} {args.override_type}_override entries for group '{group_name}'") else: with open(output_path_modules, "w", encoding="utf-8") as f: f.write(content_module) generated_files.append(output_path_modules) total_module_count += len(modules) - print( - f"Generated {output_path_modules} with {len(modules)} {args.override_type}_override entries" - ) + print(f"Generated {output_path_modules} with {len(modules)} {args.override_type}_override entries") # 
Generate file content of BUILD coverage files if "target_sw" not in group_name: continue # Only generate coverage for software modules - content_build = generate_file_content( - args, modules, repo_commit_dict, known_good.timestamp, file_type="build" - ) + content_build = generate_file_content(args, modules, repo_commit_dict, known_good.timestamp, file_type="build") if args.dry_run: print(f"\nDry run: would write to {output_path_coverage}\n") print("---- BEGIN GENERATED CONTENT FOR BUILD ----") print(content_build) print("---- END GENERATED CONTENT FOR BUILD ----") - print( - f"\nGenerated {len(modules)} {args.override_type}_override entries for group '{group_name}'" - ) + print(f"\nGenerated {len(modules)} {args.override_type}_override entries for group '{group_name}'") else: with open(output_path_coverage, "w", encoding="utf-8") as f: f.write(content_build) @@ -356,9 +341,7 @@ def main() -> None: print(f"Generated {output_path_coverage}") if not args.dry_run and generated_files: - print( - f"\nSuccessfully generated {len(generated_files)} file(s) with {total_module_count} total modules" - ) + print(f"\nSuccessfully generated {len(generated_files)} file(s) with {total_module_count} total modules") if __name__ == "__main__": diff --git a/scripts/known_good/update_module_latest.py b/scripts/known_good/update_module_latest.py index d5184bd2f0..8c341ed458 100755 --- a/scripts/known_good/update_module_latest.py +++ b/scripts/known_good/update_module_latest.py @@ -35,144 +35,150 @@ from models.known_good import load_known_good try: - from github import Github, GithubException - HAS_PYGITHUB = True + from github import Github, GithubException + + HAS_PYGITHUB = True except ImportError: - HAS_PYGITHUB = False - Github = None - GithubException = None + HAS_PYGITHUB = False + Github = None + GithubException = None def fetch_latest_commit(owner_repo: str, branch: str, token: str | None) -> str: - """Fetch latest commit sha for given owner_repo & branch using PyGithub.""" - 
if not HAS_PYGITHUB: - raise RuntimeError("PyGithub not installed. Install it with: pip install PyGithub") - try: - gh = Github(token) if token else Github() - repo = gh.get_repo(owner_repo) - branch_obj = repo.get_branch(branch) - return branch_obj.commit.sha - except GithubException as e: - raise RuntimeError(f"GitHub API error for {owner_repo}:{branch} - {e.status}: {e.data.get('message', str(e))}") from e - except Exception as e: - raise RuntimeError(f"Error fetching {owner_repo}:{branch} - {e}") from e + """Fetch latest commit sha for given owner_repo & branch using PyGithub.""" + if not HAS_PYGITHUB: + raise RuntimeError("PyGithub not installed. Install it with: pip install PyGithub") + try: + gh = Github(token) if token else Github() + repo = gh.get_repo(owner_repo) + branch_obj = repo.get_branch(branch) + return branch_obj.commit.sha + except GithubException as e: + raise RuntimeError( + f"GitHub API error for {owner_repo}:{branch} - {e.status}: {e.data.get('message', str(e))}" + ) from e + except Exception as e: + raise RuntimeError(f"Error fetching {owner_repo}:{branch} - {e}") from e def fetch_latest_commit_gh(owner_repo: str, branch: str) -> str: - """Fetch latest commit using GitHub CLI 'gh' if installed. - - Uses: gh api repos/{owner_repo}/branches/{branch} --jq .commit.sha - Raises RuntimeError on failure. - """ - if not shutil.which("gh"): - raise RuntimeError("'gh' CLI not found in PATH") - cmd = [ - "gh", - "api", - f"repos/{owner_repo}/branches/{branch}", - "--jq", - ".commit.sha", - ] - try: - res = subprocess.run(cmd, check=True, capture_output=True, text=True, timeout=30) - except subprocess.CalledProcessError as e: - raise RuntimeError(f"gh api failed: {e.stderr.strip() or e}") from e - sha = res.stdout.strip() - if not sha: - raise RuntimeError("Empty sha returned by gh") - return sha + """Fetch latest commit using GitHub CLI 'gh' if installed. 
+ + Uses: gh api repos/{owner_repo}/branches/{branch} --jq .commit.sha + Raises RuntimeError on failure. + """ + if not shutil.which("gh"): + raise RuntimeError("'gh' CLI not found in PATH") + cmd = [ + "gh", + "api", + f"repos/{owner_repo}/branches/{branch}", + "--jq", + ".commit.sha", + ] + try: + res = subprocess.run(cmd, check=True, capture_output=True, text=True, timeout=30) + except subprocess.CalledProcessError as e: + raise RuntimeError(f"gh api failed: {e.stderr.strip() or e}") from e + sha = res.stdout.strip() + if not sha: + raise RuntimeError("Empty sha returned by gh") + return sha def parse_args(argv: list[str]) -> argparse.Namespace: - p = argparse.ArgumentParser(description="Update module hashes to latest commit on branch") - p.add_argument( - "--known-good", - default="known_good.json", - help="Path to known_good.json file (default: known_good.json in CWD)", - ) - p.add_argument("--branch", default="main", help="Git branch to fetch latest commits from (default: main)") - p.add_argument("--output", help="Optional output path to write updated JSON") - p.add_argument("--fail-fast", action="store_true", help="Stop on first failure instead of continuing") - p.add_argument("--no-gh", action="store_true", help="Disable GitHub CLI usage even if installed; fall back to HTTP API; GITHUB_TOKEN has to be known in the environment") - return p.parse_args(argv) + p = argparse.ArgumentParser(description="Update module hashes to latest commit on branch") + p.add_argument( + "--known-good", + default="known_good.json", + help="Path to known_good.json file (default: known_good.json in CWD)", + ) + p.add_argument("--branch", default="main", help="Git branch to fetch latest commits from (default: main)") + p.add_argument("--output", help="Optional output path to write updated JSON") + p.add_argument("--fail-fast", action="store_true", help="Stop on first failure instead of continuing") + p.add_argument( + "--no-gh", + action="store_true", + help="Disable GitHub CLI 
usage even if installed; fall back to HTTP API; GITHUB_TOKEN has to be known in the environment", + ) + return p.parse_args(argv) def main(argv: list[str]) -> int: - args = parse_args(argv) - try: - known_good = load_known_good(Path(args.known_good)) - except FileNotFoundError as e: - print(f"ERROR: {e}", file=sys.stderr) - return 3 - except ValueError as e: - print(f"ERROR: {e}", file=sys.stderr) - return 3 - except json.JSONDecodeError as e: - print(f"ERROR: Invalid JSON syntax: {e}", file=sys.stderr) - return 3 - - if not known_good.modules: - print("ERROR: No modules found to update.", file=sys.stderr) - return 3 - - token = os.environ.get("GITHUB_TOKEN") - failures = 0 - # Default: use gh if available unless --no-gh specified - use_gh = (not args.no_gh) and shutil.which("gh") is not None - - # If PyGithub is not available and gh CLI is not available, error out - if not use_gh and not HAS_PYGITHUB: - print("ERROR: Neither 'gh' CLI nor PyGithub library found.", file=sys.stderr) - print("Please install PyGithub (pip install PyGithub) or install GitHub CLI.", file=sys.stderr) - return 3 - - if not args.no_gh and not use_gh: - print("INFO: 'gh' CLI not found; using direct GitHub API", file=sys.stderr) - if args.no_gh and shutil.which("gh") is not None: - print("INFO: --no-gh specified; ignoring installed 'gh' CLI", file=sys.stderr) - - for mod in known_good.modules.values(): - if mod.pin_version: - print(f"{mod.name}: pinned, skipping") - continue - - try: - branch = mod.branch if mod.branch else args.branch - if use_gh: - latest = fetch_latest_commit_gh(mod.owner_repo, branch) - else: - latest = fetch_latest_commit(mod.owner_repo, branch, token) - - old_hash = mod.hash - if latest != old_hash: - mod.hash = latest - mod.version = None # Clear version when hash changes - if mod.version: - print(f"{mod.name}: {mod.version} -> {latest[:8]} (branch {branch})") - else: - print(f"{mod.name}: {old_hash[:8]} -> {latest[:8]} (branch {branch})") - else: - print(f"{mod.name}: 
{old_hash[:8]} (no update)") - except Exception as e: # noqa: BLE001 - failures += 1 - print(f"ERROR {mod.name}: {e}", file=sys.stderr) - if args.fail_fast: - break - - if args.output: - try: - known_good.write(Path(args.output)) - print(f"Updated JSON written to {args.output}") - except OSError as e: - print(f"ERROR: Failed writing output file: {e}", file=sys.stderr) - return 3 - - if failures: - print(f"Completed with {failures} failure(s).", file=sys.stderr) - return 2 - return 0 + args = parse_args(argv) + try: + known_good = load_known_good(Path(args.known_good)) + except FileNotFoundError as e: + print(f"ERROR: {e}", file=sys.stderr) + return 3 + except ValueError as e: + print(f"ERROR: {e}", file=sys.stderr) + return 3 + except json.JSONDecodeError as e: + print(f"ERROR: Invalid JSON syntax: {e}", file=sys.stderr) + return 3 + + if not known_good.modules: + print("ERROR: No modules found to update.", file=sys.stderr) + return 3 + + token = os.environ.get("GITHUB_TOKEN") + failures = 0 + # Default: use gh if available unless --no-gh specified + use_gh = (not args.no_gh) and shutil.which("gh") is not None + + # If PyGithub is not available and gh CLI is not available, error out + if not use_gh and not HAS_PYGITHUB: + print("ERROR: Neither 'gh' CLI nor PyGithub library found.", file=sys.stderr) + print("Please install PyGithub (pip install PyGithub) or install GitHub CLI.", file=sys.stderr) + return 3 + + if not args.no_gh and not use_gh: + print("INFO: 'gh' CLI not found; using direct GitHub API", file=sys.stderr) + if args.no_gh and shutil.which("gh") is not None: + print("INFO: --no-gh specified; ignoring installed 'gh' CLI", file=sys.stderr) + + for mod in known_good.modules.values(): + if mod.pin_version: + print(f"{mod.name}: pinned, skipping") + continue + + try: + branch = mod.branch if mod.branch else args.branch + if use_gh: + latest = fetch_latest_commit_gh(mod.owner_repo, branch) + else: + latest = fetch_latest_commit(mod.owner_repo, branch, token) 
+ + old_hash = mod.hash + if latest != old_hash: + mod.hash = latest + mod.version = None # Clear version when hash changes + if mod.version: + print(f"{mod.name}: {mod.version} -> {latest[:8]} (branch {branch})") + else: + print(f"{mod.name}: {old_hash[:8]} -> {latest[:8]} (branch {branch})") + else: + print(f"{mod.name}: {old_hash[:8]} (no update)") + except Exception as e: # noqa: BLE001 + failures += 1 + print(f"ERROR {mod.name}: {e}", file=sys.stderr) + if args.fail_fast: + break + + if args.output: + try: + known_good.write(Path(args.output)) + print(f"Updated JSON written to {args.output}") + except OSError as e: + print(f"ERROR: Failed writing output file: {e}", file=sys.stderr) + return 3 + + if failures: + print(f"Completed with {failures} failure(s).", file=sys.stderr) + return 2 + return 0 if __name__ == "__main__": # pragma: no cover - sys.exit(main(sys.argv[1:])) - + sys.exit(main(sys.argv[1:])) diff --git a/scripts/models/build_config.py b/scripts/models/build_config.py index b7975f6be8..993d91ff45 100644 --- a/scripts/models/build_config.py +++ b/scripts/models/build_config.py @@ -9,6 +9,7 @@ @dataclass class BuildModuleConfig: """Configuration for a build module.""" + name: str build_targets: str test_targets: Optional[str] = None @@ -16,25 +17,23 @@ class BuildModuleConfig: def load_build_config(config_path: Path) -> Dict[str, BuildModuleConfig]: """Load build configuration from JSON file. 
- + Args: config_path: Path to build_config.json file - + Returns: Dictionary mapping module names to BuildModuleConfig instances """ if not config_path.exists(): raise FileNotFoundError(f"Build config file not found: {config_path}") - - with open(config_path, 'r') as f: + + with open(config_path, "r") as f: data = json.load(f) - - modules = data.get('modules', {}) + + modules = data.get("modules", {}) return { name: BuildModuleConfig( - name=name, - build_targets=module_data.get('build_targets', ''), - test_targets=module_data.get('test_targets') + name=name, build_targets=module_data.get("build_targets", ""), test_targets=module_data.get("test_targets") ) for name, module_data in modules.items() } diff --git a/scripts/quality_runners.py b/scripts/quality_runners.py index 2f37818ce0..b542b9b1e7 100644 --- a/scripts/quality_runners.py +++ b/scripts/quality_runners.py @@ -79,12 +79,8 @@ def cpp_coverage(module: Module, artifact_dir: Path) -> ProcessResult: output_dir = artifact_dir / "cpp" / module.name output_dir.mkdir(parents=True, exist_ok=True) # Find input locations - bazel_coverage_output_directory = run_command( - ["bazel", "info", "output_path"] - ).stdout.strip() - bazel_source_directory = run_command( - ["bazel", "info", "output_base"] - ).stdout.strip() + bazel_coverage_output_directory = run_command(["bazel", "info", "output_path"]).stdout.strip() + bazel_source_directory = run_command(["bazel", "info", "output_base"]).stdout.strip() genhtml_call = [ "genhtml", @@ -132,11 +128,7 @@ def generate_markdown_report( # Build rows rows = [] for name, stats in data.items(): - rows.append( - "| " - + " | ".join([name] + [str(stats.get(col, "")) for col in columns[1:]]) - + " |" - ) + rows.append("| " + " | ".join([name] + [str(stats.get(col, "")) for col in columns[1:]]) + " |") md = "\n".join([title, header, separator] + rows + [""]) output_path.write_text(md) @@ -246,9 +238,7 @@ def run_command(command: list[str], **kwargs) -> ProcessResult: p.wait() raise - 
return ProcessResult( - stdout="".join(stdout_data), stderr="".join(stderr_data), exit_code=exit_code - ) + return ProcessResult(stdout="".join(stdout_data), stderr="".join(stderr_data), exit_code=exit_code) def parse_arguments() -> argparse.Namespace: @@ -286,9 +276,7 @@ def main() -> bool: unit_tests_summary, coverage_summary = {}, {} if args.modules_to_test: - print_centered( - f"QR: User requested tests only for specified modules: {', '.join(args.modules_to_test)}" - ) + print_centered(f"QR: User requested tests only for specified modules: {', '.join(args.modules_to_test)}") for module in known.modules["target_sw"].values(): if args.modules_to_test and module.name not in args.modules_to_test: @@ -309,9 +297,7 @@ def main() -> bool: "score_orchestrator", ] # Known issues with coverage extraction for these modules, mostly proc_macro if module.name in DISABLED_RUST_COVERAGE: - print_centered( - f"QR: Skipping rust coverage extraction for module {module.name} due to known issues" - ) + print_centered(f"QR: Skipping rust coverage extraction for module {module.name} due to known issues") continue coverage_summary[f"{module.name}_rust"] = run_rust_coverage_extraction( module=module, output_path=args.coverage_output_dir @@ -340,9 +326,7 @@ def main() -> bool: # Check all exit codes and return non-zero if any test or coverage extraction failed return any( result_ut["exit_code"] != 0 or result_cov["exit_code"] != 0 - for result_ut, result_cov in zip( - unit_tests_summary.values(), coverage_summary.values() - ) + for result_ut, result_cov in zip(unit_tests_summary.values(), coverage_summary.values()) ) From c4b8a6d36fb1838436545cbffd990121faf873b6 Mon Sep 17 00:00:00 2001 From: Piotr Korkus Date: Fri, 27 Feb 2026 11:05:36 +0100 Subject: [PATCH 03/10] format: apply for rust --- .../src/internals/kyron/runtime_helper.rs | 30 +++---- .../src/internals/persistency/kvs_instance.rs | 4 +- .../test_scenarios/rust/src/main.rs | 7 +- .../basic/orchestration_with_persistency.rs | 
14 +--- .../persistency/multiple_kvs_per_app.rs | 14 +--- showcases/cli/main.rs | 28 +++---- showcases/orchestration_persistency/main.rs | 79 ++++++------------- 7 files changed, 56 insertions(+), 120 deletions(-) diff --git a/feature_integration_tests/test_scenarios/rust/src/internals/kyron/runtime_helper.rs b/feature_integration_tests/test_scenarios/rust/src/internals/kyron/runtime_helper.rs index 3713d23566..6054c0ce3a 100644 --- a/feature_integration_tests/test_scenarios/rust/src/internals/kyron/runtime_helper.rs +++ b/feature_integration_tests/test_scenarios/rust/src/internals/kyron/runtime_helper.rs @@ -138,34 +138,24 @@ impl Runtime { for dedicated_worker in dedicated_workers { // Create thread parameters object. let mut async_rt_thread_params = AsyncRtThreadParameters::default(); - if let Some(thread_priority) = - dedicated_worker.thread_parameters.thread_priority - { + if let Some(thread_priority) = dedicated_worker.thread_parameters.thread_priority { async_rt_thread_params = async_rt_thread_params.priority(thread_priority); } - if let Some(thread_affinity) = - &dedicated_worker.thread_parameters.thread_affinity - { + if let Some(thread_affinity) = &dedicated_worker.thread_parameters.thread_affinity { async_rt_thread_params = async_rt_thread_params.affinity(thread_affinity); } - if let Some(thread_stack_size) = - dedicated_worker.thread_parameters.thread_stack_size - { - async_rt_thread_params = - async_rt_thread_params.stack_size(thread_stack_size); + if let Some(thread_stack_size) = dedicated_worker.thread_parameters.thread_stack_size { + async_rt_thread_params = async_rt_thread_params.stack_size(thread_stack_size); } - if let Some(thread_scheduler) = - dedicated_worker.thread_parameters.thread_scheduler - { - async_rt_thread_params = - async_rt_thread_params.scheduler_type(thread_scheduler); + if let Some(thread_scheduler) = dedicated_worker.thread_parameters.thread_scheduler { + async_rt_thread_params = 
async_rt_thread_params.scheduler_type(thread_scheduler); } // Create `UniqueWorkerId`. let unique_worker_id = UniqueWorkerId::from(&dedicated_worker.id); - exec_engine_builder = exec_engine_builder - .with_dedicated_worker(unique_worker_id, async_rt_thread_params); + exec_engine_builder = + exec_engine_builder.with_dedicated_worker(unique_worker_id, async_rt_thread_params); } } @@ -173,8 +163,6 @@ impl Runtime { async_rt_builder = builder; } - async_rt_builder - .build() - .expect("Failed to build async runtime") + async_rt_builder.build().expect("Failed to build async runtime") } } diff --git a/feature_integration_tests/test_scenarios/rust/src/internals/persistency/kvs_instance.rs b/feature_integration_tests/test_scenarios/rust/src/internals/persistency/kvs_instance.rs index 6d75b06276..9e7cea2f4f 100644 --- a/feature_integration_tests/test_scenarios/rust/src/internals/persistency/kvs_instance.rs +++ b/feature_integration_tests/test_scenarios/rust/src/internals/persistency/kvs_instance.rs @@ -28,9 +28,7 @@ pub fn kvs_instance(kvs_parameters: KvsParameters) -> Result { } else if let Some(snapshot_max_count) = kvs_parameters.snapshot_max_count { // Configure snapshot_max_count via backend // change back to snapshot_max_count, if https://github.com/eclipse-score/persistency/issues/222 is resolved. 
- let backend = JsonBackendBuilder::new() - .snapshot_max_count(snapshot_max_count) - .build(); + let backend = JsonBackendBuilder::new().snapshot_max_count(snapshot_max_count).build(); builder = builder.backend(Box::new(backend)); } diff --git a/feature_integration_tests/test_scenarios/rust/src/main.rs b/feature_integration_tests/test_scenarios/rust/src/main.rs index 1b6fd78e5c..f676641198 100644 --- a/feature_integration_tests/test_scenarios/rust/src/main.rs +++ b/feature_integration_tests/test_scenarios/rust/src/main.rs @@ -26,9 +26,7 @@ struct NumericUnixTime; impl FormatTime for NumericUnixTime { fn format_time(&self, w: &mut tracing_subscriber::fmt::format::Writer<'_>) -> std::fmt::Result { - let now = SystemTime::now() - .duration_since(UNIX_EPOCH) - .unwrap_or_default(); + let now = SystemTime::now().duration_since(UNIX_EPOCH).unwrap_or_default(); write!(w, "{}", now.as_secs()) } } @@ -41,8 +39,7 @@ fn init_tracing_subscriber() { .json() .finish(); - tracing::subscriber::set_global_default(subscriber) - .expect("Setting default subscriber failed!"); + tracing::subscriber::set_global_default(subscriber).expect("Setting default subscriber failed!"); } fn main() -> Result<(), String> { diff --git a/feature_integration_tests/test_scenarios/rust/src/scenarios/basic/orchestration_with_persistency.rs b/feature_integration_tests/test_scenarios/rust/src/scenarios/basic/orchestration_with_persistency.rs index cb567d976a..3fb1f8cb1e 100644 --- a/feature_integration_tests/test_scenarios/rust/src/scenarios/basic/orchestration_with_persistency.rs +++ b/feature_integration_tests/test_scenarios/rust/src/scenarios/basic/orchestration_with_persistency.rs @@ -85,9 +85,7 @@ async fn kvs_save_cycle_number(path: String) -> Result<(), UserErrValue> { builder = builder.backend(Box::new(backend)); } else if let Some(max_count) = params.snapshot_max_count { // Configure snapshot_max_count via backend even without custom dir - let backend = JsonBackendBuilder::new() - 
.snapshot_max_count(max_count) - .build(); + let backend = JsonBackendBuilder::new().snapshot_max_count(max_count).build(); builder = builder.backend(Box::new(backend)); } @@ -98,8 +96,7 @@ async fn kvs_save_cycle_number(path: String) -> Result<(), UserErrValue> { let key = "run_cycle_number"; let last_cycle_number: u32 = kvs.get_value_as::(key).unwrap_or_else(|_| 0_u32); - kvs.set_value(key, last_cycle_number + 1) - .expect("Failed to set value"); + kvs.set_value(key, last_cycle_number + 1).expect("Failed to set value"); let value_read = kvs.get_value_as::(key).expect("Failed to read value"); kvs.flush().expect("Failed to flush KVS"); @@ -111,8 +108,7 @@ async fn kvs_save_cycle_number(path: String) -> Result<(), UserErrValue> { fn single_sequence_design(kvs_path: String) -> Result { let mut design = Design::new("SingleSequence".into(), DesignConfig::default()); - let kvs_cycle_tag = - design.register_invoke_async("KVS save cycle".into(), persistency_task!(kvs_path))?; + let kvs_cycle_tag = design.register_invoke_async("KVS save cycle".into(), persistency_task!(kvs_path))?; // Create a program with actions design.add_program(file!(), move |_design_instance, builder| { @@ -143,9 +139,7 @@ impl Scenario for OrchestrationWithPersistency { .add_design(single_sequence_design(logic.kvs_path).expect("Failed to create design")) .design_done(); - let mut program_manager = orch - .into_program_manager() - .expect("Failed to create programs"); + let mut program_manager = orch.into_program_manager().expect("Failed to create programs"); let mut programs = program_manager.get_programs(); rt.block_on(async move { diff --git a/feature_integration_tests/test_scenarios/rust/src/scenarios/persistency/multiple_kvs_per_app.rs b/feature_integration_tests/test_scenarios/rust/src/scenarios/persistency/multiple_kvs_per_app.rs index 18ff276744..61407b2111 100644 --- a/feature_integration_tests/test_scenarios/rust/src/scenarios/persistency/multiple_kvs_per_app.rs +++ 
b/feature_integration_tests/test_scenarios/rust/src/scenarios/persistency/multiple_kvs_per_app.rs @@ -31,10 +31,8 @@ impl Scenario for MultipleKvsPerApp { fn run(&self, input: &str) -> Result<(), String> { // Parameters. let v: Value = serde_json::from_str(input).expect("Failed to parse input string"); - let params1 = - KvsParameters::from_value(&v["kvs_parameters_1"]).expect("Failed to parse parameters"); - let params2 = - KvsParameters::from_value(&v["kvs_parameters_2"]).expect("Failed to parse parameters"); + let params1 = KvsParameters::from_value(&v["kvs_parameters_1"]).expect("Failed to parse parameters"); + let params2 = KvsParameters::from_value(&v["kvs_parameters_2"]).expect("Failed to parse parameters"); let logic = TestInput::from_json(input).expect("Failed to parse input string"); { // Create first KVS instance. @@ -59,17 +57,13 @@ impl Scenario for MultipleKvsPerApp { let kvs1 = kvs_instance(params1).expect("Failed to create KVS1 instance"); let kvs2 = kvs_instance(params2).expect("Failed to create KVS2 instance"); - let value1 = kvs1 - .get_value_as::(&logic.key) - .expect("Failed to read kvs1 value"); + let value1 = kvs1.get_value_as::(&logic.key).expect("Failed to read kvs1 value"); info!( instance = field::debug(kvs1.parameters().instance_id), key = logic.key, value = value1 ); - let value2 = kvs2 - .get_value_as::(&logic.key) - .expect("Failed to read kvs2 value"); + let value2 = kvs2.get_value_as::(&logic.key).expect("Failed to read kvs2 value"); info!( instance = field::debug(kvs2.parameters().instance_id), key = logic.key, diff --git a/showcases/cli/main.rs b/showcases/cli/main.rs index 3b8fdc273b..fae93bb8d2 100644 --- a/showcases/cli/main.rs +++ b/showcases/cli/main.rs @@ -94,11 +94,7 @@ fn main() -> Result<()> { anyhow::bail!( "No examples found matching: {}. 
Available examples: {}", examples_str, - configs - .iter() - .map(|c| c.name.as_str()) - .collect::>() - .join(", ") + configs.iter().map(|c| c.name.as_str()).collect::>().join(", ") ); } @@ -121,9 +117,10 @@ fn main() -> Result<()> { .map(|(i, c)| (i, c.name.clone(), c.description.clone())) .collect(); - let selected: Vec = multiselect("Select examples to run (use space to select (multiselect supported), enter to run examples):") - .items(&options) - .interact()?; + let selected: Vec = + multiselect("Select examples to run (use space to select (multiselect supported), enter to run examples):") + .items(&options) + .interact()?; if selected.is_empty() { outro("No examples selected. Goodbye!")?; @@ -157,17 +154,16 @@ fn visit_dir(dir: &Path, configs: &mut Vec) -> Result<()> { } if is_score_file(&path) { - let content = - fs::read_to_string(&path).with_context(|| format!("Failed reading {:?}", path))?; - let value: serde_json::Value = serde_json::from_str(&content) - .with_context(|| format!("Invalid JSON in {:?}", path))?; + let content = fs::read_to_string(&path).with_context(|| format!("Failed reading {:?}", path))?; + let value: serde_json::Value = + serde_json::from_str(&content).with_context(|| format!("Invalid JSON in {:?}", path))?; if value.is_array() { - let found: Vec = serde_json::from_value(value) - .with_context(|| format!("Invalid JSON array in {:?}", path))?; + let found: Vec = + serde_json::from_value(value).with_context(|| format!("Invalid JSON array in {:?}", path))?; configs.extend(found); } else { - let config: ScoreConfig = serde_json::from_value(value) - .with_context(|| format!("Invalid JSON in {:?}", path))?; + let config: ScoreConfig = + serde_json::from_value(value).with_context(|| format!("Invalid JSON in {:?}", path))?; configs.push(config); } } diff --git a/showcases/orchestration_persistency/main.rs b/showcases/orchestration_persistency/main.rs index f9bf644600..a293daea87 100644 --- a/showcases/orchestration_persistency/main.rs +++ 
b/showcases/orchestration_persistency/main.rs @@ -18,9 +18,7 @@ use kyron::runtime::*; use kyron_foundation::prelude::*; use logging_tracing::LogAndTraceBuilder; use orchestration::{ - actions::{ - invoke::Invoke, sequence::SequenceBuilder, sync::SyncBuilder, trigger::TriggerBuilder, - }, + actions::{invoke::Invoke, sequence::SequenceBuilder, sync::SyncBuilder, trigger::TriggerBuilder}, api::{design::Design, Orchestration}, common::DesignConfig, prelude::InvokeResult, @@ -58,13 +56,11 @@ async fn on_shutdown() -> InvokeResult { // Instance ID for KVS object instances. let instance_id = InstanceId(0); - + // Configure backend with directory path (workaround: KvsBuilder::dir() not available in Rust) // change back to dir, if https://github.com/eclipse-score/persistency/issues/222 is resolved. - let backend = JsonBackendBuilder::new() - .working_dir(PathBuf::from("/tmp")) - .build(); - + let backend = JsonBackendBuilder::new().working_dir(PathBuf::from("/tmp")).build(); + let builder = KvsBuilder::new(instance_id) .backend(Box::new(backend)) .kvs_load(KvsLoad::Optional); @@ -77,15 +73,13 @@ async fn on_shutdown() -> InvokeResult { let instance_id = kvs.parameters().instance_id; let snapshot_id = SnapshotId(0); - match kvs.parameters().backend.as_any().downcast_ref(){ - Some(backend ) => { + match kvs.parameters().backend.as_any().downcast_ref() { + Some(backend) => { let backend = backend as &JsonBackend; let filename = backend.kvs_file_path(instance_id, snapshot_id); info!("KVS snapshot saved to file: {:?}", filename); }, - None => { - - }, + None => {}, }; Ok(()) @@ -101,25 +95,16 @@ fn camera_processing_component_design() -> Result { design.register_invoke_async("on_camera_image_ready".into(), on_camera_image_ready)?; // Create a program describing task chain - design.add_program( - "CameraProcessingProgram", - move |design_instance, builder| { - builder.with_run_action( - SequenceBuilder::new() - .with_step(SyncBuilder::from_design(CAMERA_IMG_READY, 
design_instance)) - .with_step(Invoke::from_tag( - &on_camera_image_ready_tag, - design_instance.config(), - )) - .with_step(TriggerBuilder::from_design( - CAMERA_IMG_PROCESSED, - design_instance, - )) - .build(), - ); - Ok(()) - }, - ); + design.add_program("CameraProcessingProgram", move |design_instance, builder| { + builder.with_run_action( + SequenceBuilder::new() + .with_step(SyncBuilder::from_design(CAMERA_IMG_READY, design_instance)) + .with_step(Invoke::from_tag(&on_camera_image_ready_tag, design_instance.config())) + .with_step(TriggerBuilder::from_design(CAMERA_IMG_PROCESSED, design_instance)) + .build(), + ); + Ok(()) + }); Ok(design) } @@ -129,8 +114,7 @@ fn detect_object_component_design() -> Result { // Register events and invoke actions in design so it knows how to build task chains design.register_event(CAMERA_IMG_PROCESSED.into())?; - let detect_objects_tag = - design.register_invoke_async("detect_objects".into(), detect_objects)?; + let detect_objects_tag = design.register_invoke_async("detect_objects".into(), detect_objects)?; let on_shutdown_tag = design.register_invoke_async("on_shutdown".into(), on_shutdown)?; // Create a program describing task chain @@ -138,14 +122,8 @@ fn detect_object_component_design() -> Result { builder .with_run_action( SequenceBuilder::new() - .with_step(SyncBuilder::from_design( - CAMERA_IMG_PROCESSED, - design_instance, - )) - .with_step(Invoke::from_tag( - &detect_objects_tag, - design_instance.config(), - )) + .with_step(SyncBuilder::from_design(CAMERA_IMG_PROCESSED, design_instance)) + .with_step(Invoke::from_tag(&detect_objects_tag, design_instance.config())) .build(), ) .with_stop_action( @@ -166,23 +144,14 @@ fn main() { .build(); // Create runtime - let (builder, _engine_id) = kyron::runtime::RuntimeBuilder::new().with_engine( - ExecutionEngineBuilder::new() - .task_queue_size(256) - .workers(2), - ); + let (builder, _engine_id) = kyron::runtime::RuntimeBuilder::new() + 
.with_engine(ExecutionEngineBuilder::new().task_queue_size(256).workers(2)); let mut runtime = builder.build().unwrap(); // Build Orchestration let mut orch = Orchestration::new() - .add_design( - camera_processing_component_design() - .expect("Failed to create camera_processing_component_design"), - ) - .add_design( - detect_object_component_design() - .expect("Failed to create detect_object_component_design"), - ) + .add_design(camera_processing_component_design().expect("Failed to create camera_processing_component_design")) + .add_design(detect_object_component_design().expect("Failed to create detect_object_component_design")) .design_done(); // Specify deployment information, ie. which event is local, which timer etc From 04b5539e24db2e2bc819cb1daddfff3812deeb19 Mon Sep 17 00:00:00 2001 From: Piotr Korkus Date: Fri, 27 Feb 2026 11:05:36 +0100 Subject: [PATCH 04/10] format: apply for bazel --- BUILD | 5 ++- MODULE.bazel | 14 ++++---- bazel_common/bundlers.bzl | 21 +++++------ bazel_common/score_basic_bazel.MODULE.bazel | 5 ++- .../score_gcc_toolchains.MODULE.bazel | 27 +++++++------- .../score_modules_target_sw.MODULE.bazel | 20 +++++------ .../score_modules_tooling.MODULE.bazel | 18 +++++----- bazel_common/score_python.MODULE.bazel | 2 -- .../score_qnx_toolchains.MODULE.bazel | 3 -- .../score_rust_toolchains.MODULE.bazel | 6 ++-- feature_integration_tests/configs/BUILD | 2 +- feature_integration_tests/itf/BUILD | 1 - feature_integration_tests/test_cases/BUILD | 6 +++- .../test_scenarios/rust/BUILD | 14 ++++---- images/autosd_x86_64/BUILD.bazel | 14 ++++---- .../persistency_integration/BUILD | 24 ++++++------- .../scrample_integration/BUILD | 28 +++++++-------- images/linux_x86_64/BUILD | 5 ++- images/qnx_x86_64/build/BUILD | 35 +++++++++---------- images/qnx_x86_64/configs/BUILD | 3 +- runners/docker_x86_64/scripts/BUILD | 2 +- runners/qemu_x86_64/BUILD | 1 - rust_coverage/BUILD | 9 ----- showcases/BUILD | 5 +-- showcases/cli/BUILD | 6 ++-- 
showcases/orchestration_persistency/BUILD | 7 ++-- showcases/simple_lifecycle/BUILD | 32 +++++++++++------ showcases/simple_lifecycle/configs/BUILD | 4 +-- showcases/standalone/BUILD | 18 +++++----- 29 files changed, 166 insertions(+), 171 deletions(-) diff --git a/BUILD b/BUILD index 7b66a32842..904beefcdc 100644 --- a/BUILD +++ b/BUILD @@ -14,7 +14,6 @@ load("@score_docs_as_code//:docs.bzl", "docs") load("@score_tooling//:defs.bzl", "copyright_checker", "setup_starpls", "use_format_targets") - # Docs-as-code docs( data = [ @@ -49,13 +48,13 @@ copyright_checker( ".github", "bazel_common", "docs", - "scripts", - "tests", "feature_integration_tests", "images", "runners", "rust_coverage", + "scripts", "showcases", + "tests", "//:BUILD", "//:MODULE.bazel", ], diff --git a/MODULE.bazel b/MODULE.bazel index c3c0e9c96b..1c21842342 100644 --- a/MODULE.bazel +++ b/MODULE.bazel @@ -20,13 +20,13 @@ module( include("//bazel_common:score_basic_bazel.MODULE.bazel") # GCC toolchains -include ("//bazel_common:score_gcc_toolchains.MODULE.bazel") # WIP: Not used yet as have problems with IFS toolchain +include("//bazel_common:score_gcc_toolchains.MODULE.bazel") # WIP: Not used yet as have problems with IFS toolchain # GCC toolchains -include ("//bazel_common:score_qnx_toolchains.MODULE.bazel") # WIP: Not used yet as have problems with IFS toolchain +include("//bazel_common:score_qnx_toolchains.MODULE.bazel") # WIP: Not used yet as have problems with IFS toolchain # Rust toolchains -include ("//bazel_common:score_rust_toolchains.MODULE.bazel") +include("//bazel_common:score_rust_toolchains.MODULE.bazel") # Python support include("//bazel_common:score_python.MODULE.bazel") @@ -40,8 +40,8 @@ include("//bazel_common:score_modules_target_sw.MODULE.bazel") bazel_dep(name = "rules_boost", repo_name = "com_github_nelhage_rules_boost") archive_override( module_name = "rules_boost", - urls = ["https://github.com/nelhage/rules_boost/archive/refs/heads/master.tar.gz"], strip_prefix = 
"rules_boost-master", + urls = ["https://github.com/nelhage/rules_boost/archive/refs/heads/master.tar.gz"], ) # Implicit dependencies for score_tooling @@ -59,7 +59,8 @@ git_override( # Currently required for ifs tooling bazel_dep(name = "score_toolchains_qnx", version = "0.0.7") -toolchains_qnx = use_extension("@score_toolchains_qnx//:extensions.bzl", "toolchains_qnx", dev_dependency=True) + +toolchains_qnx = use_extension("@score_toolchains_qnx//:extensions.bzl", "toolchains_qnx", dev_dependency = True) toolchains_qnx.sdp( sha256 = "f2e0cb21c6baddbcb65f6a70610ce498e7685de8ea2e0f1648f01b327f6bac63", strip_prefix = "installation", @@ -68,11 +69,9 @@ toolchains_qnx.sdp( use_repo(toolchains_qnx, "toolchains_qnx_sdp") use_repo(toolchains_qnx, "toolchains_qnx_ifs") - bazel_dep(name = "rules_oci", version = "1.8.0") oci = use_extension("@rules_oci//oci:extensions.bzl", "oci") - oci.pull( name = "ubuntu_22_04", digest = "sha256:3ba65aa20f86a0fad9df2b2c259c613df006b2e6d0bfcc8a146afb8c525a9751", @@ -80,5 +79,4 @@ oci.pull( platforms = ["linux/amd64"], tag = "22.04", ) - use_repo(oci, "ubuntu_22_04") diff --git a/bazel_common/bundlers.bzl b/bazel_common/bundlers.bzl index 31d8be3239..2733de30f5 100644 --- a/bazel_common/bundlers.bzl +++ b/bazel_common/bundlers.bzl @@ -1,10 +1,9 @@ -load("@rules_pkg//pkg:pkg.bzl", "pkg_tar") load("@rules_pkg//pkg:mappings.bzl", "pkg_files") +load("@rules_pkg//pkg:pkg.bzl", "pkg_tar") +def score_pkg_bundle(name, bins, config_data = None, package_dir = None, other_package_files = [], custom_layout = {}): + """ Creates a reusable bundle by chaining Bazel packaging rules: -def score_pkg_bundle(name, bins, config_data= None, package_dir = None, other_package_files = [], custom_layout = {}): - """ - Creates a reusable bundle by chaining Bazel packaging rules: - Collects binaries and config files into a pkg_files target, renaming them into subdirectories. 
- Packs them into a tar archive using pkg_tar, optionally with additional package files and a custom package directory. - Extracts the tar archive using a custom untar rule. @@ -23,11 +22,15 @@ def score_pkg_bundle(name, bins, config_data= None, package_dir = None, other_pa "//app:data.txt": "resources/data.txt", "//lib:helper.sh": "scripts/helper.sh", } + Returns: + A dict with the following keys: + - "all_files": Label of the pkg_files target that collects all files with their renamed paths. + - "tar": Label of the pkg_tar target that creates the tar archive. + - "tree": Label of the untar target that extracts the tar archive. """ all_files_name = name + "_pkg_files" bundle_name = name + "_pkg_tar" - all_cfg = name + "_configs" untar_name = name rename_dict = {} @@ -42,17 +45,16 @@ def score_pkg_bundle(name, bins, config_data= None, package_dir = None, other_pa if config_data != None: config_data_arr = config_data - if custom_layout == None: custom_layout = {} for label, dst in custom_layout.items(): rename_dict[label] = "data/" + name + "/" + dst - + # Step 1: pkg_files pkg_files( name = all_files_name, - srcs = bins + config_data_arr + list(custom_layout.keys()), + srcs = bins + config_data_arr + list(custom_layout.keys()), renames = rename_dict, visibility = ["//visibility:public"], ) @@ -80,7 +82,6 @@ def score_pkg_bundle(name, bins, config_data= None, package_dir = None, other_pa "tree": ":" + untar_name, } - def _untar_impl(ctx): out = ctx.actions.declare_directory(ctx.label.name) @@ -98,4 +99,4 @@ untar = rule( attrs = { "src": attr.label(allow_single_file = True), }, -) \ No newline at end of file +) diff --git a/bazel_common/score_basic_bazel.MODULE.bazel b/bazel_common/score_basic_bazel.MODULE.bazel index fbc133ba1d..69dca59d09 100644 --- a/bazel_common/score_basic_bazel.MODULE.bazel +++ b/bazel_common/score_basic_bazel.MODULE.bazel @@ -1,13 +1,12 @@ bazel_dep(name = "rules_shell", version = "0.6.0") bazel_dep(name = "rules_cc", version = "0.2.16") 
bazel_dep(name = "rules_pkg", version = "1.2.0") - bazel_dep(name = "rules_rpm", version = "0.1.0") git_override( module_name = "rules_rpm", - remote = "https://github.com/eclipse-score/inc_os_autosd.git", branch = "main", - strip_prefix = "rpm" + remote = "https://github.com/eclipse-score/inc_os_autosd.git", + strip_prefix = "rpm", ) bazel_dep(name = "flatbuffers", version = "25.9.23") diff --git a/bazel_common/score_gcc_toolchains.MODULE.bazel b/bazel_common/score_gcc_toolchains.MODULE.bazel index 1a44306269..733c733d66 100644 --- a/bazel_common/score_gcc_toolchains.MODULE.bazel +++ b/bazel_common/score_gcc_toolchains.MODULE.bazel @@ -11,10 +11,9 @@ # SPDX-License-Identifier: Apache-2.0 # ******************************************************************************* - bazel_dep(name = "score_bazel_cpp_toolchains", version = "0.2.2") -gcc = use_extension("@score_bazel_cpp_toolchains//extensions:gcc.bzl", "gcc", dev_dependency = True) +gcc = use_extension("@score_bazel_cpp_toolchains//extensions:gcc.bzl", "gcc", dev_dependency = True) gcc.toolchain( name = "score_gcc_x86_64_toolchain", target_cpu = "x86_64", @@ -43,28 +42,31 @@ gcc.toolchain( # ) # Workaround until above is fixed -bazel_dep(name = "score_toolchains_gcc", dev_dependency=True) -git_override( # Elektrobit corbos Linux for Safety Applications needs a specific toolchain - module_name = "score_toolchains_gcc", - remote = "https://github.com/elektrobit-contrib/eclipse-score_toolchains_gcc.git", - tag = "0.5.0-beta" # corresponds to git sha 158921ffd9aabef41a2a03bca5baeaa9f4aa9d33 +bazel_dep(name = "score_toolchains_gcc", dev_dependency = True) +git_override( + # Elektrobit corbos Linux for Safety Applications needs a specific toolchain + module_name = "score_toolchains_gcc", + remote = "https://github.com/elektrobit-contrib/eclipse-score_toolchains_gcc.git", + tag = "0.5.0-beta", # corresponds to git sha 158921ffd9aabef41a2a03bca5baeaa9f4aa9d33 ) -eb = 
use_extension("@score_toolchains_gcc//extensions:gcc.bzl", "gcc", dev_dependency=True) + +eb = use_extension("@score_toolchains_gcc//extensions:gcc.bzl", "gcc", dev_dependency = True) eb.toolchain( - url = "https://github.com/elektrobit-contrib/eclipse-score_toolchains_gcc/releases/download/0.5.0-beta/fastdev-sdk-ubuntu-ebclfsa-ebcl-qemuarm64.tar.gz", sha256 = "05b57bbc8d99d46df6b57f774c39a5a2664964ea7eb94147cbece08508c1f121", strip_prefix = "fastdev-sdk-ubuntu-ebclfsa-ebcl-qemuarm64", + url = "https://github.com/elektrobit-contrib/eclipse-score_toolchains_gcc/releases/download/0.5.0-beta/fastdev-sdk-ubuntu-ebclfsa-ebcl-qemuarm64.tar.gz", ) - bazel_dep(name = "os_autosd_toolchain", version = "0.0.1") + # override with git git_override( module_name = "os_autosd_toolchain", - remote = "https://github.com/eclipse-score/inc_os_autosd.git", branch = "main", - strip_prefix = "toolchain" + remote = "https://github.com/eclipse-score/inc_os_autosd.git", + strip_prefix = "toolchain", ) + autosd_10_gcc = use_extension("@os_autosd_toolchain//autosd_10_gcc:extensions.bzl", "autosd_10_gcc_extension") use_repo(autosd_10_gcc, "autosd_10_gcc_repo") @@ -73,6 +75,7 @@ rpm_toolchain = use_extension("@rules_rpm//toolchain:extensions.bzl", "rpm_toolc use_repo(rpm_toolchain, "rpm_toolchain") use_repo(eb, "gcc_toolchain", "gcc_toolchain_gcc") + use_repo( gcc, "score_gcc_x86_64_toolchain", diff --git a/bazel_common/score_modules_target_sw.MODULE.bazel b/bazel_common/score_modules_target_sw.MODULE.bazel index 0f0460c45a..4ee74d6398 100644 --- a/bazel_common/score_modules_target_sw.MODULE.bazel +++ b/bazel_common/score_modules_target_sw.MODULE.bazel @@ -17,63 +17,63 @@ bazel_dep(name = "score_baselibs") git_override( module_name = "score_baselibs", - remote = "https://github.com/eclipse-score/baselibs.git", commit = "158fe6a7b791c58f6eac5f7e4662b8db0cf9ac6e", + patch_strip = 1, patches = [ "//patches/baselibs:003-acl-fixes-for-aarch64.patch", ], - patch_strip = 1, + remote = 
"https://github.com/eclipse-score/baselibs.git", ) bazel_dep(name = "score_baselibs_rust") git_override( module_name = "score_baselibs_rust", - remote = "https://github.com/eclipse-score/baselibs_rust.git", commit = "0eba2934fa8b0e1a343ec6bf6f7ff00cec27d81c", + remote = "https://github.com/eclipse-score/baselibs_rust.git", ) bazel_dep(name = "score_communication") git_override( module_name = "score_communication", - remote = "https://github.com/eclipse-score/communication.git", commit = "56448a5589a5f7d3921b873e8127b824a8c1ca95", + patch_strip = 1, patches = [ "//patches/communication:001-expose-comm-examples.patch", ], - patch_strip = 1, + remote = "https://github.com/eclipse-score/communication.git", ) bazel_dep(name = "score_persistency") git_override( module_name = "score_persistency", - remote = "https://github.com/eclipse-score/persistency.git", commit = "438bf9b5c447fd41ad43b321679dd3d1b3a6c737", + remote = "https://github.com/eclipse-score/persistency.git", ) bazel_dep(name = "score_orchestrator") git_override( module_name = "score_orchestrator", - remote = "https://github.com/eclipse-score/orchestrator.git", commit = "600fdd8186305ffbf495e4fc788195c077de79ac", + remote = "https://github.com/eclipse-score/orchestrator.git", ) bazel_dep(name = "score_kyron") git_override( module_name = "score_kyron", - remote = "https://github.com/eclipse-score/kyron.git", commit = "5acfb1a593ec65cf4f64424f581c6ddd04813ee7", + remote = "https://github.com/eclipse-score/kyron.git", ) bazel_dep(name = "score_lifecycle_health") git_override( module_name = "score_lifecycle_health", - remote = "https://github.com/eclipse-score/lifecycle.git", commit = "14ee704eeac03e03ca10bece5de8b694d3c5e2dd", + remote = "https://github.com/eclipse-score/lifecycle.git", ) bazel_dep(name = "score_logging") git_override( module_name = "score_logging", - remote = "https://github.com/eclipse-score/logging.git", commit = "80feaae8ffaa2448c3872011b724e1668ad2930e", + remote = 
"https://github.com/eclipse-score/logging.git", ) diff --git a/bazel_common/score_modules_tooling.MODULE.bazel b/bazel_common/score_modules_tooling.MODULE.bazel index 9bc0caa9d8..5851bc25b1 100644 --- a/bazel_common/score_modules_tooling.MODULE.bazel +++ b/bazel_common/score_modules_tooling.MODULE.bazel @@ -17,59 +17,59 @@ bazel_dep(name = "score_crates") git_override( module_name = "score_crates", - remote = "https://github.com/eclipse-score/score-crates.git", commit = "372438042bafe21146899dc1f89771a031d12ed0", + remote = "https://github.com/eclipse-score/score-crates.git", ) bazel_dep(name = "score_itf") git_override( module_name = "score_itf", - remote = "https://github.com/eclipse-score/itf.git", commit = "e1243f3818fd78b72f79741082016fd3d7c85329", + remote = "https://github.com/eclipse-score/itf.git", ) bazel_dep(name = "score_tooling") git_override( module_name = "score_tooling", - remote = "https://github.com/eclipse-score/tooling.git", commit = "1159dcf9d35981ac4877265eff7a1e7a24452161", + remote = "https://github.com/eclipse-score/tooling.git", ) bazel_dep(name = "score_platform") git_override( module_name = "score_platform", - remote = "https://github.com/eclipse-score/score.git", commit = "bf8502071d750cb70d88f1cb5cfbf5e5e7407f27", + remote = "https://github.com/eclipse-score/score.git", ) bazel_dep(name = "score_bazel_platforms") git_override( module_name = "score_bazel_platforms", - remote = "https://github.com/eclipse-score/bazel_platforms.git", commit = "2286de89c35d5660ad183906a6f010b33fcac8db", + remote = "https://github.com/eclipse-score/bazel_platforms.git", ) bazel_dep(name = "score_test_scenarios") git_override( module_name = "score_test_scenarios", - remote = "https://github.com/eclipse-score/testing_tools.git", commit = "c3d385c8e5562b594a409e92a8f493207b53b49d", + remote = "https://github.com/eclipse-score/testing_tools.git", ) bazel_dep(name = "score_docs_as_code") git_override( module_name = "score_docs_as_code", - remote = 
"https://github.com/eclipse-score/docs-as-code.git", commit = "7349eb22430febb367765ced3fbabfc0ef2988d0", + patch_strip = 1, patches = [ "//patches/docs-as-code:fix-extra-properties.patch", ], - patch_strip = 1, + remote = "https://github.com/eclipse-score/docs-as-code.git", ) bazel_dep(name = "score_process") git_override( module_name = "score_process", - remote = "https://github.com/eclipse-score/process_description.git", commit = "fbe68f10041eb34fb791f08e31006afce8c9564a", + remote = "https://github.com/eclipse-score/process_description.git", ) diff --git a/bazel_common/score_python.MODULE.bazel b/bazel_common/score_python.MODULE.bazel index a2e613fe2b..b9ba82c3ad 100644 --- a/bazel_common/score_python.MODULE.bazel +++ b/bazel_common/score_python.MODULE.bazel @@ -1,4 +1,3 @@ - bazel_dep(name = "rules_python", version = "1.8.3") PYTHON_VERSION = "3.12" @@ -17,5 +16,4 @@ pip.parse( python_version = PYTHON_VERSION, requirements_lock = "//feature_integration_tests/test_cases:requirements.txt.lock", ) - use_repo(pip, "pip_score_venv_test") diff --git a/bazel_common/score_qnx_toolchains.MODULE.bazel b/bazel_common/score_qnx_toolchains.MODULE.bazel index 4ff05dd7a0..b07cb1b59b 100644 --- a/bazel_common/score_qnx_toolchains.MODULE.bazel +++ b/bazel_common/score_qnx_toolchains.MODULE.bazel @@ -1,6 +1,4 @@ - qcc = use_extension("@score_bazel_cpp_toolchains//extensions:gcc.bzl", "gcc", dev_dependency = True) - qcc.toolchain( name = "score_qcc_aarch64_toolchain", sdp_version = "8.0.0", @@ -9,7 +7,6 @@ qcc.toolchain( use_default_package = True, version = "12.2.0", ) - qcc.toolchain( name = "score_qcc_x86_64_toolchain", sdp_version = "8.0.0", diff --git a/bazel_common/score_rust_toolchains.MODULE.bazel b/bazel_common/score_rust_toolchains.MODULE.bazel index 4ade3bd300..37faa9184e 100644 --- a/bazel_common/score_rust_toolchains.MODULE.bazel +++ b/bazel_common/score_rust_toolchains.MODULE.bazel @@ -1,7 +1,9 @@ bazel_dep(name = "rules_rust", version = "0.61.0") + bazel_dep(name 
= "score_toolchains_rust", version = "0.4.0", dev_dependency = True) + git_override( module_name = "rules_rust", - remote = "https://github.com/pawelrutkaq/rules_rust.git", # To be fixed once rule_rust is in score bazel registry commit = "293337fd6402ec3dfbeb8f2d589f38ad9124dadd", -) \ No newline at end of file + remote = "https://github.com/pawelrutkaq/rules_rust.git", # To be fixed once rule_rust is in score bazel registry +) diff --git a/feature_integration_tests/configs/BUILD b/feature_integration_tests/configs/BUILD index 6a7ef1fbbb..8556521da4 100644 --- a/feature_integration_tests/configs/BUILD +++ b/feature_integration_tests/configs/BUILD @@ -25,7 +25,7 @@ filegroup( name = "etc_configs", srcs = [ "etc/logging.json", - "etc/mw_com_config.json", + "etc/mw_com_config.json", ], visibility = ["//visibility:public"], ) diff --git a/feature_integration_tests/itf/BUILD b/feature_integration_tests/itf/BUILD index 465800f86c..262a06e790 100644 --- a/feature_integration_tests/itf/BUILD +++ b/feature_integration_tests/itf/BUILD @@ -1,7 +1,6 @@ load("@score_itf//:defs.bzl", "py_itf_test") load("@score_itf//score/itf/plugins:plugins.bzl", "docker", "qemu") - filegroup( name = "all_tests", srcs = [ diff --git a/feature_integration_tests/test_cases/BUILD b/feature_integration_tests/test_cases/BUILD index cb64410b5d..fb5b529f47 100644 --- a/feature_integration_tests/test_cases/BUILD +++ b/feature_integration_tests/test_cases/BUILD @@ -28,7 +28,11 @@ score_virtualenv( # Tests targets score_py_pytest( name = "fit", - srcs = glob(["tests/**/*.py"]) + ["conftest.py", "fit_scenario.py", "test_properties.py"], + srcs = glob(["tests/**/*.py"]) + [ + "conftest.py", + "fit_scenario.py", + "test_properties.py", + ], args = [ "--traces=all", "--rust-target-path=$(rootpath //feature_integration_tests/test_scenarios/rust:rust_test_scenarios)", diff --git a/feature_integration_tests/test_scenarios/rust/BUILD b/feature_integration_tests/test_scenarios/rust/BUILD index 
29e476e5db..6ef6c741be 100644 --- a/feature_integration_tests/test_scenarios/rust/BUILD +++ b/feature_integration_tests/test_scenarios/rust/BUILD @@ -16,19 +16,19 @@ load("@rules_rust//rust:defs.bzl", "rust_binary") rust_binary( name = "rust_test_scenarios", srcs = glob(["src/**/*.rs"]), - visibility = ["//feature_integration_tests/test_cases:__pkg__"], tags = [ "manual", ], + visibility = ["//feature_integration_tests/test_cases:__pkg__"], deps = [ + "@score_crates//:serde", + "@score_crates//:serde_json", + "@score_crates//:tracing", + "@score_crates//:tracing_subscriber", "@score_kyron//src/kyron:libkyron", "@score_kyron//src/kyron-foundation:libkyron_foundation", "@score_orchestrator//src/orchestration:liborchestration", - "@score_persistency//src/rust/rust_kvs:rust_kvs", - "@score_test_scenarios//test_scenarios_rust:test_scenarios_rust", - "@score_crates//:tracing", - "@score_crates//:tracing_subscriber", - "@score_crates//:serde", - "@score_crates//:serde_json", + "@score_persistency//src/rust/rust_kvs", + "@score_test_scenarios//test_scenarios_rust", ], ) diff --git a/images/autosd_x86_64/BUILD.bazel b/images/autosd_x86_64/BUILD.bazel index 20ec8dabca..e452f00f97 100644 --- a/images/autosd_x86_64/BUILD.bazel +++ b/images/autosd_x86_64/BUILD.bazel @@ -20,25 +20,25 @@ alias( rpm_package( name = "score-showcases", + data = [ + "//showcases:showcases_all", + ], + data_dir = "/usr/share/score/examples", description = "Eclipse S-CORE Reference Integration Showcases", summary = "Eclipse S-CORE Reference Integration Showcases", version = "0.6.0", - data = [ - "//showcases:showcases_all" - ], - data_dir = "/usr/share/score/examples", ) sh_binary( name = "run", srcs = ["//runners/docker_x86_64/scripts:run_docker.sh"], - env={ - "OCI_IMAGE": "quay.io/centos/centos:stream10" - }, args = [ "$(location :image)", ], data = [ ":image", ], + env = { + "OCI_IMAGE": "quay.io/centos/centos:stream10", + }, ) diff --git a/images/ebclfsa_aarch64/persistency_integration/BUILD 
b/images/ebclfsa_aarch64/persistency_integration/BUILD index fdb5c75f0a..5693c7f58e 100644 --- a/images/ebclfsa_aarch64/persistency_integration/BUILD +++ b/images/ebclfsa_aarch64/persistency_integration/BUILD @@ -16,7 +16,7 @@ genrule( name = "fetch-fastdev-archive", srcs = [], outs = ["fastdev-archive.tgz"], - cmd = "wget -O $@ https://github.com/elektrobit-contrib/eclipse-score_toolchains_gcc/releases/download/0.5.0-beta/fastdev-ubuntu-ebcl-deb-qemu-arm64.tgz" + cmd = "wget -O $@ https://github.com/elektrobit-contrib/eclipse-score_toolchains_gcc/releases/download/0.5.0-beta/fastdev-ubuntu-ebcl-deb-qemu-arm64.tgz", ) genrule( @@ -24,7 +24,7 @@ genrule( srcs = [":fetch-fastdev-archive"], outs = [ "ebcl-qemuarm64/fastdev-ubuntu-ebclfsa-ebcl-qemuarm64.wic", - "ebcl-qemuarm64/fastdev-ubuntu-ebclfsa-ebcl-qemuarm64-vmlinux" + "ebcl-qemuarm64/fastdev-ubuntu-ebclfsa-ebcl-qemuarm64-vmlinux", ], cmd = "tar xzf $(location :fetch-fastdev-archive) -C $(RULEDIR)", ) @@ -37,6 +37,11 @@ genrule( "scripts/cpp_tests_persistency.sh", "run_qemu.sh", ], + outs = [ + "qemu_upload.log", + "ebcl-qemuarm64-modified/fastdev-ubuntu-ebclfsa-ebcl-qemuarm64.wic", + "ebcl-qemuarm64-modified/fastdev-ubuntu-ebclfsa-ebcl-qemuarm64-vmlinux", + ], cmd = " \ mkdir -p $(RULEDIR)/ebcl-qemuarm64-modified &&\ cp $(RULEDIR)/ebcl-qemuarm64/* $(RULEDIR)/ebcl-qemuarm64-modified/ &&\ @@ -48,11 +53,6 @@ genrule( sshpass -p linux ssh -o StrictHostKeyChecking=no -p 2222 root@localhost crinit-ctl poweroff || true \ sleep 5 \ ", - outs = [ - "qemu_upload.log", - "ebcl-qemuarm64-modified/fastdev-ubuntu-ebclfsa-ebcl-qemuarm64.wic", - "ebcl-qemuarm64-modified/fastdev-ubuntu-ebclfsa-ebcl-qemuarm64-vmlinux" - ], ) genrule( @@ -60,7 +60,11 @@ genrule( srcs = [ "ebcl-qemuarm64-modified/fastdev-ubuntu-ebclfsa-ebcl-qemuarm64.wic", "ebcl-qemuarm64-modified/fastdev-ubuntu-ebclfsa-ebcl-qemuarm64-vmlinux", - "run_qemu.sh" + "run_qemu.sh", + ], + outs = [ + "qemu_run.log", + "ssh_persistency_test_scenarios_run.log", ], cmd = " 
\ $(location run_qemu.sh) $(RULEDIR)/ebcl-qemuarm64-modified -pidfile $(RULEDIR)/qemu.pid > $(RULEDIR)/qemu_run.log &\ @@ -69,8 +73,4 @@ genrule( sshpass -p linux ssh -o StrictHostKeyChecking=no -p 2222 root@localhost crinit-ctl poweroff || true \ sleep 5 \ ", - outs = [ - "qemu_run.log", - "ssh_persistency_test_scenarios_run.log", - ], ) diff --git a/images/ebclfsa_aarch64/scrample_integration/BUILD b/images/ebclfsa_aarch64/scrample_integration/BUILD index b310ca159f..287378b659 100644 --- a/images/ebclfsa_aarch64/scrample_integration/BUILD +++ b/images/ebclfsa_aarch64/scrample_integration/BUILD @@ -28,8 +28,8 @@ genrule( name = "hi_app", srcs = [ ":scrample_sil_wrapper", - ":scrample_sil_r" - ], + ":scrample_sil_r", + ], outs = ["hi_app"], cmd = "cp $(location :scrample_sil_wrapper) $@ && \ chmod ugo+w $@ && \ @@ -52,7 +52,7 @@ genrule( name = "fetch-fastdev-archive", srcs = [], outs = ["fastdev-archive.tgz"], - cmd = "wget -O $@ https://github.com/elektrobit-contrib/eclipse-score_toolchains_gcc/releases/download/0.5.0-beta/fastdev-ubuntu-ebcl-deb-qemu-arm64.tgz" + cmd = "wget -O $@ https://github.com/elektrobit-contrib/eclipse-score_toolchains_gcc/releases/download/0.5.0-beta/fastdev-ubuntu-ebcl-deb-qemu-arm64.tgz", ) genrule( @@ -60,7 +60,7 @@ genrule( srcs = [":fetch-fastdev-archive"], outs = [ "ebcl-qemuarm64/fastdev-ubuntu-ebclfsa-ebcl-qemuarm64.wic", - "ebcl-qemuarm64/fastdev-ubuntu-ebclfsa-ebcl-qemuarm64-vmlinux" + "ebcl-qemuarm64/fastdev-ubuntu-ebclfsa-ebcl-qemuarm64-vmlinux", ], cmd = "tar xzf $(location :fetch-fastdev-archive) -C $(RULEDIR)", ) @@ -76,6 +76,11 @@ genrule( "etc/logging.json", "run_qemu.sh", ], + outs = [ + "qemu_upload.log", + "ebcl-qemuarm64-modified/fastdev-ubuntu-ebclfsa-ebcl-qemuarm64.wic", + "ebcl-qemuarm64-modified/fastdev-ubuntu-ebclfsa-ebcl-qemuarm64-vmlinux", + ], cmd = " \ mkdir -p $(RULEDIR)/ebcl-qemuarm64-modified &&\ cp $(RULEDIR)/ebcl-qemuarm64/* $(RULEDIR)/ebcl-qemuarm64-modified/ &&\ @@ -90,11 +95,6 @@ genrule( sshpass -p 
linux ssh -o StrictHostKeyChecking=no -p 2222 root@localhost crinit-ctl poweroff || true \ sleep 5 \ ", - outs = [ - "qemu_upload.log", - "ebcl-qemuarm64-modified/fastdev-ubuntu-ebclfsa-ebcl-qemuarm64.wic", - "ebcl-qemuarm64-modified/fastdev-ubuntu-ebclfsa-ebcl-qemuarm64-vmlinux" - ], ) genrule( @@ -102,7 +102,11 @@ genrule( srcs = [ "ebcl-qemuarm64-modified/fastdev-ubuntu-ebclfsa-ebcl-qemuarm64.wic", "ebcl-qemuarm64-modified/fastdev-ubuntu-ebclfsa-ebcl-qemuarm64-vmlinux", - "run_qemu.sh" + "run_qemu.sh", + ], + outs = [ + "qemu_run.log", + "ssh_scrample_run.log", ], cmd = " \ $(location run_qemu.sh) $(RULEDIR)/ebcl-qemuarm64-modified -pidfile $(RULEDIR)/qemu.pid > $(RULEDIR)/qemu_run.log &\ @@ -111,8 +115,4 @@ genrule( sshpass -p linux ssh -o StrictHostKeyChecking=no -p 2222 root@localhost crinit-ctl poweroff || true \ sleep 5 \ ", - outs = [ - "qemu_run.log", - "ssh_scrample_run.log", - ], ) diff --git a/images/linux_x86_64/BUILD b/images/linux_x86_64/BUILD index fc303826ce..8dfaa879a5 100644 --- a/images/linux_x86_64/BUILD +++ b/images/linux_x86_64/BUILD @@ -10,9 +10,9 @@ # # SPDX-License-Identifier: Apache-2.0 # ******************************************************************************* -load("@rules_shell//shell:sh_binary.bzl", "sh_binary") load("@rules_oci//oci:defs.bzl", "oci_image", "oci_tarball") load("@rules_pkg//pkg:tar.bzl", "pkg_tar") +load("@rules_shell//shell:sh_binary.bzl", "sh_binary") alias( name = "image", @@ -38,7 +38,7 @@ pkg_tar( ], symlinks = { "showcases": "showcases_all", - } + }, ) oci_image( @@ -57,4 +57,3 @@ oci_tarball( "//feature_integration_tests/itf:__pkg__", ], ) - diff --git a/images/qnx_x86_64/build/BUILD b/images/qnx_x86_64/build/BUILD index 1ec2e2d81b..dea5102c10 100644 --- a/images/qnx_x86_64/build/BUILD +++ b/images/qnx_x86_64/build/BUILD @@ -24,38 +24,38 @@ filegroup( filegroup( name = "configs", srcs = [ - "//images/qnx_x86_64/configs:pci_server.cfg", - "//images/qnx_x86_64/configs:pci_hw.cfg", - 
"//images/qnx_x86_64/configs:qcrypto.conf", - "//images/qnx_x86_64/configs:passwd", + "//images/qnx_x86_64/configs:dhcpcd.conf", "//images/qnx_x86_64/configs:group", - "//images/qnx_x86_64/configs:sshd_config", - "//images/qnx_x86_64/configs:ssh_host_rsa_key", - "//images/qnx_x86_64/configs:ssh_host_rsa_key.pub", "//images/qnx_x86_64/configs:hostname", - "//images/qnx_x86_64/configs:network_setup.sh", "//images/qnx_x86_64/configs:network_capture.sh", - "//images/qnx_x86_64/configs:profile", - "//images/qnx_x86_64/configs:dhcpcd.conf", + "//images/qnx_x86_64/configs:network_setup.sh", "//images/qnx_x86_64/configs:network_setup_dhcp.sh", + "//images/qnx_x86_64/configs:passwd", + "//images/qnx_x86_64/configs:pci_hw.cfg", + "//images/qnx_x86_64/configs:pci_server.cfg", + "//images/qnx_x86_64/configs:profile", + "//images/qnx_x86_64/configs:qcrypto.conf", + "//images/qnx_x86_64/configs:ssh_host_rsa_key", + "//images/qnx_x86_64/configs:ssh_host_rsa_key.pub", + "//images/qnx_x86_64/configs:sshd_config", ], visibility = ["//visibility:private"], ) qnx_ifs( name = "init", - build_file = "init.build", srcs = [ + ":configs", + ":scripts", ":system.build", ":system_dir", - ":scripts", - ":configs", - "//showcases:showcases_all", "//feature_integration_tests/configs:etc_configs", - "@score_persistency//tests/test_scenarios/cpp:test_scenarios", + "//showcases:showcases_all", + "@score_persistency//tests/test_scenarios/cpp:test_scenarios", ], + build_file = "init.build", ext_repo_maping = { - "BUNDLE_PATH": "$(location //showcases:showcases_all)" + "BUNDLE_PATH": "$(location //showcases:showcases_all)", }, visibility = [ "//visibility:public", @@ -65,8 +65,7 @@ qnx_ifs( filegroup( name = "system_dir", srcs = [ - "system.build", "init.build", + "system.build", ], ) - diff --git a/images/qnx_x86_64/configs/BUILD b/images/qnx_x86_64/configs/BUILD index 40d52d113d..4ce5a8da20 100644 --- a/images/qnx_x86_64/configs/BUILD +++ b/images/qnx_x86_64/configs/BUILD @@ -11,7 +11,6 @@ # 
SPDX-License-Identifier: Apache-2.0 # ******************************************************************************* - exports_files([ "startup.sh", "network_setup.sh", @@ -27,5 +26,5 @@ exports_files([ "ssh_host_rsa_key.pub", "hostname", "profile", - "network_capture.sh" + "network_capture.sh", ]) diff --git a/runners/docker_x86_64/scripts/BUILD b/runners/docker_x86_64/scripts/BUILD index 6e17fd110a..400ffffccd 100644 --- a/runners/docker_x86_64/scripts/BUILD +++ b/runners/docker_x86_64/scripts/BUILD @@ -1 +1 @@ -exports_files(["run_docker.sh"]) \ No newline at end of file +exports_files(["run_docker.sh"]) diff --git a/runners/qemu_x86_64/BUILD b/runners/qemu_x86_64/BUILD index 52ddaba0f2..9ff2afb309 100644 --- a/runners/qemu_x86_64/BUILD +++ b/runners/qemu_x86_64/BUILD @@ -13,7 +13,6 @@ load("@rules_shell//shell:sh_binary.bzl", "sh_binary") load("@score_itf//:defs.bzl", "py_itf_test") - # sh_binary( # name = "qemu_x86_64", # srcs = ["scripts/run_qemu.sh"], diff --git a/rust_coverage/BUILD b/rust_coverage/BUILD index c900ed584a..6d28f79d7a 100644 --- a/rust_coverage/BUILD +++ b/rust_coverage/BUILD @@ -16,8 +16,6 @@ load("@score_tooling//:defs.bzl", "rust_coverage_report") - - rust_coverage_report( name = "rust_coverage_score_baselibs_rust", bazel_configs = [ @@ -27,7 +25,6 @@ rust_coverage_report( query = 'kind("rust_test", @score_baselibs_rust//src/...) -@score_baselibs_rust//src/log/score_log_fmt_macro:tests', visibility = ["//visibility:public"], ) - rust_coverage_report( name = "rust_coverage_score_communication", @@ -38,7 +35,6 @@ rust_coverage_report( query = 'kind("rust_test", @score_communication//score/mw/com/impl/...) 
-@score_communication//score/mw/com/impl:unit_test_runtime_single_exec -@score_communication//score/mw/com/impl/configuration:config_parser_test -@score_communication//score/mw/com/impl/configuration:configuration_test -@score_communication//score/mw/com/impl/tracing/configuration:tracing_filter_config_parser_test', visibility = ["//visibility:public"], ) - rust_coverage_report( name = "rust_coverage_score_persistency", @@ -49,7 +45,6 @@ rust_coverage_report( query = 'kind("rust_test", @score_persistency//src/...) -@score_persistency//src/cpp/tests:bm_kvs_cpp', visibility = ["//visibility:public"], ) - rust_coverage_report( name = "rust_coverage_score_orchestrator", @@ -60,7 +55,6 @@ rust_coverage_report( query = 'kind("rust_test", @score_orchestrator//src/...)', visibility = ["//visibility:public"], ) - rust_coverage_report( name = "rust_coverage_score_kyron", @@ -71,7 +65,6 @@ rust_coverage_report( query = 'kind("rust_test", @score_kyron//src/...)', visibility = ["//visibility:public"], ) - rust_coverage_report( name = "rust_coverage_score_lifecycle_health", @@ -82,7 +75,6 @@ rust_coverage_report( query = 'kind("rust_test", @score_lifecycle_health//src/...)', visibility = ["//visibility:public"], ) - rust_coverage_report( name = "rust_coverage_score_logging", @@ -93,4 +85,3 @@ rust_coverage_report( query = 'kind("rust_test", @score_logging//score/...) 
-@score_logging//score/datarouter/test/ut/ut_logging:dltprotocolUT -@score_logging//score/datarouter/test/ut/ut_logging:persistentLogConfigUT -@score_logging//score/datarouter/test/ut/ut_logging:socketserverConfigUT -@score_logging//score/datarouter/test/ut/ut_logging:socketserverUT -@score_logging//score/mw/log/legacy_non_verbose_api:unit_test', visibility = ["//visibility:public"], ) - \ No newline at end of file diff --git a/showcases/BUILD b/showcases/BUILD index 01395893a6..3fd3cd19ab 100644 --- a/showcases/BUILD +++ b/showcases/BUILD @@ -14,14 +14,11 @@ load("//bazel_common:bundlers.bzl", "score_pkg_bundle") score_pkg_bundle( name = "showcases_all", - bins = ["//showcases/cli" ], + bins = ["//showcases/cli"], other_package_files = [ "//showcases/standalone:comm_pkg_files", "//showcases/standalone:kyron_pkg_files", "//showcases/orchestration_persistency:orch_per_pkg_files", "//showcases/simple_lifecycle:simple_lifecycle_pkg_files", ], - ) - - diff --git a/showcases/cli/BUILD b/showcases/cli/BUILD index 3a516cdd7c..bb2eed8ce4 100644 --- a/showcases/cli/BUILD +++ b/showcases/cli/BUILD @@ -16,12 +16,12 @@ load("@rules_rust//rust:defs.bzl", "rust_binary") rust_binary( name = "cli", srcs = ["main.rs"], + visibility = ["//visibility:public"], deps = [ + "@score_crates//:anyhow", "@score_crates//:clap", "@score_crates//:cliclack", "@score_crates//:serde", "@score_crates//:serde_json", - "@score_crates//:anyhow", ], - visibility = ["//visibility:public"], -) \ No newline at end of file +) diff --git a/showcases/orchestration_persistency/BUILD b/showcases/orchestration_persistency/BUILD index 5343c8808e..b4a8117f09 100644 --- a/showcases/orchestration_persistency/BUILD +++ b/showcases/orchestration_persistency/BUILD @@ -19,14 +19,14 @@ exports_files(["orch_per.score.json"]) rust_binary( name = "orch_per_example", srcs = ["main.rs"], + visibility = ["//visibility:public"], deps = [ "@score_kyron//src/kyron:libkyron", 
"@score_kyron//src/kyron-foundation:libkyron_foundation", - "@score_orchestrator//src/orchestration:liborchestration", "@score_kyron//src/logging_tracing:liblogging_tracing", - "@score_persistency//src/rust/rust_kvs:rust_kvs", + "@score_orchestrator//src/orchestration:liborchestration", + "@score_persistency//src/rust/rust_kvs", ], - visibility = ["//visibility:public"], ) score_pkg_bundle( @@ -34,4 +34,3 @@ score_pkg_bundle( bins = [":orch_per_example"], config_data = ["//showcases/orchestration_persistency:orch_per.score.json"], ) - diff --git a/showcases/simple_lifecycle/BUILD b/showcases/simple_lifecycle/BUILD index 0db2695e58..45107bfdf3 100644 --- a/showcases/simple_lifecycle/BUILD +++ b/showcases/simple_lifecycle/BUILD @@ -8,12 +8,11 @@ score_pkg_bundle( "@score_lifecycle_health//examples/cpp_supervised_app", "@score_lifecycle_health//examples/rust_supervised_app", "@score_lifecycle_health//examples/control_application:control_daemon", - ":lifecycle_signal.sh" + ":lifecycle_signal.sh", ], config_data = [ "//showcases/simple_lifecycle:simple_lifecycle.score.json", ], - custom_layout = { ":lm_demo.bin": "etc/lm_demo.bin", ":hm_demo.bin": "etc/hm_demo.bin", @@ -24,37 +23,48 @@ score_pkg_bundle( genrule( name = "gen_lm_demo.bin", - srcs = ["@score_lifecycle_health//src/launch_manager_daemon:lm_flatcfg_fbs", "//showcases/simple_lifecycle/configs:lm_demo.json"], + srcs = [ + "@score_lifecycle_health//src/launch_manager_daemon:lm_flatcfg_fbs", + "//showcases/simple_lifecycle/configs:lm_demo.json", + ], outs = ["lm_demo.bin"], - tools = ["@flatbuffers//:flatc"], cmd = "$(location @flatbuffers//:flatc) --binary $(SRCS) && mv lm_demo.bin $(location lm_demo.bin)", + tools = ["@flatbuffers//:flatc"], visibility = ["//visibility:public"], ) genrule( name = "gen_hm_demo.bin", - srcs = ["@score_lifecycle_health//src/launch_manager_daemon/health_monitor_lib:hm_flatcfg_fbs", "//showcases/simple_lifecycle/configs:hm_demo.json"], + srcs = [ + 
"@score_lifecycle_health//src/launch_manager_daemon/health_monitor_lib:hm_flatcfg_fbs", + "//showcases/simple_lifecycle/configs:hm_demo.json", + ], outs = ["hm_demo.bin"], - tools = ["@flatbuffers//:flatc"], cmd = "$(location @flatbuffers//:flatc) --binary $(SRCS) && mv hm_demo.bin $(location hm_demo.bin)", + tools = ["@flatbuffers//:flatc"], visibility = ["//visibility:public"], ) genrule( name = "gen_cpp_supervised_app_demo.bin", - srcs = ["@score_lifecycle_health//src/launch_manager_daemon/health_monitor_lib:hm_flatcfg_fbs", "//showcases/simple_lifecycle/configs:cpp_supervised_app_demo.json"], + srcs = [ + "@score_lifecycle_health//src/launch_manager_daemon/health_monitor_lib:hm_flatcfg_fbs", + "//showcases/simple_lifecycle/configs:cpp_supervised_app_demo.json", + ], outs = ["cpp_supervised_app_demo.bin"], - tools = ["@flatbuffers//:flatc"], cmd = "$(location @flatbuffers//:flatc) --binary $(SRCS) && mv cpp_supervised_app_demo.bin $(location cpp_supervised_app_demo.bin)", + tools = ["@flatbuffers//:flatc"], visibility = ["//visibility:public"], ) genrule( name = "gen_rust_supervised_app_demo.bin", - srcs = ["@score_lifecycle_health//src/launch_manager_daemon/health_monitor_lib:hm_flatcfg_fbs", "//showcases/simple_lifecycle/configs:rust_supervised_app_demo.json"], + srcs = [ + "@score_lifecycle_health//src/launch_manager_daemon/health_monitor_lib:hm_flatcfg_fbs", + "//showcases/simple_lifecycle/configs:rust_supervised_app_demo.json", + ], outs = ["rust_supervised_app_demo.bin"], - tools = ["@flatbuffers//:flatc"], cmd = "$(location @flatbuffers//:flatc) --binary $(SRCS) && mv rust_supervised_app_demo.bin $(location rust_supervised_app_demo.bin)", + tools = ["@flatbuffers//:flatc"], visibility = ["//visibility:public"], ) - diff --git a/showcases/simple_lifecycle/configs/BUILD b/showcases/simple_lifecycle/configs/BUILD index d1cba824a5..d9e94433c6 100644 --- a/showcases/simple_lifecycle/configs/BUILD +++ b/showcases/simple_lifecycle/configs/BUILD @@ -2,5 +2,5 @@ 
exports_files([ "hm_demo.json", "lm_demo.json", "cpp_supervised_app_demo.json", - "rust_supervised_app_demo.json" -]) \ No newline at end of file + "rust_supervised_app_demo.json", +]) diff --git a/showcases/standalone/BUILD b/showcases/standalone/BUILD index 607aa68bca..fcff31034f 100644 --- a/showcases/standalone/BUILD +++ b/showcases/standalone/BUILD @@ -1,24 +1,26 @@ load("//bazel_common:bundlers.bzl", "score_pkg_bundle") - score_pkg_bundle( name = "comm", - package_dir = "standalone", bins = ["@score_communication//score/mw/com/example/ipc_bridge:ipc_bridge_cpp"], - custom_layout = { - "@score_communication//score/mw/com/example/ipc_bridge:etc/mw_com_config.json": "etc/mw_com_config.json" - }, config_data = [ "//showcases/standalone:com.score.json", ], + custom_layout = { + "@score_communication//score/mw/com/example/ipc_bridge:etc/mw_com_config.json": "etc/mw_com_config.json", + }, + package_dir = "standalone", ) score_pkg_bundle( name = "kyron", - package_dir = "standalone", - bins = ["@score_kyron//examples:select", "@score_kyron//examples:safety_task", "@score_kyron//examples:main_macro"], - + bins = [ + "@score_kyron//examples:select", + "@score_kyron//examples:safety_task", + "@score_kyron//examples:main_macro", + ], config_data = [ "//showcases/standalone:kyron.score.json", ], + package_dir = "standalone", ) From e4e767a21db89a3a0f9af3d06b582cbaceb85dd5 Mon Sep 17 00:00:00 2001 From: Piotr Korkus Date: Fri, 27 Feb 2026 11:07:44 +0100 Subject: [PATCH 05/10] format: apply for yaml --- .github/codeql/codeql-config.yml | 1 - .github/workflows/build_and_test_autosd.yml | 22 +- .github/workflows/build_and_test_ebclfsa.yml | 8 - .github/workflows/build_and_test_linux.yml | 55 ++--- .github/workflows/build_and_test_qnx.yml | 13 - .../workflows/codeql-multiple-repo-scan.yml | 230 ++++-------------- .github/workflows/docs_cleanup.yml | 4 - .github/workflows/known_good_correct.yml | 4 - .../workflows/reusable_integration-build.yml | 6 +- 
.github/workflows/reusable_smoke-test.yml | 9 +- .github/workflows/test_and_docs.yml | 21 -- .github/workflows/test_integration.yml | 2 - images/autosd_x86_64/build/image.aib.yml | 21 +- images/autosd_x86_64/build/vars-devel.yml | 1 - images/autosd_x86_64/build/vars.yml | 1 - scripts/workflow/checkout_repos.sh | 47 ++++ scripts/workflow/parse_repos.sh | 54 ++++ scripts/workflow/recategorize_guidelines.sh | 28 +++ 18 files changed, 214 insertions(+), 313 deletions(-) create mode 100755 scripts/workflow/checkout_repos.sh create mode 100755 scripts/workflow/parse_repos.sh create mode 100755 scripts/workflow/recategorize_guidelines.sh diff --git a/.github/codeql/codeql-config.yml b/.github/codeql/codeql-config.yml index bfe715cbcf..1efa17346b 100644 --- a/.github/codeql/codeql-config.yml +++ b/.github/codeql/codeql-config.yml @@ -1,5 +1,4 @@ name: "Custom CodeQL Configuration for MISRA" - paths: - repos paths-ignore: diff --git a/.github/workflows/build_and_test_autosd.yml b/.github/workflows/build_and_test_autosd.yml index 9ba9f347ca..9bcead89de 100644 --- a/.github/workflows/build_and_test_autosd.yml +++ b/.github/workflows/build_and_test_autosd.yml @@ -10,32 +10,24 @@ # # SPDX-License-Identifier: Apache-2.0 # ******************************************************************************* - name: AutoSD - Build & Integration Test - on: pull_request: types: [opened, reopened, synchronize] - merge_group: types: [checks_requested] - push: branches: - main - jobs: build: name: x86_64 runs-on: ubuntu-latest - steps: - name: Clean disk space uses: eclipse-score/more-disk-space@v1 - - name: Checkout repository uses: actions/checkout@v4 - - name: Setup Bazel uses: bazel-contrib/setup-bazel@0.18.0 with: @@ -43,36 +35,31 @@ jobs: disk-cache: ${{ github.workflow }} repository-cache: true cache-save: ${{ github.event_name == 'push' }} - - name: Install System Dependencies run: | sudo apt-get update -y sudo apt-get install -y podman curl qemu-system createrepo-c - - name: Build 
Showcases run: | bazel build --config autosd-x86_64 //images/autosd_x86_64:image working-directory: ./images/autosd_x86_64 - - name: Copy showcases_all folder run: | set -e mkdir -p ./images/autosd_x86_64/build/rpms cp -R bazel-bin/showcases/showcases_all ./images/autosd_x86_64/build/files/ - - name: Install AIB Tools run: | curl -o auto-image-builder.sh "https://gitlab.com/CentOS/automotive/src/automotive-image-builder/-/raw/main/auto-image-builder.sh?ref_type=heads" chmod +x auto-image-builder.sh working-directory: ./images/autosd_x86_64/build - - name: Build AutoSD QEMU (x86_64) run: | export AIB_LOCAL_CONTAINER_STORAGE=$PWD/_build/containers-storage mkdir -p _build/containers-storage sudo -E ./auto-image-builder.sh build-builder --distro=autosd10-sig - + sudo -E ./auto-image-builder.sh build \ --distro=autosd10-sig \ --target=qemu \ @@ -85,26 +72,23 @@ jobs: sudo chown $(id -u):$(id -u) _build/disk.qcow2 working-directory: ./images/autosd_x86_64/build - - name: Enable KVM group perms run: | echo 'KERNEL=="kvm", GROUP="kvm", MODE="0666", OPTIONS+="static_node=kvm"' | sudo tee /etc/udev/rules.d/99-kvm4all.rules sudo udevadm control --reload-rules sudo udevadm trigger --name-match=kvm - - name: Test QEMU image run: | sshcmd() { sshpass -p "$SSH_PASSWORD" ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -p 2222 root@localhost $@ } - + ./scripts/run_qemu - + sshcmd 'cat /etc/os-release' working-directory: ./images/autosd_x86_64/build env: SSH_PASSWORD: password - - name: Archive QEMU disk image (x86_64) uses: actions/upload-artifact@v4 with: diff --git a/.github/workflows/build_and_test_ebclfsa.yml b/.github/workflows/build_and_test_ebclfsa.yml index 46c7f98661..d03c4caded 100644 --- a/.github/workflows/build_and_test_ebclfsa.yml +++ b/.github/workflows/build_and_test_ebclfsa.yml @@ -10,15 +10,12 @@ # # SPDX-License-Identifier: Apache-2.0 # ******************************************************************************* - name: EB corbos Linux for 
Safety Applications - Build & Integration Test - on: pull_request: types: [opened, reopened, synchronize] merge_group: types: [checks_requested] - push: branches: - main @@ -28,14 +25,11 @@ jobs: runs-on: ubuntu-latest container: image: ghcr.io/eclipse-score/devcontainer:v1.1.0 - steps: - name: Clean disk space uses: eclipse-score/more-disk-space@v1 - - name: Checkout repository uses: actions/checkout@v4 - - name: Setup Bazel uses: bazel-contrib/setup-bazel@0.18.0 with: @@ -43,11 +37,9 @@ jobs: disk-cache: ${{ github.workflow }} repository-cache: true cache-save: ${{ github.event_name == 'push' }} - - name: Build for EB corbos Linux for Safety Applications and run tests run: | bazel build --config=eb-aarch64 //images/ebclfsa_aarch64/scrample_integration:run - - name: Upload test logs uses: actions/upload-artifact@v5 with: diff --git a/.github/workflows/build_and_test_linux.yml b/.github/workflows/build_and_test_linux.yml index 0d1dc69b20..583b0b402c 100644 --- a/.github/workflows/build_and_test_linux.yml +++ b/.github/workflows/build_and_test_linux.yml @@ -10,7 +10,6 @@ # # SPDX-License-Identifier: Apache-2.0 # ******************************************************************************* - name: Linux - Build & Integration Test on: workflow_dispatch: @@ -21,33 +20,27 @@ on: branches: - main jobs: - x86_64: - runs-on: ubuntu-latest - permissions: - contents: write # required to upload release assets - - steps: - - name: Clean disk space - uses: eclipse-score/more-disk-space@v1 - - - name: Checkout repository - uses: actions/checkout@v4.2.2 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3.8.0 - - - name: Setup Bazel - uses: bazel-contrib/setup-bazel@0.18.0 - with: - bazelisk-cache: true - disk-cache: ${{ github.workflow }} - repository-cache: true - cache-save: ${{ github.event_name == 'push' }} - - - name: Build image - run: | - bazel build --config=linux-x86_64 //images/linux_x86_64:image - - - name: Integration tests - run: | - bazel test 
--config=linux-x86_64 //feature_integration_tests/itf:linux_x86_64 + x86_64: + runs-on: ubuntu-latest + permissions: + contents: write # required to upload release assets + steps: + - name: Clean disk space + uses: eclipse-score/more-disk-space@v1 + - name: Checkout repository + uses: actions/checkout@v4.2.2 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3.8.0 + - name: Setup Bazel + uses: bazel-contrib/setup-bazel@0.18.0 + with: + bazelisk-cache: true + disk-cache: ${{ github.workflow }} + repository-cache: true + cache-save: ${{ github.event_name == 'push' }} + - name: Build image + run: | + bazel build --config=linux-x86_64 //images/linux_x86_64:image + - name: Integration tests + run: | + bazel test --config=linux-x86_64 //feature_integration_tests/itf:linux_x86_64 diff --git a/.github/workflows/build_and_test_qnx.yml b/.github/workflows/build_and_test_qnx.yml index cc5bc39529..7810829f67 100644 --- a/.github/workflows/build_and_test_qnx.yml +++ b/.github/workflows/build_and_test_qnx.yml @@ -10,19 +10,15 @@ # # SPDX-License-Identifier: Apache-2.0 # ******************************************************************************* - name: QNX8 - Build & Integration Test on: pull_request_target: types: [opened, reopened, synchronize] - merge_group: types: [checks_requested] - push: branches: - main - jobs: qnx-build-x86_64: name: x86_64 @@ -31,14 +27,12 @@ jobs: permissions: contents: read pull-requests: read - steps: - name: Checkout repository (Handle all events) uses: actions/checkout@v4.2.2 with: ref: ${{ github.head_ref || github.event.pull_request.head.ref || github.ref }} repository: ${{ github.event.pull_request.head.repo.full_name || github.repository }} - - name: Setup Bazel with shared caching uses: bazel-contrib/setup-bazel@0.18.0 with: @@ -46,7 +40,6 @@ jobs: repository-cache: true bazelisk-cache: true cache-save: ${{ github.event_name == 'push' }} - - name: Prepare QNX license env: SCORE_QNX_LICENSE: ${{ secrets.SCORE_QNX_LICENSE }} 
@@ -56,33 +49,27 @@ jobs: LICENSE_DIR="/opt/score_qnx/license" sudo mkdir -p "${LICENSE_DIR}" echo "${SCORE_QNX_LICENSE}" | base64 --decode | sudo tee "${LICENSE_DIR}/licenses" >/dev/null - - name: Install qemu run: | sudo apt-get update sudo apt-get install -y qemu-system - - name: Enable KVM group perms run: | echo 'KERNEL=="kvm", GROUP="kvm", MODE="0666", OPTIONS+="static_node=kvm"' | sudo tee /etc/udev/rules.d/99-kvm4all.rules sudo udevadm control --reload-rules sudo udevadm trigger --name-match=kvm - - name: Allow unprivileged user namespaces run: | sudo sysctl kernel.apparmor_restrict_unprivileged_userns=0 - - name: Build with QNX toolchain env: SCORE_QNX_USER: ${{ secrets.SCORE_QNX_USER }} SCORE_QNX_PASSWORD: ${{ secrets.SCORE_QNX_PASSWORD }} run: | bazel build --config qnx-x86_64 //images/qnx_x86_64:image - - name: Run integration tests run: | bazel test --config=itf-qnx-x86_64 //feature_integration_tests/itf:qnx_x86_64 - - name: Cleanup QNX license if: always() run: sudo rm -rf /opt/score_qnx diff --git a/.github/workflows/codeql-multiple-repo-scan.yml b/.github/workflows/codeql-multiple-repo-scan.yml index d8b3ed2386..13c27a83d9 100644 --- a/.github/workflows/codeql-multiple-repo-scan.yml +++ b/.github/workflows/codeql-multiple-repo-scan.yml @@ -10,9 +10,7 @@ # # SPDX-License-Identifier: Apache-2.0 # ******************************************************************************* - name: "CodeQL - Multi-Repo Source Scan" - on: pull_request: types: [opened, reopened, synchronize] @@ -23,11 +21,9 @@ on: - main release: types: [created] - workflow_dispatch: - + workflow_dispatch: permissions: contents: write - jobs: analyze-repos: name: Analyze Multiple Repositories @@ -37,176 +33,56 @@ jobs: packages: read actions: read contents: read - steps: - - name: Checkout central repository - uses: actions/checkout@v4 - - - name: Checkout CodeQL Coding Standards scripts - uses: actions/checkout@v4 - with: - repository: github/codeql-coding-standards - path: 
codeql-coding-standards-repo # Klonen in diesen Ordner - ref: main # Oder eine spezifische Release-Version, z.B. 'v2.53.0-dev' - - # Add coding standard packages and dependencies - - name: Install Python dependencies for Coding Standards scripts - run: | - python3 -m pip install --upgrade pip - pip3 install pyyaml jsonpath-ng jsonschema jsonpatch jsonpointer pytest sarif-tools - - - name: Parse known_good.json and create repos.json - id: parse-repos - run: | - sudo apt-get update && sudo apt-get install -y jq - JSON_FILE="./known_good.json" - - # Check if the file exists - if [ ! -f "$JSON_FILE" ]; then - echo "Error file not found '$JSON_FILE' " - ls -la . - exit 1 - fi - - # Create repos.json from known_good.json - # This jq command transforms the 'modules' object into an array of repository objects - # with 'name', 'url', 'version' (branch/tag/hash), and 'path'. - jq '[.modules.target_sw | to_entries[] | { - name: .key, - url: .value.repo, - version: (.value.branch // .value.hash // .value.version), - path: ("repos/" + .key) - }]' "$JSON_FILE" > repos.json - - echo "Generated repos.json:" - cat repos.json - echo "" # Add a newline for better readability - - # The following GITHUB_OUTPUT variables are set for each module. - # These might be useful for other steps, but are not directly used by the 'checkout-repos' step - # which now reads 'repos.json' directly. 
- echo "MODULE_COUNT=$(jq '.modules.target_sw | length' "$JSON_FILE")" >> $GITHUB_OUTPUT - - jq -c '.modules.target_sw | to_entries[]' "$JSON_FILE" | while read -r module_entry; do - module_name=$(echo "$module_entry" | jq -r '.key') - repo_url=$(echo "$module_entry" | jq -r '.value.repo // empty') - version=$(echo "$module_entry" | jq -r '.value.version // empty') - branch=$(echo "$module_entry" | jq -r '.value.branch // empty') - hash=$(echo "$module_entry" | jq -r '.value.hash // empty') - - echo "${module_name}_url=$repo_url" >> $GITHUB_OUTPUT - - if [ -n "$version" ]; then - echo "${module_name}_version=$version" >> $GITHUB_OUTPUT - fi - - if [ -n "$branch" ]; then - echo "${module_name}_branch=$branch" >> $GITHUB_OUTPUT - fi - - if [ -n "$hash" ]; then - echo "${module_name}_hash=$hash" >> $GITHUB_OUTPUT - fi - done - - - name: Checkout all pinned repositories - id: checkout-repos - run: | - # jq is already installed by the previous step. - - # Read repositories from the repos.json file created by the previous step - repos=$(cat repos.json) - repo_count=$(echo "$repos" | jq length) - - # Initialize an empty string for paths to be outputted - repo_paths_output="" - - for i in $(seq 0 $((repo_count-1))); do - name=$(echo "$repos" | jq -r ".[$i].name") - url=$(echo "$repos" | jq -r ".[$i].url") - ref=$(echo "$repos" | jq -r ".[$i].version") # This can be a branch, tag, or commit hash - path=$(echo "$repos" | jq -r ".[$i].path") # e.g., "repos/score_baselibs" - - echo "Checking out $name ($ref) to $path" - - # Create the parent directory if it doesn't exist - mkdir -p "$(dirname "$path")" - - # Check if 'ref' looks like a commit hash (e.g., 40 hex characters) - # This is a heuristic; a more robust check might involve fetching refs first. - if [[ "$ref" =~ ^[0-9a-fA-F]{40}$ ]]; then - echo " Detected commit hash. Cloning and then checking out." - git clone "$url" "$path" - (cd "$path" && git checkout "$ref") - else - echo " Detected branch/tag. 
Cloning with --branch." - git clone --depth 1 --branch v"$ref" "$url" "$path" - fi - - # Append the path to the list, separated by commas - if [ -z "$repo_paths_output" ]; then - repo_paths_output="$path" - else - repo_paths_output="$repo_paths_output,$path" - fi - done - - # Output all paths as a single variable - echo "repo_paths=$repo_paths_output" >> $GITHUB_OUTPUT - - - name: Initialize CodeQL for all repositories - uses: github/codeql-action/init@v4 - with: - languages: cpp - build-mode: none - packs: codeql/misra-cpp-coding-standards - config-file: ./.github/codeql/codeql-config.yml - - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v4 - with: - upload-database: false # Don't upload databases for each repo - output: sarif-results/ - category: "multi-repo-scan" - - - name: Recategorize Guidelines - if: always() - run: | - RECATEGORIZE_SCRIPT="codeql-coding-standards-repo/scripts/guideline_recategorization/recategorize.py" - CODING_STANDARDS_CONFIG="./.github/codeql/coding-standards.yml" - - CODING_STANDARDS_SCHEMA="codeql-coding-standards-repo/schemas/coding-standards-schema-1.0.0.json" - SARIF_SCHEMA="codeql-coding-standards-repo/schemas/sarif-schema-2.1.0.json" - - - SARIF_FILE="sarif-results/cpp.sarif" - - mkdir -p sarif-results-recategorized - echo "Processing $SARIF_FILE for recategorization..." 
- python3 "$RECATEGORIZE_SCRIPT" \ - --coding-standards-schema-file "$CODING_STANDARDS_SCHEMA" \ - --sarif-schema-file "$SARIF_SCHEMA" \ - "$CODING_STANDARDS_CONFIG" \ - "$SARIF_FILE" \ - "sarif-results-recategorized/$(basename "$SARIF_FILE")" - - rm "$SARIF_FILE" - mv "sarif-results-recategorized/$(basename "$SARIF_FILE")" "$SARIF_FILE" - - - name: Generate HTML Report from SARIF - run: | - SARIF_FILE="sarif-results/cpp.sarif" - sarif html "$SARIF_FILE" --output codeql-report.html - - - name: Upload SARIF results as artifact - uses: actions/upload-artifact@v4 - with: - name: codeql-sarif-results - path: sarif-results/ - - - - name: Upload HTML Report as artifact - uses: actions/upload-artifact@v4 - with: - name: codeql-html-report - path: codeql-report.html + - name: Checkout central repository + uses: actions/checkout@v4 + - name: Checkout CodeQL Coding Standards scripts + uses: actions/checkout@v4 + with: + repository: github/codeql-coding-standards + path: codeql-coding-standards-repo # Klonen in diesen Ordner + ref: main # Oder eine spezifische Release-Version, z.B. 
'v2.53.0-dev' + # Add coding standard packages and dependencies + - name: Install Python dependencies for Coding Standards scripts + run: | + python3 -m pip install --upgrade pip + pip3 install pyyaml jsonpath-ng jsonschema jsonpatch jsonpointer pytest sarif-tools + - name: Parse known_good.json and create repos.json + id: parse-repos + run: | + scripts/workflow/parse_repos.sh + - name: Checkout all pinned repositories + id: checkout-repos + run: | + scripts/workflow/checkout_repos.sh + - name: Initialize CodeQL for all repositories + uses: github/codeql-action/init@v4 + with: + languages: cpp + build-mode: none + packs: codeql/misra-cpp-coding-standards + config-file: ./.github/codeql/codeql-config.yml + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v4 + with: + upload-database: false # Don't upload databases for each repo + output: sarif-results/ + category: "multi-repo-scan" + - name: Recategorize Guidelines + if: always() + run: | + scripts/workflow/recategorize_guidelines.sh + - name: Generate HTML Report from SARIF + run: | + SARIF_FILE="sarif-results/cpp.sarif" + sarif html "$SARIF_FILE" --output codeql-report.html + - name: Upload SARIF results as artifact + uses: actions/upload-artifact@v4 + with: + name: codeql-sarif-results + path: sarif-results/ + - name: Upload HTML Report as artifact + uses: actions/upload-artifact@v4 + with: + name: codeql-html-report + path: codeql-report.html diff --git a/.github/workflows/docs_cleanup.yml b/.github/workflows/docs_cleanup.yml index cfa4ae2444..d845653f7b 100644 --- a/.github/workflows/docs_cleanup.yml +++ b/.github/workflows/docs_cleanup.yml @@ -10,18 +10,14 @@ # # SPDX-License-Identifier: Apache-2.0 # ******************************************************************************* - name: Documentation Cleanup - permissions: contents: write pages: write id-token: write - on: schedule: - cron: '0 0 * * *' # Runs every day at midnight UTC - jobs: docs-cleanup: uses: 
eclipse-score/cicd-workflows/.github/workflows/docs-cleanup.yml@main diff --git a/.github/workflows/known_good_correct.yml b/.github/workflows/known_good_correct.yml index 03b6536003..094db421b3 100644 --- a/.github/workflows/known_good_correct.yml +++ b/.github/workflows/known_good_correct.yml @@ -10,18 +10,14 @@ # # SPDX-License-Identifier: Apache-2.0 # ******************************************************************************* - name: Known Good Matches Bazel - on: pull_request: types: [opened, reopened, synchronize] merge_group: types: [checks_requested] - permissions: contents: write - jobs: check-known-good-matches-bazel: name: Check Known Good Matches with Bazel diff --git a/.github/workflows/reusable_integration-build.yml b/.github/workflows/reusable_integration-build.yml index e0d8ba682c..7343ceb736 100644 --- a/.github/workflows/reusable_integration-build.yml +++ b/.github/workflows/reusable_integration-build.yml @@ -49,7 +49,6 @@ # REPO_READ_TOKEN: ${{ secrets.REPO_READ_TOKEN }} # name: Module Integration Build - on: workflow_call: secrets: @@ -75,10 +74,8 @@ on: required: false type: string default: 'main' - env: REFERENCE_INTEGRATION_REPO: ${{ vars.REFERENCE_INTEGRATION_REPO != '' && vars.REFERENCE_INTEGRATION_REPO || 'eclipse-score/reference_integration' }} - jobs: integration-test: name: Integration Test @@ -86,7 +83,6 @@ jobs: steps: - name: Clean disk space uses: eclipse-score/more-disk-space@v1 - - name: Checkout repository uses: actions/checkout@v4.2.2 with: @@ -111,7 +107,7 @@ jobs: EOF cat known_good.updated.json echo "::endgroup::" - + echo "::group::update score_modules.MODULE.bazel" python3 scripts/known_good/update_module_from_known_good.py --known known_good.updated.json --output-dir bazel_common/ cat bazel_common/score_modules.MODULE.bazel diff --git a/.github/workflows/reusable_smoke-test.yml b/.github/workflows/reusable_smoke-test.yml index ec9b7ac8c2..929b540265 100644 --- a/.github/workflows/reusable_smoke-test.yml +++ 
b/.github/workflows/reusable_smoke-test.yml @@ -49,9 +49,7 @@ # # Notes: # - Extend the matrix in `integration-test` to cover additional configs. - name: Module Smoke Test - on: workflow_call: inputs: @@ -73,10 +71,8 @@ on: REPO_READ_TOKEN: description: 'Token for reading repositories' required: false - env: REFERENCE_INTEGRATION_REPO: ${{ vars.reference_integration_repo != '' && vars.reference_integration_repo || 'eclipse-score/reference_integration' }} - jobs: preparation: name: Preparation @@ -106,7 +102,7 @@ jobs: cat known_good.updated.json echo "::endgroup::" fi - + # Output the content as a JSON string echo "known_good_updated<> $GITHUB_OUTPUT cat known_good.updated.json >> $GITHUB_OUTPUT @@ -122,7 +118,6 @@ jobs: with: name: known_good.updated.json path: known_good.updated.json - docs: name: Generate Documentation runs-on: ubuntu-latest @@ -130,7 +125,6 @@ jobs: steps: - name: not implemented run: echo "Documentation generation not yet implemented here." - integration-test: name: Integration Testing (${{ matrix.config }}) needs: preparation @@ -150,7 +144,6 @@ jobs: config: ${{ matrix.config }} repo_runner_labels: ${{ inputs.repo_runner_labels }} target_branch: ${{ inputs.target_branch }} - summary: name: Publish Summary runs-on: ubuntu-latest diff --git a/.github/workflows/test_and_docs.yml b/.github/workflows/test_and_docs.yml index fde8aceb6e..4d7a03e2fd 100644 --- a/.github/workflows/test_and_docs.yml +++ b/.github/workflows/test_and_docs.yml @@ -10,15 +10,12 @@ # # SPDX-License-Identifier: Apache-2.0 # ******************************************************************************* - name: Code Quality & Documentation - permissions: contents: write pages: write pull-requests: write id-token: write - on: pull_request_target: # Allows forks to trigger the docs build types: [opened, reopened, synchronize] @@ -29,18 +26,15 @@ on: types: [checks_requested] release: types: [created] - jobs: test_and_docs: runs-on: ubuntu-22.04 permissions: contents: write # 
required to upload release assets pull-requests: write - steps: - name: Clean disk space uses: eclipse-score/more-disk-space@v1 - - name: Setup Bazel uses: bazel-contrib/setup-bazel@0.18.0 with: @@ -48,32 +42,26 @@ jobs: disk-cache: ${{ github.workflow }} repository-cache: true cache-save: ${{ github.event_name == 'push' }} - - name: Set up Python 3 uses: actions/setup-python@v5 with: python-version: '3.12' - - name: Install lcov run: | sudo apt-get update sudo apt-get install -y lcov - - name: Checkout repository (pull_request_target via workflow_call) if: ${{ github.event_name == 'pull_request_target' }} uses: actions/checkout@v4 with: ref: ${{ github.head_ref || github.event.pull_request.head.ref || github.ref }} repository: ${{ github.event.pull_request.head.repo.full_name || github.repository }} - - name: Checkout repository if: ${{ github.event_name != 'pull_request_target' }} uses: actions/checkout@v4 - - name: Execute Unit Tests with Coverage Analysis run: | python ./scripts/quality_runners.py - - name: Publish build summary if: always() run: | @@ -88,7 +76,6 @@ jobs: else echo "No coverage summary file found (docs/verification/coverage_summary.md)" >> "$GITHUB_STEP_SUMMARY" fi - - name: Create archive of test reports if: github.ref_type == 'tag' run: | @@ -97,7 +84,6 @@ jobs: cp -r "$(bazel info bazel-bin)/coverage/rust-tests" artifacts/rust zip -r ${{ github.event.repository.name }}_test_reports.zip artifacts/ shell: bash - - name: Upload release asset (attach ZIP to GitHub Release) uses: softprops/action-gh-release@v2.5.0 if: github.ref_type == 'tag' @@ -105,13 +91,11 @@ jobs: files: ${{ github.event.repository.name }}_test_reports.zip env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: Install Graphviz uses: eclipse-score/apt-install@main with: packages: graphviz cache: false - - name: Build documentation run: | bazel --output_base="/home/runner/.cache/bazel/output_base" run \ @@ -123,7 +107,6 @@ jobs: --github_repo=${{ github.event.repository.name 
}} tar -cf github-pages.tar _build - - name: Upload documentation artifact uses: actions/upload-artifact@v4.4.0 with: @@ -131,7 +114,6 @@ jobs: path: github-pages.tar retention-days: 3 if-no-files-found: error - docs-deploy: name: Deploy Documentation to GitHub Pages runs-on: ${{ vars.REPO_RUNNER_LABELS && fromJSON(vars.REPO_RUNNER_LABELS) || 'ubuntu-latest' }} @@ -196,15 +178,12 @@ jobs: fi - name: Checkout repository uses: actions/checkout@v4.2.2 - - name: Download documentation artifact uses: actions/download-artifact@v4.1.8 with: name: github-pages-${{ github.event.pull_request.head.sha || github.sha }} - - name: Untar documentation artifact run: mkdir -p extracted_docs && tar -xf github-pages.tar -C extracted_docs - - name: Deploy 🚀 id: pages-deployment uses: eclipse-score/cicd-workflows/.github/actions/deploy-versioned-pages@main diff --git a/.github/workflows/test_integration.yml b/.github/workflows/test_integration.yml index 96850ed2d2..34a961e529 100644 --- a/.github/workflows/test_integration.yml +++ b/.github/workflows/test_integration.yml @@ -10,10 +10,8 @@ # # SPDX-License-Identifier: Apache-2.0 # ******************************************************************************* - # Workflow configuration for S-CORE CI - Bazel Build & Test baselibs # This workflow runs Bazel build and test when triggered by specific pull request events. 
- name: build latest mains on: workflow_dispatch: diff --git a/images/autosd_x86_64/build/image.aib.yml b/images/autosd_x86_64/build/image.aib.yml index 4cc4f2cf59..5fdd8e0598 100644 --- a/images/autosd_x86_64/build/image.aib.yml +++ b/images/autosd_x86_64/build/image.aib.yml @@ -12,7 +12,6 @@ # ******************************************************************************* # name: score-autosd - image: selinux_mode: permissive content: @@ -21,9 +20,9 @@ content: repos: - id: epel baseurl: https://dl.fedoraproject.org/pub/epel/10/Everything/$arch/ - # removing it for now because the image is relying on copying globs - #- id: score - # baseurl: file:///host/rpms + # removing it for now because the image is relying on copying globs + #- id: score + # baseurl: file:///host/rpms rpms: # For testing the image only: - openssh-server @@ -33,13 +32,11 @@ content: - bluechi-controller - bluechi-agent - bluechi-ctl - make_dirs: - path: /etc/containers/systemd/qm.container.d mode: 0755 parents: true exist_ok: true - # Configure shared socket directory via tmpfiles add_files: - path: /usr/lib/tmpfiles.d/lola_disc.conf @@ -72,7 +69,6 @@ content: source_glob: "files/showcases_all/**/*" preserve_path: true max_files: 50 - chmod_files: - path: /usr/bin/lola-ipc-test mode: "0755" @@ -80,36 +76,27 @@ content: - path: /usr/share/score/examples/showcases_all/bin/cli mode: "0755" recursive: true - - path: /usr/share/score/examples/showcases_all/bin/control_daemon mode: "0755" recursive: true - - path: /usr/share/score/examples/showcases_all/bin/cpp_supervised_app mode: "0755" recursive: true - - path: /usr/share/score/examples/showcases_all/bin/ipc_bridge_cpp mode: "0755" recursive: true - - path: /usr/share/score/examples/showcases_all/bin/launch_manager mode: "0755" recursive: true - - path: /usr/share/score/examples/showcases_all/bin/lifecycle_signal.sh mode: "0755" recursive: true - - path: /usr/share/score/examples/showcases_all/bin/orch_per_example mode: "0755" recursive: true - 
- path: /usr/share/score/examples/showcases_all/bin/rust_supervised_app mode: "0755" recursive: true - - # Required for testing the image only: systemd: enabled_services: @@ -118,7 +105,6 @@ content: # bluechi services - bluechi-controller.service - bluechi-agent.service - qm: memory_limit: max: infinity @@ -141,7 +127,6 @@ qm: systemd: enabled_services: - bluechi-agent - auth: # Required for testing the image only: sshd_config: diff --git a/images/autosd_x86_64/build/vars-devel.yml b/images/autosd_x86_64/build/vars-devel.yml index cfebf2ebf2..66dbb6d410 100644 --- a/images/autosd_x86_64/build/vars-devel.yml +++ b/images/autosd_x86_64/build/vars-devel.yml @@ -10,5 +10,4 @@ # # SPDX-License-Identifier: Apache-2.0 # ******************************************************************************* - root_password: $6$xoLqEUz0cGGJRx01$H3H/bFm0myJPULNMtbSsOFd/2BnHqHkMD92Sfxd.EKM9hXTWSmELG8cf205l6dktomuTcgKGGtGDgtvHVXSWU. diff --git a/images/autosd_x86_64/build/vars.yml b/images/autosd_x86_64/build/vars.yml index f76998dd8f..5f3c012108 100644 --- a/images/autosd_x86_64/build/vars.yml +++ b/images/autosd_x86_64/build/vars.yml @@ -10,5 +10,4 @@ # # SPDX-License-Identifier: Apache-2.0 # ******************************************************************************* - disable_ipv6: false diff --git a/scripts/workflow/checkout_repos.sh b/scripts/workflow/checkout_repos.sh new file mode 100755 index 0000000000..d70cce8bc0 --- /dev/null +++ b/scripts/workflow/checkout_repos.sh @@ -0,0 +1,47 @@ +#!/bin/bash +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* +# jq is already installed by the previous step. + +# Read repositories from the repos.json file created by the previous step +repos=$(cat repos.json) +repo_count=$(echo "$repos" | jq length) +# Initialize an empty string for paths to be outputted +repo_paths_output="" +for i in $(seq 0 $((repo_count-1))); do + name=$(echo "$repos" | jq -r ".[$i].name") + url=$(echo "$repos" | jq -r ".[$i].url") + ref=$(echo "$repos" | jq -r ".[$i].version") # This can be a branch, tag, or commit hash + path=$(echo "$repos" | jq -r ".[$i].path") # e.g., "repos/score_baselibs" + echo "Checking out $name ($ref) to $path" + # Create the parent directory if it doesn't exist + mkdir -p "$(dirname "$path")" + # Check if 'ref' looks like a commit hash (e.g., 40 hex characters) + # This is a heuristic; a more robust check might involve fetching refs first. + if [[ "$ref" =~ ^[0-9a-fA-F]{40}$ ]]; then + echo " Detected commit hash. Cloning and then checking out." + git clone "$url" "$path" + (cd "$path" && git checkout "$ref") + else + echo " Detected branch/tag. Cloning with --branch." 
+ git clone --depth 1 --branch v"$ref" "$url" "$path" + fi + # Append the path to the list, separated by commas + if [ -z "$repo_paths_output" ]; then + repo_paths_output="$path" + else + repo_paths_output="$repo_paths_output,$path" + fi +done +# Output all paths as a single variable +echo "repo_paths=$repo_paths_output" >> $GITHUB_OUTPUT diff --git a/scripts/workflow/parse_repos.sh b/scripts/workflow/parse_repos.sh new file mode 100755 index 0000000000..71c50737a5 --- /dev/null +++ b/scripts/workflow/parse_repos.sh @@ -0,0 +1,54 @@ +#!/bin/bash +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* +sudo apt-get update && sudo apt-get install -y jq +JSON_FILE="./known_good.json" +# Check if the file exists +if [ ! -f "$JSON_FILE" ]; then + echo "Error file not found '$JSON_FILE' " + ls -la . + exit 1 +fi +# Create repos.json from known_good.json +# This jq command transforms the 'modules' object into an array of repository objects +# with 'name', 'url', 'version' (branch/tag/hash), and 'path'. +jq '[.modules.target_sw | to_entries[] | { + name: .key, + url: .value.repo, + version: (.value.branch // .value.hash // .value.version), + path: ("repos/" + .key) +}]' "$JSON_FILE" > repos.json +echo "Generated repos.json:" +cat repos.json +echo "" # Add a newline for better readability +# The following GITHUB_OUTPUT variables are set for each module. 
+# These might be useful for other steps, but are not directly used by the 'checkout-repos' step +# which now reads 'repos.json' directly. +echo "MODULE_COUNT=$(jq '.modules.target_sw | length' "$JSON_FILE")" >> $GITHUB_OUTPUT +jq -c '.modules.target_sw | to_entries[]' "$JSON_FILE" | while read -r module_entry; do + module_name=$(echo "$module_entry" | jq -r '.key') + repo_url=$(echo "$module_entry" | jq -r '.value.repo // empty') + version=$(echo "$module_entry" | jq -r '.value.version // empty') + branch=$(echo "$module_entry" | jq -r '.value.branch // empty') + hash=$(echo "$module_entry" | jq -r '.value.hash // empty') + echo "${module_name}_url=$repo_url" >> $GITHUB_OUTPUT + if [ -n "$version" ]; then + echo "${module_name}_version=$version" >> $GITHUB_OUTPUT + fi + if [ -n "$branch" ]; then + echo "${module_name}_branch=$branch" >> $GITHUB_OUTPUT + fi + if [ -n "$hash" ]; then + echo "${module_name}_hash=$hash" >> $GITHUB_OUTPUT + fi +done diff --git a/scripts/workflow/recategorize_guidelines.sh b/scripts/workflow/recategorize_guidelines.sh new file mode 100755 index 0000000000..8fa4b73602 --- /dev/null +++ b/scripts/workflow/recategorize_guidelines.sh @@ -0,0 +1,28 @@ +#!/bin/bash +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* +RECATEGORIZE_SCRIPT="codeql-coding-standards-repo/scripts/guideline_recategorization/recategorize.py" +CODING_STANDARDS_CONFIG="./.github/codeql/coding-standards.yml" +CODING_STANDARDS_SCHEMA="codeql-coding-standards-repo/schemas/coding-standards-schema-1.0.0.json" +SARIF_SCHEMA="codeql-coding-standards-repo/schemas/sarif-schema-2.1.0.json" +SARIF_FILE="sarif-results/cpp.sarif" +mkdir -p sarif-results-recategorized +echo "Processing $SARIF_FILE for recategorization..." +python3 "$RECATEGORIZE_SCRIPT" \ + --coding-standards-schema-file "$CODING_STANDARDS_SCHEMA" \ + --sarif-schema-file "$SARIF_SCHEMA" \ + "$CODING_STANDARDS_CONFIG" \ + "$SARIF_FILE" \ + "sarif-results-recategorized/$(basename "$SARIF_FILE")" + rm "$SARIF_FILE" + mv "sarif-results-recategorized/$(basename "$SARIF_FILE")" "$SARIF_FILE" From 3b77a389e3ef9167e63735d44df949042de8dabe Mon Sep 17 00:00:00 2001 From: Piotr Korkus Date: Fri, 27 Feb 2026 11:07:44 +0100 Subject: [PATCH 06/10] format: apply copyright headers --- .github/codeql/codeql-config.yml | 12 ++++++++++++ .github/codeql/coding-standards.yml | 12 ++++++++++++ .github/workflows/reusable_integration-build.yml | 12 ++++++++++++ .github/workflows/reusable_smoke-test.yml | 12 ++++++++++++ bazel_common/bundlers.bzl | 12 ++++++++++++ bazel_common/score_basic_bazel.MODULE.bazel | 12 ++++++++++++ bazel_common/score_python.MODULE.bazel | 12 ++++++++++++ bazel_common/score_qnx_toolchains.MODULE.bazel | 12 ++++++++++++ bazel_common/score_rust_toolchains.MODULE.bazel | 12 ++++++++++++ feature_integration_tests/itf/BUILD | 12 ++++++++++++ feature_integration_tests/test_cases/BUILD | 12 ++++++++++++ 
feature_integration_tests/test_cases/conftest.py | 12 ++++++++++++ .../test_cases/fit_scenario.py | 12 ++++++++++++ feature_integration_tests/test_cases/pytest.ini | 12 ++++++++++++ .../test_cases/test_properties.py | 12 ++++++++++++ .../basic/test_orchestration_with_persistency.py | 12 ++++++++++++ .../test_scenarios/rust/src/internals/kyron/mod.rs | 6 +++--- .../rust/src/internals/kyron/runtime_helper.rs | 6 +++--- .../test_scenarios/rust/src/internals/mod.rs | 6 +++--- .../rust/src/internals/persistency/kvs_instance.rs | 12 ++++++++++++ .../src/internals/persistency/kvs_parameters.rs | 12 ++++++++++++ .../rust/src/internals/persistency/mod.rs | 12 ++++++++++++ .../test_scenarios/rust/src/main.rs | 6 +++--- .../test_scenarios/rust/src/scenarios/basic/mod.rs | 5 +++-- .../basic/orchestration_with_persistency.rs | 6 +++--- .../test_scenarios/rust/src/scenarios/mod.rs | 6 +++--- .../rust/src/scenarios/persistency/mod.rs | 5 +++-- .../scenarios/persistency/multiple_kvs_per_app.rs | 12 ++++++++++++ images/ebclfsa_aarch64/BUILD | 12 ++++++++++++ .../persistency_integration/run_qemu.sh | 13 ++++++++++++- .../scripts/cpp_tests_persistency.sh | 12 ++++++++++++ .../scrample_integration/run_qemu.sh | 13 ++++++++++++- runners/docker_x86_64/scripts/BUILD | 12 ++++++++++++ runners/qemu_x86_64/scripts/BUILD | 12 ++++++++++++ scripts/__init__.py | 12 ++++++++++++ scripts/generate_rust_analyzer_support.sh | 13 ++++++++++++- scripts/integration_test.py | 12 ++++++++++++ scripts/known_good/__init__.py | 12 ++++++++++++ .../known_good/known_good_to_workspace_metadata.py | 12 ++++++++++++ scripts/known_good/models/__init__.py | 12 ++++++++++++ scripts/known_good/models/known_good.py | 12 ++++++++++++ scripts/known_good/models/module.py | 12 ++++++++++++ scripts/known_good/override_known_good_repo.py | 12 ++++++++++++ scripts/known_good/update_module_from_known_good.py | 12 ++++++++++++ scripts/known_good/update_module_latest.py | 12 ++++++++++++ scripts/models/__init__.py | 12 
++++++++++++ scripts/models/build_config.py | 12 ++++++++++++ scripts/publish_integration_summary.py | 12 ++++++++++++ scripts/quality_runners.py | 12 ++++++++++++ showcases/cli/main.rs | 12 ++++++++++++ showcases/orchestration_persistency/main.rs | 6 +++--- showcases/simple_lifecycle/BUILD | 12 ++++++++++++ showcases/simple_lifecycle/configs/BUILD | 12 ++++++++++++ showcases/simple_lifecycle/lifecycle_signal.sh | 12 ++++++++++++ showcases/standalone/BUILD | 12 ++++++++++++ 55 files changed, 579 insertions(+), 28 deletions(-) diff --git a/.github/codeql/codeql-config.yml b/.github/codeql/codeql-config.yml index 1efa17346b..88d151e9ae 100644 --- a/.github/codeql/codeql-config.yml +++ b/.github/codeql/codeql-config.yml @@ -1,3 +1,15 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* name: "Custom CodeQL Configuration for MISRA" paths: - repos diff --git a/.github/codeql/coding-standards.yml b/.github/codeql/coding-standards.yml index 8675280ae0..14d249f95f 100644 --- a/.github/codeql/coding-standards.yml +++ b/.github/codeql/coding-standards.yml @@ -1,3 +1,15 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* deviations: [] guideline-recategorizations: [] deviation-permits: [] diff --git a/.github/workflows/reusable_integration-build.yml b/.github/workflows/reusable_integration-build.yml index 7343ceb736..c3811fb34d 100644 --- a/.github/workflows/reusable_integration-build.yml +++ b/.github/workflows/reusable_integration-build.yml @@ -1,3 +1,15 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* # Module Integration Build Workflow # # Summary: diff --git a/.github/workflows/reusable_smoke-test.yml b/.github/workflows/reusable_smoke-test.yml index 929b540265..afc095df06 100644 --- a/.github/workflows/reusable_smoke-test.yml +++ b/.github/workflows/reusable_smoke-test.yml @@ -1,3 +1,15 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* # Module Smoke Test Workflow # # Summary: diff --git a/bazel_common/bundlers.bzl b/bazel_common/bundlers.bzl index 2733de30f5..173116d577 100644 --- a/bazel_common/bundlers.bzl +++ b/bazel_common/bundlers.bzl @@ -1,3 +1,15 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* load("@rules_pkg//pkg:mappings.bzl", "pkg_files") load("@rules_pkg//pkg:pkg.bzl", "pkg_tar") diff --git a/bazel_common/score_basic_bazel.MODULE.bazel b/bazel_common/score_basic_bazel.MODULE.bazel index 69dca59d09..c3f4e1c60e 100644 --- a/bazel_common/score_basic_bazel.MODULE.bazel +++ b/bazel_common/score_basic_bazel.MODULE.bazel @@ -1,3 +1,15 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* bazel_dep(name = "rules_shell", version = "0.6.0") bazel_dep(name = "rules_cc", version = "0.2.16") bazel_dep(name = "rules_pkg", version = "1.2.0") diff --git a/bazel_common/score_python.MODULE.bazel b/bazel_common/score_python.MODULE.bazel index b9ba82c3ad..3293cfb261 100644 --- a/bazel_common/score_python.MODULE.bazel +++ b/bazel_common/score_python.MODULE.bazel @@ -1,3 +1,15 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* bazel_dep(name = "rules_python", version = "1.8.3") PYTHON_VERSION = "3.12" diff --git a/bazel_common/score_qnx_toolchains.MODULE.bazel b/bazel_common/score_qnx_toolchains.MODULE.bazel index b07cb1b59b..0340dd96d6 100644 --- a/bazel_common/score_qnx_toolchains.MODULE.bazel +++ b/bazel_common/score_qnx_toolchains.MODULE.bazel @@ -1,3 +1,15 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* qcc = use_extension("@score_bazel_cpp_toolchains//extensions:gcc.bzl", "gcc", dev_dependency = True) qcc.toolchain( name = "score_qcc_aarch64_toolchain", diff --git a/bazel_common/score_rust_toolchains.MODULE.bazel b/bazel_common/score_rust_toolchains.MODULE.bazel index 37faa9184e..33f93f308c 100644 --- a/bazel_common/score_rust_toolchains.MODULE.bazel +++ b/bazel_common/score_rust_toolchains.MODULE.bazel @@ -1,3 +1,15 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* bazel_dep(name = "rules_rust", version = "0.61.0") bazel_dep(name = "score_toolchains_rust", version = "0.4.0", dev_dependency = True) diff --git a/feature_integration_tests/itf/BUILD b/feature_integration_tests/itf/BUILD index 262a06e790..b796b26948 100644 --- a/feature_integration_tests/itf/BUILD +++ b/feature_integration_tests/itf/BUILD @@ -1,3 +1,15 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* load("@score_itf//:defs.bzl", "py_itf_test") load("@score_itf//score/itf/plugins:plugins.bzl", "docker", "qemu") diff --git a/feature_integration_tests/test_cases/BUILD b/feature_integration_tests/test_cases/BUILD index fb5b529f47..a845e01694 100644 --- a/feature_integration_tests/test_cases/BUILD +++ b/feature_integration_tests/test_cases/BUILD @@ -1,3 +1,15 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* load("@pip_score_venv_test//:requirements.bzl", "all_requirements") load("@rules_python//python:pip.bzl", "compile_pip_requirements") load("@score_tooling//python_basics:defs.bzl", "score_py_pytest", "score_virtualenv") diff --git a/feature_integration_tests/test_cases/conftest.py b/feature_integration_tests/test_cases/conftest.py index 001b14a2b3..fc20f61f2f 100644 --- a/feature_integration_tests/test_cases/conftest.py +++ b/feature_integration_tests/test_cases/conftest.py @@ -1,3 +1,15 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding 
copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* from pathlib import Path import pytest diff --git a/feature_integration_tests/test_cases/fit_scenario.py b/feature_integration_tests/test_cases/fit_scenario.py index 6038f37b47..6188fec4fa 100644 --- a/feature_integration_tests/test_cases/fit_scenario.py +++ b/feature_integration_tests/test_cases/fit_scenario.py @@ -1,3 +1,15 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* import shutil from pathlib import Path from typing import Generator diff --git a/feature_integration_tests/test_cases/pytest.ini b/feature_integration_tests/test_cases/pytest.ini index d4b25fd2f8..801c0a4304 100644 --- a/feature_integration_tests/test_cases/pytest.ini +++ b/feature_integration_tests/test_cases/pytest.ini @@ -1,3 +1,15 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* [pytest] addopts = -v testpaths = tests diff --git a/feature_integration_tests/test_cases/test_properties.py b/feature_integration_tests/test_cases/test_properties.py index 2f30b7b7a1..b02ec02cb9 100644 --- a/feature_integration_tests/test_cases/test_properties.py +++ b/feature_integration_tests/test_cases/test_properties.py @@ -1,3 +1,15 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* try: from attribute_plugin import add_test_properties # type: ignore[import-untyped] except ImportError: diff --git a/feature_integration_tests/test_cases/tests/basic/test_orchestration_with_persistency.py b/feature_integration_tests/test_cases/tests/basic/test_orchestration_with_persistency.py index 8659bb90e3..d22276baf1 100644 --- a/feature_integration_tests/test_cases/tests/basic/test_orchestration_with_persistency.py +++ b/feature_integration_tests/test_cases/tests/basic/test_orchestration_with_persistency.py @@ -1,3 +1,15 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# 
information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* import json from collections.abc import Generator from pathlib import Path diff --git a/feature_integration_tests/test_scenarios/rust/src/internals/kyron/mod.rs b/feature_integration_tests/test_scenarios/rust/src/internals/kyron/mod.rs index 2938643029..e66df89f06 100644 --- a/feature_integration_tests/test_scenarios/rust/src/internals/kyron/mod.rs +++ b/feature_integration_tests/test_scenarios/rust/src/internals/kyron/mod.rs @@ -1,5 +1,5 @@ -// -// Copyright (c) 2025 Contributors to the Eclipse Foundation +// ******************************************************************************* +// Copyright (c) 2026 Contributors to the Eclipse Foundation // // See the NOTICE file(s) distributed with this work for additional // information regarding copyright ownership. 
@@ -9,5 +9,5 @@ // // // SPDX-License-Identifier: Apache-2.0 -// +// ******************************************************************************* pub mod runtime_helper; diff --git a/feature_integration_tests/test_scenarios/rust/src/internals/kyron/runtime_helper.rs b/feature_integration_tests/test_scenarios/rust/src/internals/kyron/runtime_helper.rs index 6054c0ce3a..0b1ec4addb 100644 --- a/feature_integration_tests/test_scenarios/rust/src/internals/kyron/runtime_helper.rs +++ b/feature_integration_tests/test_scenarios/rust/src/internals/kyron/runtime_helper.rs @@ -1,5 +1,5 @@ -// -// Copyright (c) 2025 Contributors to the Eclipse Foundation +// ******************************************************************************* +// Copyright (c) 2026 Contributors to the Eclipse Foundation // // See the NOTICE file(s) distributed with this work for additional // information regarding copyright ownership. @@ -9,7 +9,7 @@ // // // SPDX-License-Identifier: Apache-2.0 -// +// ******************************************************************************* use kyron::core::types::UniqueWorkerId; use kyron::prelude::ThreadParameters as AsyncRtThreadParameters; use kyron::runtime::*; diff --git a/feature_integration_tests/test_scenarios/rust/src/internals/mod.rs b/feature_integration_tests/test_scenarios/rust/src/internals/mod.rs index 117edcc98e..1174477b7b 100644 --- a/feature_integration_tests/test_scenarios/rust/src/internals/mod.rs +++ b/feature_integration_tests/test_scenarios/rust/src/internals/mod.rs @@ -1,5 +1,5 @@ -// -// Copyright (c) 2025 Contributors to the Eclipse Foundation +// ******************************************************************************* +// Copyright (c) 2026 Contributors to the Eclipse Foundation // // See the NOTICE file(s) distributed with this work for additional // information regarding copyright ownership. 
@@ -9,6 +9,6 @@ // // // SPDX-License-Identifier: Apache-2.0 -// +// ******************************************************************************* pub mod kyron; pub mod persistency; diff --git a/feature_integration_tests/test_scenarios/rust/src/internals/persistency/kvs_instance.rs b/feature_integration_tests/test_scenarios/rust/src/internals/persistency/kvs_instance.rs index 9e7cea2f4f..95676864be 100644 --- a/feature_integration_tests/test_scenarios/rust/src/internals/persistency/kvs_instance.rs +++ b/feature_integration_tests/test_scenarios/rust/src/internals/persistency/kvs_instance.rs @@ -1,3 +1,15 @@ +// ******************************************************************************* +// Copyright (c) 2026 Contributors to the Eclipse Foundation +// +// See the NOTICE file(s) distributed with this work for additional +// information regarding copyright ownership. +// +// This program and the accompanying materials are made available under the +// terms of the Apache License Version 2.0 which is available at +// https://www.apache.org/licenses/LICENSE-2.0 +// +// SPDX-License-Identifier: Apache-2.0 +// ******************************************************************************* //! KVS instance test helpers. use crate::internals::persistency::kvs_parameters::KvsParameters; diff --git a/feature_integration_tests/test_scenarios/rust/src/internals/persistency/kvs_parameters.rs b/feature_integration_tests/test_scenarios/rust/src/internals/persistency/kvs_parameters.rs index cbdefeded3..4b9819eb14 100644 --- a/feature_integration_tests/test_scenarios/rust/src/internals/persistency/kvs_parameters.rs +++ b/feature_integration_tests/test_scenarios/rust/src/internals/persistency/kvs_parameters.rs @@ -1,3 +1,15 @@ +// ******************************************************************************* +// Copyright (c) 2026 Contributors to the Eclipse Foundation +// +// See the NOTICE file(s) distributed with this work for additional +// information regarding copyright ownership. 
+// +// This program and the accompanying materials are made available under the +// terms of the Apache License Version 2.0 which is available at +// https://www.apache.org/licenses/LICENSE-2.0 +// +// SPDX-License-Identifier: Apache-2.0 +// ******************************************************************************* //! KVS parameters test helpers. use rust_kvs::prelude::{InstanceId, KvsDefaults, KvsLoad}; diff --git a/feature_integration_tests/test_scenarios/rust/src/internals/persistency/mod.rs b/feature_integration_tests/test_scenarios/rust/src/internals/persistency/mod.rs index db40645314..4a4873ccff 100644 --- a/feature_integration_tests/test_scenarios/rust/src/internals/persistency/mod.rs +++ b/feature_integration_tests/test_scenarios/rust/src/internals/persistency/mod.rs @@ -1,2 +1,14 @@ +// ******************************************************************************* +// Copyright (c) 2026 Contributors to the Eclipse Foundation +// +// See the NOTICE file(s) distributed with this work for additional +// information regarding copyright ownership. +// +// This program and the accompanying materials are made available under the +// terms of the Apache License Version 2.0 which is available at +// https://www.apache.org/licenses/LICENSE-2.0 +// +// SPDX-License-Identifier: Apache-2.0 +// ******************************************************************************* pub mod kvs_instance; pub mod kvs_parameters; diff --git a/feature_integration_tests/test_scenarios/rust/src/main.rs b/feature_integration_tests/test_scenarios/rust/src/main.rs index f676641198..024b09a255 100644 --- a/feature_integration_tests/test_scenarios/rust/src/main.rs +++ b/feature_integration_tests/test_scenarios/rust/src/main.rs @@ -1,5 +1,5 @@ -// -// Copyright (c) 2025 Contributors to the Eclipse Foundation +// ******************************************************************************* +// Copyright (c) 2026 Contributors to the Eclipse Foundation // // See the NOTICE file(s) distributed with this work for additional // information regarding copyright ownership. 
@@ -9,7 +9,7 @@ // // // SPDX-License-Identifier: Apache-2.0 -// +// ******************************************************************************* mod internals; mod scenarios; diff --git a/feature_integration_tests/test_scenarios/rust/src/scenarios/basic/mod.rs b/feature_integration_tests/test_scenarios/rust/src/scenarios/basic/mod.rs index cb871dd13a..f918dcac76 100644 --- a/feature_integration_tests/test_scenarios/rust/src/scenarios/basic/mod.rs +++ b/feature_integration_tests/test_scenarios/rust/src/scenarios/basic/mod.rs @@ -1,4 +1,5 @@ -// Copyright (c) 2025 Contributors to the Eclipse Foundation +// ******************************************************************************* +// Copyright (c) 2026 Contributors to the Eclipse Foundation // // See the NOTICE file(s) distributed with this work for additional // information regarding copyright ownership. @@ -8,7 +9,7 @@ // // // SPDX-License-Identifier: Apache-2.0 -// +// ******************************************************************************* mod orchestration_with_persistency; use orchestration_with_persistency::OrchestrationWithPersistency; diff --git a/feature_integration_tests/test_scenarios/rust/src/scenarios/basic/orchestration_with_persistency.rs b/feature_integration_tests/test_scenarios/rust/src/scenarios/basic/orchestration_with_persistency.rs index 3fb1f8cb1e..17fbdd3766 100644 --- a/feature_integration_tests/test_scenarios/rust/src/scenarios/basic/orchestration_with_persistency.rs +++ b/feature_integration_tests/test_scenarios/rust/src/scenarios/basic/orchestration_with_persistency.rs @@ -1,5 +1,5 @@ -// -// Copyright (c) 2025 Contributors to the Eclipse Foundation +// ******************************************************************************* +// Copyright (c) 2026 Contributors to the Eclipse Foundation // // See the NOTICE file(s) distributed with this work for additional // information regarding copyright ownership. 
@@ -9,7 +9,7 @@ // // // SPDX-License-Identifier: Apache-2.0 -// +// ******************************************************************************* use crate::internals::kyron::runtime_helper::Runtime; use kyron_foundation::containers::Vector; use kyron_foundation::prelude::CommonErrors; diff --git a/feature_integration_tests/test_scenarios/rust/src/scenarios/mod.rs b/feature_integration_tests/test_scenarios/rust/src/scenarios/mod.rs index 8c7f224287..00f6645772 100644 --- a/feature_integration_tests/test_scenarios/rust/src/scenarios/mod.rs +++ b/feature_integration_tests/test_scenarios/rust/src/scenarios/mod.rs @@ -1,5 +1,5 @@ -// -// Copyright (c) 2025 Contributors to the Eclipse Foundation +// ******************************************************************************* +// Copyright (c) 2026 Contributors to the Eclipse Foundation // // See the NOTICE file(s) distributed with this work for additional // information regarding copyright ownership. @@ -9,7 +9,7 @@ // // // SPDX-License-Identifier: Apache-2.0 -// +// ******************************************************************************* use test_scenarios_rust::scenario::{ScenarioGroup, ScenarioGroupImpl}; mod basic; diff --git a/feature_integration_tests/test_scenarios/rust/src/scenarios/persistency/mod.rs b/feature_integration_tests/test_scenarios/rust/src/scenarios/persistency/mod.rs index ca8fec5792..62353e6aed 100644 --- a/feature_integration_tests/test_scenarios/rust/src/scenarios/persistency/mod.rs +++ b/feature_integration_tests/test_scenarios/rust/src/scenarios/persistency/mod.rs @@ -1,4 +1,5 @@ -// Copyright (c) 2025 Contributors to the Eclipse Foundation +// ******************************************************************************* +// Copyright (c) 2026 Contributors to the Eclipse Foundation // // See the NOTICE file(s) distributed with this work for additional // information regarding copyright ownership. 
@@ -8,7 +9,7 @@ // // // SPDX-License-Identifier: Apache-2.0 -// +// ******************************************************************************* mod multiple_kvs_per_app; use multiple_kvs_per_app::MultipleKvsPerApp; diff --git a/feature_integration_tests/test_scenarios/rust/src/scenarios/persistency/multiple_kvs_per_app.rs b/feature_integration_tests/test_scenarios/rust/src/scenarios/persistency/multiple_kvs_per_app.rs index 61407b2111..fc9ffd6d92 100644 --- a/feature_integration_tests/test_scenarios/rust/src/scenarios/persistency/multiple_kvs_per_app.rs +++ b/feature_integration_tests/test_scenarios/rust/src/scenarios/persistency/multiple_kvs_per_app.rs @@ -1,3 +1,15 @@ +// ******************************************************************************* +// Copyright (c) 2026 Contributors to the Eclipse Foundation +// +// See the NOTICE file(s) distributed with this work for additional +// information regarding copyright ownership. +// +// This program and the accompanying materials are made available under the +// terms of the Apache License Version 2.0 which is available at +// https://www.apache.org/licenses/LICENSE-2.0 +// +// SPDX-License-Identifier: Apache-2.0 +// ******************************************************************************* use crate::internals::persistency::{kvs_instance::kvs_instance, kvs_parameters::KvsParameters}; use rust_kvs::prelude::KvsApi; use serde::Deserialize; diff --git a/images/ebclfsa_aarch64/BUILD b/images/ebclfsa_aarch64/BUILD index df35a19965..13f09c3752 100644 --- a/images/ebclfsa_aarch64/BUILD +++ b/images/ebclfsa_aarch64/BUILD @@ -1,4 +1,16 @@ # ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* +# ******************************************************************************* # Copyright (c) 2025 Contributors to the Eclipse Foundation # # See the NOTICE file(s) distributed with this work for additional diff --git a/images/ebclfsa_aarch64/persistency_integration/run_qemu.sh b/images/ebclfsa_aarch64/persistency_integration/run_qemu.sh index 2b2e30349b..8dba73351d 100755 --- a/images/ebclfsa_aarch64/persistency_integration/run_qemu.sh +++ b/images/ebclfsa_aarch64/persistency_integration/run_qemu.sh @@ -1,5 +1,16 @@ #!/bin/bash - +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* set -xu if [ -z "$1" ]; then diff --git a/images/ebclfsa_aarch64/persistency_integration/scripts/cpp_tests_persistency.sh b/images/ebclfsa_aarch64/persistency_integration/scripts/cpp_tests_persistency.sh index 0dd092d139..afe14ec080 100755 --- a/images/ebclfsa_aarch64/persistency_integration/scripts/cpp_tests_persistency.sh +++ b/images/ebclfsa_aarch64/persistency_integration/scripts/cpp_tests_persistency.sh @@ -1,4 +1,16 @@ #!/bin/bash +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* # Script to run C++ persistency tests inside the QEMU environment # Wrapper due to quoting issues when calling directly from Bazel cpp_tests_persistency --name basic.basic --input '{"kvs_parameters":{"instance_id":0}}' diff --git a/images/ebclfsa_aarch64/scrample_integration/run_qemu.sh b/images/ebclfsa_aarch64/scrample_integration/run_qemu.sh index 3817c374f9..adab692e6b 100755 --- a/images/ebclfsa_aarch64/scrample_integration/run_qemu.sh +++ b/images/ebclfsa_aarch64/scrample_integration/run_qemu.sh @@ -1,5 +1,16 @@ #!/bin/bash - +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* set -xu if [ -z "$1" ]; then diff --git a/runners/docker_x86_64/scripts/BUILD b/runners/docker_x86_64/scripts/BUILD index 400ffffccd..9bfe4ec77e 100644 --- a/runners/docker_x86_64/scripts/BUILD +++ b/runners/docker_x86_64/scripts/BUILD @@ -1 +1,13 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* exports_files(["run_docker.sh"]) diff --git a/runners/qemu_x86_64/scripts/BUILD b/runners/qemu_x86_64/scripts/BUILD index 44ff61c805..29e9c7db4a 100644 --- a/runners/qemu_x86_64/scripts/BUILD +++ b/runners/qemu_x86_64/scripts/BUILD @@ -1 +1,13 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* exports_files(["run_qemu.sh"]) diff --git a/scripts/__init__.py b/scripts/__init__.py index 7994ee43e2..15833012d2 100644 --- a/scripts/__init__.py +++ b/scripts/__init__.py @@ -1 +1,13 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* # Scripts package diff --git a/scripts/generate_rust_analyzer_support.sh b/scripts/generate_rust_analyzer_support.sh index de16c4b973..960c5edc7b 100755 --- a/scripts/generate_rust_analyzer_support.sh +++ b/scripts/generate_rust_analyzer_support.sh @@ -1,5 +1,16 @@ #!/bin/bash - +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* set -e # Manual targets are not take into account, must be set explicitly diff --git a/scripts/integration_test.py b/scripts/integration_test.py index 1ba3381e72..219905689c 100755 --- a/scripts/integration_test.py +++ b/scripts/integration_test.py @@ -1,4 +1,16 @@ #!/usr/bin/env python3 +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* """Integration build script for SCORE modules. Captures warning counts for regression tracking and generates build summaries. diff --git a/scripts/known_good/__init__.py b/scripts/known_good/__init__.py index eac8e9d3f5..310987124b 100644 --- a/scripts/known_good/__init__.py +++ b/scripts/known_good/__init__.py @@ -1 +1,13 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* # Known good utilities package diff --git a/scripts/known_good/known_good_to_workspace_metadata.py b/scripts/known_good/known_good_to_workspace_metadata.py index c61ce03391..bb933c8dcc 100644 --- a/scripts/known_good/known_good_to_workspace_metadata.py +++ b/scripts/known_good/known_good_to_workspace_metadata.py @@ -1,4 +1,16 @@ #!/usr/bin/env python3 +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* import argparse import csv from pathlib import Path diff --git a/scripts/known_good/models/__init__.py b/scripts/known_good/models/__init__.py index c6600de577..edf7659729 100644 --- a/scripts/known_good/models/__init__.py +++ b/scripts/known_good/models/__init__.py @@ -1,3 +1,15 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* """Models for score reference integration tools.""" from .module import Module, Metadata diff --git a/scripts/known_good/models/known_good.py b/scripts/known_good/models/known_good.py index 2cda4b84be..48c6eaabec 100644 --- a/scripts/known_good/models/known_good.py +++ b/scripts/known_good/models/known_good.py @@ -1,3 +1,15 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* """KnownGood dataclass for score reference integration.""" from __future__ import annotations diff --git a/scripts/known_good/models/module.py b/scripts/known_good/models/module.py index 6f439ff9a8..9fa266f91b 100644 --- a/scripts/known_good/models/module.py +++ b/scripts/known_good/models/module.py @@ -1,3 +1,15 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* """Module dataclass for score reference integration.""" from __future__ import annotations diff --git a/scripts/known_good/override_known_good_repo.py b/scripts/known_good/override_known_good_repo.py index 5b541fd0e5..218d360d0a 100755 --- a/scripts/known_good/override_known_good_repo.py +++ b/scripts/known_good/override_known_good_repo.py @@ -1,4 +1,16 @@ #!/usr/bin/env python3 +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* """ Update a known_good.json file by pinning modules to specific commits. diff --git a/scripts/known_good/update_module_from_known_good.py b/scripts/known_good/update_module_from_known_good.py index 70cd2e1333..bd4b732db5 100755 --- a/scripts/known_good/update_module_from_known_good.py +++ b/scripts/known_good/update_module_from_known_good.py @@ -1,4 +1,16 @@ #!/usr/bin/env python3 +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* """ Read a known_good.json file and generate a score_modules.MODULE.bazel file with `bazel_dep` and `git_override` calls for each module in the JSON. diff --git a/scripts/known_good/update_module_latest.py b/scripts/known_good/update_module_latest.py index 8c341ed458..33e238c4f1 100755 --- a/scripts/known_good/update_module_latest.py +++ b/scripts/known_good/update_module_latest.py @@ -1,4 +1,16 @@ #!/usr/bin/env python3 +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* """Update module commit hashes to latest on a given branch. Reads a known_good.json file containing a list of modules with keys: diff --git a/scripts/models/__init__.py b/scripts/models/__init__.py index f3d9f4b1ed..4efac1919a 100644 --- a/scripts/models/__init__.py +++ b/scripts/models/__init__.py @@ -1 +1,13 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* # Models package diff --git a/scripts/models/build_config.py b/scripts/models/build_config.py index 993d91ff45..540172a626 100644 --- a/scripts/models/build_config.py +++ b/scripts/models/build_config.py @@ -1,3 +1,15 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* """Build configuration management for SCORE modules.""" import json diff --git a/scripts/publish_integration_summary.py b/scripts/publish_integration_summary.py index c67d9bdd86..09baab39f6 100644 --- a/scripts/publish_integration_summary.py +++ b/scripts/publish_integration_summary.py @@ -1,4 +1,16 @@ #!/usr/bin/env python3 +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* import argparse import os import sys diff --git a/scripts/quality_runners.py b/scripts/quality_runners.py index b542b9b1e7..c5c7bf704f 100644 --- a/scripts/quality_runners.py +++ b/scripts/quality_runners.py @@ -1,3 +1,15 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* import argparse import re import select diff --git a/showcases/cli/main.rs b/showcases/cli/main.rs index fae93bb8d2..55cfa62224 100644 --- a/showcases/cli/main.rs +++ b/showcases/cli/main.rs @@ -1,3 +1,15 @@ +// ******************************************************************************* +// Copyright (c) 2026 Contributors to the Eclipse Foundation +// +// See the NOTICE file(s) distributed with this work for additional +// information regarding copyright ownership. +// +// This program and the accompanying materials are made available under the +// terms of the Apache License Version 2.0 which is available at +// https://www.apache.org/licenses/LICENSE-2.0 +// +// SPDX-License-Identifier: Apache-2.0 +// ******************************************************************************* use anyhow::{Context, Result}; use clap::Parser; use serde::Deserialize; diff --git a/showcases/orchestration_persistency/main.rs b/showcases/orchestration_persistency/main.rs index a293daea87..995e125c30 100644 --- a/showcases/orchestration_persistency/main.rs +++ b/showcases/orchestration_persistency/main.rs @@ -1,5 +1,5 @@ -// -// Copyright (c) 2025 Contributors to the Eclipse Foundation +// ******************************************************************************* +// Copyright (c) 2026 Contributors to the Eclipse Foundation // // See the NOTICE file(s) distributed with this work for additional // information regarding copyright ownership. 
@@ -9,7 +9,7 @@ // // // SPDX-License-Identifier: Apache-2.0 -// +// ******************************************************************************* use std::path::PathBuf; use std::time::Duration; diff --git a/showcases/simple_lifecycle/BUILD b/showcases/simple_lifecycle/BUILD index 45107bfdf3..2bf578fb89 100644 --- a/showcases/simple_lifecycle/BUILD +++ b/showcases/simple_lifecycle/BUILD @@ -1,3 +1,15 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* load("@rules_cc//cc:cc_library.bzl", "cc_library") load("//bazel_common:bundlers.bzl", "score_pkg_bundle") diff --git a/showcases/simple_lifecycle/configs/BUILD b/showcases/simple_lifecycle/configs/BUILD index d9e94433c6..64d254dd1f 100644 --- a/showcases/simple_lifecycle/configs/BUILD +++ b/showcases/simple_lifecycle/configs/BUILD @@ -1,3 +1,15 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* exports_files([ "hm_demo.json", "lm_demo.json", diff --git a/showcases/simple_lifecycle/lifecycle_signal.sh b/showcases/simple_lifecycle/lifecycle_signal.sh index 55d81c468b..8d933288a9 100755 --- a/showcases/simple_lifecycle/lifecycle_signal.sh +++ b/showcases/simple_lifecycle/lifecycle_signal.sh @@ -1,4 +1,16 @@ #! /bin/sh +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* # This script sends a signal to a process by name. It uses `slay` on QNX and `pkill` on other systems. # Usage: lifecycle_signal.sh # Example: lifecycle_signal.sh my_process SIGTERM diff --git a/showcases/standalone/BUILD b/showcases/standalone/BUILD index fcff31034f..aae7c22560 100644 --- a/showcases/standalone/BUILD +++ b/showcases/standalone/BUILD @@ -1,3 +1,15 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* load("//bazel_common:bundlers.bzl", "score_pkg_bundle") score_pkg_bundle( From 12cf952331e6d6eb4aa9aee67ff0f26ebb7f181e Mon Sep 17 00:00:00 2001 From: Piotr Korkus Date: Fri, 27 Feb 2026 11:07:44 +0100 Subject: [PATCH 07/10] cicd: add format checks --- .github/workflows/format.yml | 38 ++++++++++++++++++++++++++++++++++++ BUILD | 1 - 2 files changed, 38 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/format.yml diff --git a/.github/workflows/format.yml b/.github/workflows/format.yml new file mode 100644 index 0000000000..80c1862a9d --- /dev/null +++ b/.github/workflows/format.yml @@ -0,0 +1,38 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* +name: Formatting checks +on: + pull_request: + types: [opened, reopened, synchronize] + merge_group: + types: [checks_requested] +jobs: + formatting-check: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4.2.2 + - name: Cache Bazel + uses: actions/cache@v4 + with: + path: ~/.cache/bazel + key: ${{ runner.os }}-format-${{ hashFiles('**/*.bazel', '**/BUILD', '**/*.bzl') }} + - name: Setup Bazel with cache + uses: bazel-contrib/setup-bazel@0.15.0 + with: + disk-cache: true + repository-cache: true + bazelisk-cache: true + - name: Run formatting checks + run: | + bazel test //:format.check diff --git a/BUILD b/BUILD index 904beefcdc..12c556345e 100644 --- a/BUILD +++ b/BUILD @@ -54,7 +54,6 @@ copyright_checker( "rust_coverage", "scripts", "showcases", - "tests", "//:BUILD", "//:MODULE.bazel", ], From 0dd7438bcbb4de8f0023cece519b7ebfea72e816 Mon Sep 17 00:00:00 2001 From: Piotr Korkus Date: Fri, 27 Feb 2026 11:07:44 +0100 Subject: [PATCH 08/10] update known_good script to match expected bazel formatting --- .../update_module_from_known_good.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/scripts/known_good/update_module_from_known_good.py b/scripts/known_good/update_module_from_known_good.py index bd4b732db5..e6c82fe3cb 100755 --- a/scripts/known_good/update_module_from_known_good.py +++ b/scripts/known_good/update_module_from_known_good.py @@ -59,7 +59,8 @@ def generate_git_override_blocks(modules: List[Module], repo_commit_dict: Dict[s patches_lines = " patches = [\n" for patch in module.bazel_patches: patches_lines += f' "{patch}",\n' - patches_lines += " ],\n patch_strip = 
1,\n" + patches_lines += " ],\n" + patch_strip_line = " patch_strip = 1,\n" if patches_lines else "" if module.version: # If version is provided, use bazel_dep with single_version_override @@ -67,8 +68,9 @@ def generate_git_override_blocks(modules: List[Module], repo_commit_dict: Dict[s f'bazel_dep(name = "{module.name}")\n' "single_version_override(\n" f' module_name = "{module.name}",\n' - f' version = "{module.version}",\n' + f"{patch_strip_line}" f"{patches_lines}" + f' version = "{module.version}",\n' ")\n" ) else: @@ -91,16 +93,17 @@ def generate_git_override_blocks(modules: List[Module], repo_commit_dict: Dict[s continue # If no version, use bazel_dep with git_override + # Only include patch_strip if there are patches to apply block = ( f'bazel_dep(name = "{module.name}")\n' "git_override(\n" f' module_name = "{module.name}",\n' - f' remote = "{module.repo}",\n' f' commit = "{commit}",\n' + f"{patch_strip_line}" f"{patches_lines}" + f' remote = "{module.repo}",\n' ")\n" ) - blocks.append(block) return blocks @@ -126,7 +129,7 @@ def generate_local_override_blocks(modules: List[Module]) -> List[str]: def generate_coverage_blocks(modules: List[Module]) -> List[str]: """Generate rust_coverage_report blocks for each module with rust impl.""" - blocks = ["""load("@score_tooling//:defs.bzl", "rust_coverage_report")\n\n"""] + blocks = ["""load("@score_tooling//:defs.bzl", "rust_coverage_report")"""] for module in modules: if "rust" not in module.metadata.langs: @@ -148,11 +151,10 @@ def generate_coverage_blocks(modules: List[Module]) -> List[str]: ], query = 'kind("rust_test", @{module.name}{module.metadata.code_root_path}){excluded_tests}', visibility = ["//visibility:public"], -) - """ +)""" blocks.append(block) - + blocks.append("") # Add final newline return blocks From 0f56ae93c6d5be8e2193dcac2dcba59ac9f40370 Mon Sep 17 00:00:00 2001 From: Piotr Korkus Date: Fri, 27 Feb 2026 11:43:39 +0100 Subject: [PATCH 09/10] format: empty line in the end for bash --- 
.vscode/rustfmt.sh | 2 +- images/qnx_x86_64/configs/network_setup_dhcp.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.vscode/rustfmt.sh b/.vscode/rustfmt.sh index 594c3c4ac1..b0481d52a7 100755 --- a/.vscode/rustfmt.sh +++ b/.vscode/rustfmt.sh @@ -1,3 +1,3 @@ #!/usr/bin/env bash -bazel run @score_tooling//format_checker:rustfmt_with_policies \ No newline at end of file +bazel run @score_tooling//format_checker:rustfmt_with_policies diff --git a/images/qnx_x86_64/configs/network_setup_dhcp.sh b/images/qnx_x86_64/configs/network_setup_dhcp.sh index 1b88acf568..785fced419 100644 --- a/images/qnx_x86_64/configs/network_setup_dhcp.sh +++ b/images/qnx_x86_64/configs/network_setup_dhcp.sh @@ -89,4 +89,4 @@ fi # Configure system network settings sysctl -w net.inet.icmp.bmcastecho=1 > /dev/null # Enable ICMP broadcast echo (responds to broadcast pings) -echo "---> Network configuration completed" \ No newline at end of file +echo "---> Network configuration completed" From 44adf4f2bab98e838aff69c2159c396358a43742 Mon Sep 17 00:00:00 2001 From: Piotr Korkus Date: Fri, 27 Feb 2026 11:43:57 +0100 Subject: [PATCH 10/10] cicd: use reusable format check --- .github/workflows/format.yml | 19 +------------------ 1 file changed, 1 insertion(+), 18 deletions(-) diff --git a/.github/workflows/format.yml b/.github/workflows/format.yml index 80c1862a9d..6664722abd 100644 --- a/.github/workflows/format.yml +++ b/.github/workflows/format.yml @@ -18,21 +18,4 @@ on: types: [checks_requested] jobs: formatting-check: - runs-on: ubuntu-latest - steps: - - name: Checkout repository - uses: actions/checkout@v4.2.2 - - name: Cache Bazel - uses: actions/cache@v4 - with: - path: ~/.cache/bazel - key: ${{ runner.os }}-format-${{ hashFiles('**/*.bazel', '**/BUILD', '**/*.bzl') }} - - name: Setup Bazel with cache - uses: bazel-contrib/setup-bazel@0.15.0 - with: - disk-cache: true - repository-cache: true - bazelisk-cache: true - - name: Run formatting checks - run: | - bazel 
test //:format.check + uses: eclipse-score/cicd-workflows/.github/workflows/format.yml@main