diff --git a/.gitignore b/.gitignore index b4c5bb79..88eef561 100644 --- a/.gitignore +++ b/.gitignore @@ -12,6 +12,7 @@ user.bazelrc # docs build artifacts /_build* docs/ubproject.toml +docs/schemas.json # Vale - editorial style guide .vale.ini diff --git a/docs/internals/requirements/requirements.rst b/docs/internals/requirements/requirements.rst index 816646cb..70a5413a 100644 --- a/docs/internals/requirements/requirements.rst +++ b/docs/internals/requirements/requirements.rst @@ -1097,6 +1097,6 @@ Grouped Requirements .. needextend:: c.this_doc() and type == 'tool_req' and not status :status: valid -.. needextend:: "metamodel.yaml" in source_code_link +.. needextend:: source_code_link is not None and "metamodel.yaml" in source_code_link :+satisfies: tool_req__docs_metamodel :+tags: config diff --git a/src/extensions/score_metamodel/README.md b/src/extensions/score_metamodel/README.md new file mode 100644 index 00000000..9f1b8296 --- /dev/null +++ b/src/extensions/score_metamodel/README.md @@ -0,0 +1,97 @@ +# score_metamodel + +Sphinx extension that enforces the S-CORE metamodel on sphinx-needs documents. + +It reads `metamodel.yaml` (the single source of truth for all need types, fields, +links, and constraints) and validates every need in the documentation against +those rules. + +## What it does + +1. **Registers need types** with sphinx-needs (directives like `feat_req`, `comp`, + `workflow`, etc.) including their fields, links, and extra options. +2. **Generates `schemas.json`** from the metamodel so that sphinx-needs 6 can + validate needs at parse time (required fields, regex patterns, link + constraints). Because ubCode (the VS Code extension for sphinx-needs) + evaluates these schemas during editing, **metamodel violations are shown + as diagnostics directly in the IDE** -- catching errors early with + lightweight, fast rendering, without needing a full Sphinx build. +3. 
**Runs post-build checks** that go beyond what JSON Schema can express + (graph traversals, prohibited words, ID format rules). + +## Metamodel overview + +`metamodel.yaml` defines: + +| Section | Purpose | +|---|---| +| `needs_types` | All need types (e.g. `feat_req`, `comp`, `document`) with their mandatory/optional fields and links | +| `needs_types_base_options` | Global optional fields applied to every type (e.g. `source_code_link`, `testlink`) | +| `needs_extra_links` | Custom link types (e.g. `satisfies`, `implements`, `mitigated_by`) | +| `prohibited_words_checks` | Forbidden words in titles/descriptions (e.g. "shall", "must") | +| `graph_checks` | Cross-need constraints (e.g. safety level decomposition rules) | + +Each need type can specify: + +- **`mandatory_options`** -- fields that must be present, with a regex pattern + the value must match (e.g. `status: ^(valid|invalid)$`). +- **`optional_options`** -- fields that, if present, must match a pattern. +- **`mandatory_links`** -- links that must have at least one target. The value + is either a plain type name (`stkh_req`) or a regex (`^logic_arc_int__.+$`). +- **`optional_links`** -- links that are allowed but not required. + +## Validation layers + +### Schema validation (sphinx-needs >6) + +`sn_schemas.py` translates the metamodel into a `schemas.json` file that +sphinx-needs evaluates at parse time. Each schema entry has: + +- **`select`** -- matches needs by their `type` field. +- **`validate.local`** -- JSON Schema checking the need's own properties + (required fields, regex patterns on option values, mandatory links with + `minItems: 1`). Regex patterns on **link IDs** (e.g. checking that + `includes` entries match `^logic_arc_int(_op)*__.+$`) are not yet + validated here; the schema only enforces that at least one link exists. + ID-pattern checking is still done by the Python `validate_links()` in + `check_options.py`. 
+- **`validate.network`** -- validates that linked needs have the expected + `type` (e.g. `satisfies` targets must be `stkh_req`). Uses the + sphinx-needs `items.local` format so each linked need is checked + individually. Only **mandatory** links are checked here; optional link + type violations are left to the Python `validate_links()` check, which + treats them as informational (`treat_as_info=True`) rather than errors. + Fields that mix regex and plain targets (e.g. + `complies: std_wp, ^std_req__aspice_40__iic.*$`) are also excluded + because the `items` schema would incorrectly require all linked needs + to match the plain type. + +### Post-build S-Core metamodel checks + +Checks in `checks/` run after the Sphinx build and cover rules that +JSON Schema cannot express: + +| Check | File | What it validates | +|---|---|---| +| `check_options` | `check_options.py` | Mandatory/optional field presence and patterns (legacy, overlaps with schema validation) | +| `check_extra_options` | `check_options.py` | Warns about fields not defined in the metamodel | +| `check_id_format` | `attributes_format.py` | ID structure (`<type>__<feature>__<title>`, part count) | +| `check_for_prohibited_words` | `attributes_format.py` | Forbidden words in titles | +| `check_metamodel_graph` | `graph_checks.py` | Cross-need constraints (e.g. ASIL_B needs must link to non-QM requirements) | +| `check_id_contains_feature` | `id_contains_feature.py` | Need IDs must contain the feature abbreviation from the file path | +| `check_standards` | `standards.py` | Standard compliance link validation | + +## File layout + +``` +score_metamodel/ + __init__.py # Sphinx extension entry point (setup, check orchestration) + metamodel.yaml # The S-CORE metamodel definition + metamodel_types.py # Type definitions (ScoreNeedType, etc.)
+ yaml_parser.py # Parses metamodel.yaml into MetaModelData + sn_schemas.py # Generates schemas.json for sphinx-needs 6 + log.py # CheckLogger for structured warning output + external_needs.py # External needs integration + checks/ # Post-build validation checks + tests/ # Unit and integration tests +``` diff --git a/src/extensions/score_metamodel/__init__.py b/src/extensions/score_metamodel/__init__.py index 0a6c4dae..b0d207de 100644 --- a/src/extensions/score_metamodel/__init__.py +++ b/src/extensions/score_metamodel/__init__.py @@ -15,6 +15,7 @@ import pkgutil from collections.abc import Callable from pathlib import Path +from typing import Any from sphinx.application import Sphinx from sphinx_needs import logging @@ -31,6 +32,7 @@ from src.extensions.score_metamodel.metamodel_types import ( ScoreNeedType as ScoreNeedType, ) +from src.extensions.score_metamodel.sn_schemas import write_sn_schemas from src.extensions.score_metamodel.yaml_parser import ( default_options as default_options, ) @@ -237,10 +239,28 @@ def setup(app: Sphinx) -> dict[str, str | bool]: # load metamodel.yaml via ruamel.yaml metamodel = load_metamodel_data() + # Sphinx-Needs 6 requires extra options as dicts: {"name": ..., "schema": ...} + # Options WITH a schema get JSON schema validation (value must be a string). + # Options WITHOUT a schema are registered but not validated. 
+ # non_schema_options = {"source_code_link", "testlink", "codelink"} + non_schema_options: set[str] = set() # currently empty → all options get schema validation + extra_options_schema: list[dict[str, Any]] = [ + {"name": opt, "schema": {"type": "string"}} + for opt in metamodel.needs_extra_options + if opt not in non_schema_options + ] + extra_options_wo_schema: list[dict[str, Any]] = [ + {"name": opt} + for opt in metamodel.needs_extra_options + if opt in non_schema_options + ] + # extra_options = [{"name": opt} for opt in metamodel.needs_extra_options] + extra_options = extra_options_schema + extra_options_wo_schema + # Assign everything to Sphinx config app.config.needs_types = metamodel.needs_types app.config.needs_extra_links = metamodel.needs_extra_links - app.config.needs_extra_options = metamodel.needs_extra_options + app.config.needs_extra_options = extra_options app.config.graph_checks = metamodel.needs_graph_check app.config.prohibited_words_checks = metamodel.prohibited_words_checks @@ -251,6 +271,11 @@ def setup(app: Sphinx) -> dict[str, str | bool]: app.config.needs_reproducible_json = True app.config.needs_json_remove_defaults = True + # Generate schemas.json from the metamodel and register it with sphinx-needs. + # This enables sphinx-needs 6 schema validation: required fields, regex + # patterns on option values, and (eventually) link target type checks. + write_sn_schemas(app, metamodel) + # sphinx-collections runs on default prio 500. # We need to populate the sphinx-collections config before that happens.
# --> 499 diff --git a/src/extensions/score_metamodel/metamodel.yaml b/src/extensions/score_metamodel/metamodel.yaml index 398195c7..129a5353 100644 --- a/src/extensions/score_metamodel/metamodel.yaml +++ b/src/extensions/score_metamodel/metamodel.yaml @@ -971,6 +971,12 @@ needs_extra_links: partially_verifies: incoming: partially_verified_by outgoing: partially_verifies + + # Decision Records + affects: + incoming: affected by + outgoing: affects + ############################################################## # Graph Checks # The graph checks focus on the relation of the needs and their attributes. diff --git a/src/extensions/score_metamodel/sn_schemas.py b/src/extensions/score_metamodel/sn_schemas.py new file mode 100644 index 00000000..0f94c8f9 --- /dev/null +++ b/src/extensions/score_metamodel/sn_schemas.py @@ -0,0 +1,292 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* +"""Transforms the YAML metamodel into sphinx-needs >6 JSON schema definitions. + +Reads need types from the parsed metamodel (MetaModelData) and generates a +``schemas.json`` file that sphinx-needs uses to validate each need against +the S-CORE metamodel rules (required fields, regex patterns, link constraints). 
+ +Schema structure per need type (sphinx-needs schema format): + - ``select`` : matches needs by their ``type`` field + - ``validate.local`` : validates the need's own properties (patterns, required) + - ``validate.network`` : validates properties of linked needs +""" + +import json +from pathlib import Path +from typing import Any + +from sphinx.application import Sphinx +from sphinx.config import Config +from sphinx_needs import logging + +from src.extensions.score_metamodel.metamodel_types import ScoreNeedType +from src.extensions.score_metamodel.yaml_parser import MetaModelData + +# Fields whose values are lists in sphinx-needs (e.g. tags: ["safety", "security"]). +# These need an "array of strings" JSON schema instead of a plain "string" schema. +SN_ARRAY_FIELDS = { + "tags", + "sections", +} + +# Fields to skip during schema generation. +IGNORE_FIELDS = { + "content", # not yet available in ubCode +} + +LOGGER = logging.get_logger(__name__) + + +def write_sn_schemas(app: Sphinx, metamodel: MetaModelData) -> None: + """Build sphinx-needs schema definitions from the metamodel and write to JSON. + + Iterates over all need types, builds a schema for each one via + ``_build_need_type_schema``, and writes the result to + ``/schemas.json``. 
+ """ + config: Config = app.config + schemas: list[dict[str, Any]] = [] + + for need_type in metamodel.needs_types: + schema = _build_need_type_schema(need_type) + if schema is not None: + schemas.append(schema) + + schema_definitions: dict[str, Any] = {"schemas": schemas} + + # Write the complete schema definitions to a JSON file in confdir + schemas_output_path = Path(app.confdir) / "schemas.json" + with open(schemas_output_path, "w", encoding="utf-8") as f: + json.dump(schema_definitions, f, indent=2, ensure_ascii=False) + + # Tell sphinx-needs to load the schema from the JSON file + config.needs_schema_definitions_from_json = "schemas.json" + # config.needs_schema_definitions = schema_definitions + + +def _classify_links( + links: dict[str, Any], type_name: str, mandatory: bool +) -> tuple[dict[str, str], dict[str, list[str]]]: + """Classify link values into regex patterns vs. target type names. + + In the metamodel YAML, a link value can be either: + - A regex (starts with "^"), e.g. "^logic_arc_int(_op)*__.+$" + -> validated locally (the link ID must match the pattern) + - A plain type name, e.g. "comp" + -> validated via network (the linked need must have that type) + Multiple values are comma-separated, e.g. "comp, sw_unit". + + Returns: + A tuple of (regexes, targets) dicts, keyed by field name. + ``targets`` maps each field to a list of all allowed type names. + """ + label = "mandatory" if mandatory else "optional" + regexes: dict[str, str] = {} + targets: dict[str, list[str]] = {} + + for field, value in links.items(): + link_values = [v.strip() for v in value.split(",")] + for link_value in link_values: + if link_value.startswith("^"): + if field in regexes: + LOGGER.error( + f"Multiple regex patterns for {label} link field " + f"'{field}' in need type '{type_name}'. " + "Only the first one will be used in the schema." 
+ ) + regexes[field] = link_value + else: + if field not in targets: + targets[field] = [] + targets[field].append(link_value) + + return regexes, targets + + +def _build_local_validator( + mandatory_fields: dict[str, str], + optional_fields: dict[str, str], + mandatory_links_regexes: dict[str, str], + optional_links_regexes: dict[str, str], + mandatory_links_targets: dict[str, list[str]] | None = None, +) -> dict[str, Any]: + """Build the local validator dict for a need type's schema. + + The local validator checks the need's own properties: + - Mandatory fields must be present and match their regex pattern. + - Optional fields, if present, must match their regex pattern. + - Mandatory links must have at least one entry. + """ + properties: dict[str, Any] = {} + required: list[str] = [] + + # Mandatory fields: must be present AND match the regex pattern + for field, pattern in mandatory_fields.items(): + if field in IGNORE_FIELDS: + continue + required.append(field) + properties[field] = get_field_pattern_schema(field, pattern) + + # Optional fields: if present, must match the regex pattern + for field, pattern in optional_fields.items(): + if field in IGNORE_FIELDS: + continue + properties[field] = get_field_pattern_schema(field, pattern) + + # Mandatory links (regex): must have at least one entry + # TODO: regex pattern matching on link IDs is not yet enabled + for field in mandatory_links_regexes: + properties[field] = {"type": "array", "minItems": 1} + required.append(field) + + # Mandatory links (plain target types): must have at least one entry. + # The type of the linked need is checked via validate.network, but the + # list length constraint belongs in the local validator. + # Skip fields already handled by mandatory_links_regexes (mixed regex + plain). 
+ for field in mandatory_links_targets or {}: + if field not in properties: + properties[field] = {"type": "array", "minItems": 1} + required.append(field) + + # Optional links (regex): allowed but not required + # TODO: regex pattern matching on link IDs is not yet enabled + for field in optional_links_regexes: + properties[field] = {"type": "array"} + + return { + "properties": properties, + "required": required, + # "unevaluatedProperties": False, + } + + +def _build_need_type_schema(need_type: ScoreNeedType) -> dict[str, Any] | None: + """Build a sphinx-needs schema entry for a single need type. + + Returns ``None`` if the need type has no constraints (no mandatory/optional + fields or links), meaning no schema validation is needed. + + The returned dict has the sphinx-needs schema structure: + - ``select``: matches needs by their ``type`` field + - ``validate.local``: validates the need's own properties + - ``validate.network``: validates linked needs' types + """ + mandatory_fields = need_type.get("mandatory_options", {}) + optional_fields = need_type.get("optional_options", {}) + mandatory_links = need_type.get("mandatory_links", {}) + optional_links = need_type.get("optional_links", {}) + + # Skip need types that have no constraints at all + if not (mandatory_fields or optional_fields or mandatory_links or optional_links): + return None + + type_name = need_type["directive"] + + # Classify link values as regex patterns vs. target type names. + # Note: links are still plain strings at this point (before postprocess_need_links). + mandatory_links_regexes, mandatory_links_targets = _classify_links( + mandatory_links, type_name, mandatory=True + ) + optional_links_regexes, _ = _classify_links( + optional_links, type_name, mandatory=False + ) + + # Build validate.network for link fields with plain type targets. + # The network schema uses sphinx-needs' ValidateSchemaType format: + # each entry's ``items.local`` is a JSON Schema applied to each linked need. 
+ network: dict[str, Any] = {} + + def add_network_entry(field: str, target_types: list[str]) -> None: + type_constraint: dict[str, Any] = ( + {"enum": target_types} + if len(target_types) > 1 + else {"const": target_types[0]} + ) + network[field] = { + "type": "array", + "items": { + "local": { + "properties": {"type": type_constraint}, + "required": ["type"], + } + }, + } + + # Only add network entries for *mandatory* links with exclusively plain + # type targets. Two categories are intentionally excluded: + # + # 1. Mixed regex+plain fields (e.g. + # "complies: std_wp, ^std_req__aspice_40__iic.*$"): + # The items schema would incorrectly require ALL linked needs to match + # the plain type, while some legitimately match the regex instead. + # + # 2. Optional links: The Python validate_links() in check_options.py treats + # optional link type violations as informational (treat_as_info=True), + # but schemas use a single severity ("violation") per need type. + # Including optional links would escalate info-level issues to errors. + # Optional link types are validated by the Python check instead. + for field, target_types in mandatory_links_targets.items(): + if field not in mandatory_links_regexes: + add_network_entry(field, target_types) + + type_schema: dict[str, Any] = { + "id": f"need-type-{type_name}", + "severity": "violation", + "message": "Need does not conform to S-CORE metamodel", + # Selector: only apply this schema to needs with matching type + "select": { + "properties": {"type": {"const": type_name}}, + "required": ["type"], + }, + "validate": { + "local": _build_local_validator( + mandatory_fields, + optional_fields, + mandatory_links_regexes, + optional_links_regexes, + mandatory_links_targets, + ), + }, + } + if network: + type_schema["validate"]["network"] = network + + return type_schema + + +def get_field_pattern_schema(field: str, pattern: str) -> dict[str, Any]: + """Return the appropriate JSON schema for a field's regex pattern. 
+ + Array-valued fields (like ``tags``) get an array-of-strings schema; + scalar fields get a plain string schema. + """ + if field in SN_ARRAY_FIELDS: + return get_array_pattern_schema(pattern) + return get_pattern_schema(pattern) + + +def get_pattern_schema(pattern: str) -> dict[str, str]: + """Return a JSON schema that validates a string against a regex pattern.""" + return { + "type": "string", + "pattern": pattern, + } + + +def get_array_pattern_schema(pattern: str) -> dict[str, Any]: + """Return a JSON schema that validates an array where each item matches a regex.""" + return { + "type": "array", + "items": get_pattern_schema(pattern), + } diff --git a/src/extensions/score_metamodel/tests/test_metamodel_load.py b/src/extensions/score_metamodel/tests/test_metamodel_load.py index 3cb67965..72568592 100644 --- a/src/extensions/score_metamodel/tests/test_metamodel_load.py +++ b/src/extensions/score_metamodel/tests/test_metamodel_load.py @@ -40,8 +40,8 @@ def test_load_metamodel_data(): assert result.needs_types[0].get("color") == "blue" assert result.needs_types[0].get("style") == "bold" assert result.needs_types[0]["mandatory_options"] == { - # default id pattern: prefix + digits, lowercase letters and underscores - "id": "^T1[0-9a-z_]+$", + # default id pattern: prefix + digits, letters and underscores + "id": "^T1[0-9a-zA-Z_]+$", "opt1": "value1", } assert result.needs_types[0]["optional_options"] == { diff --git a/src/extensions/score_metamodel/tests/test_sn_schemas.py b/src/extensions/score_metamodel/tests/test_sn_schemas.py new file mode 100644 index 00000000..84b534d6 --- /dev/null +++ b/src/extensions/score_metamodel/tests/test_sn_schemas.py @@ -0,0 +1,504 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* +# pyright: reportPrivateUsage=false +import json +from pathlib import Path +from typing import Any, cast +from unittest.mock import MagicMock, patch + +from src.extensions.score_metamodel.metamodel_types import ScoreNeedType +from src.extensions.score_metamodel.sn_schemas import ( + IGNORE_FIELDS, + SN_ARRAY_FIELDS, + _build_local_validator, + _build_need_type_schema, + _classify_links, + get_array_pattern_schema, + get_field_pattern_schema, + get_pattern_schema, + write_sn_schemas, +) + +# ============================================================================= +# Tests for get_pattern_schema +# ============================================================================= + + +class TestGetPatternSchema: + def test_returns_string_type_with_pattern(self) -> None: + result = get_pattern_schema("^[A-Z]+$") + assert result == {"type": "string", "pattern": "^[A-Z]+$"} + + def test_preserves_complex_regex(self) -> None: + pattern = r"^(feat|fix|chore)\/.+$" + result = get_pattern_schema(pattern) + assert result["type"] == "string" + assert result["pattern"] == pattern + + +# ============================================================================= +# Tests for get_array_pattern_schema +# ============================================================================= + + +class TestGetArrayPatternSchema: + def test_returns_array_type_with_items(self) -> None: + result = get_array_pattern_schema("^tag_.*$") + assert result == { + "type": "array", + "items": {"type": "string", "pattern": "^tag_.*$"}, + } + + def test_items_match_get_pattern_schema(self) -> None: + pattern = "^[a-z]+$" + result = get_array_pattern_schema(pattern) + assert result["items"] 
== get_pattern_schema(pattern) + + +# ============================================================================= +# Tests for get_field_pattern_schema +# ============================================================================= + + +class TestGetFieldPatternSchema: + def test_scalar_field_returns_string_schema(self) -> None: + result = get_field_pattern_schema("title", "^.+$") + assert result == {"type": "string", "pattern": "^.+$"} + + def test_array_field_returns_array_schema(self) -> None: + for array_field in SN_ARRAY_FIELDS: + result = get_field_pattern_schema(array_field, "^[a-z]+$") + assert result["type"] == "array", f"Field '{array_field}' should be array" + assert "items" in result + + def test_unknown_field_returns_string_schema(self) -> None: + result = get_field_pattern_schema("some_custom_field", "^.*$") + assert result["type"] == "string" + + +# ============================================================================= +# Tests for _classify_links +# ============================================================================= + + +class TestClassifyLinks: + def test_regex_link_classified_as_regex(self) -> None: + links = {"parent_need": "^logic_arc_int__.+$"} + regexes, targets = _classify_links(links, "my_type", mandatory=True) + assert regexes == {"parent_need": "^logic_arc_int__.+$"} + assert targets == {} + + def test_plain_type_classified_as_target(self) -> None: + links = {"satisfies": "comp"} + regexes, targets = _classify_links(links, "my_type", mandatory=False) + assert regexes == {} + assert targets == {"satisfies": ["comp"]} + + def test_comma_separated_mixed_values(self) -> None: + links = {"related": "^arc_.+$, comp"} + regexes, targets = _classify_links(links, "my_type", mandatory=True) + assert regexes == {"related": "^arc_.+$"} + assert targets == {"related": ["comp"]} + + def test_empty_links(self) -> None: + regexes, targets = _classify_links({}, "my_type", mandatory=True) + assert regexes == {} + assert targets == {} + + 
def test_multiple_fields(self) -> None: + links = { + "satisfies": "req", + "parent": "^parent__.+$", + } + regexes, targets = _classify_links(links, "my_type", mandatory=False) + assert regexes == {"parent": "^parent__.+$"} + assert targets == {"satisfies": ["req"]} + + def test_multiple_regex_for_same_field_logs_error(self) -> None: + links = {"field": "^regex1$, ^regex2$"} + with patch("src.extensions.score_metamodel.sn_schemas.LOGGER") as mock_logger: + regexes, _ = _classify_links(links, "my_type", mandatory=True) + mock_logger.error.assert_called_once() + # Last regex overwrites previous ones + assert regexes == {"field": "^regex2$"} + + def test_multiple_plain_targets_all_kept(self) -> None: + links = {"field": "comp, sw_unit"} + regexes, targets = _classify_links(links, "my_type", mandatory=True) + assert regexes == {} + assert targets == {"field": ["comp", "sw_unit"]} + + +# ============================================================================= +# Tests for _build_local_validator +# ============================================================================= + + +class TestBuildLocalValidator: + def test_mandatory_fields_are_required(self) -> None: + mandatory = {"status": "^(valid|draft)$"} + result = _build_local_validator(mandatory, {}, {}, {}) + assert "status" in result["required"] + assert "status" in result["properties"] + assert result["properties"]["status"]["pattern"] == "^(valid|draft)$" + + def test_optional_fields_not_required(self) -> None: + optional = {"comment": "^.*$"} + result = _build_local_validator({}, optional, {}, {}) + assert "comment" not in result["required"] + assert "comment" in result["properties"] + + def test_ignored_fields_excluded(self) -> None: + mandatory = {field: "^.*$" for field in IGNORE_FIELDS} + optional = {field: "^.*$" for field in IGNORE_FIELDS} + result = _build_local_validator(mandatory, optional, {}, {}) + for field in IGNORE_FIELDS: + assert field not in result["properties"] + assert field not in 
result["required"] + + def test_mandatory_link_regexes_required_with_min_items(self) -> None: + mandatory_link_regexes = {"satisfies": "^req__.+$"} + result = _build_local_validator({}, {}, mandatory_link_regexes, {}) + assert "satisfies" in result["required"] + assert result["properties"]["satisfies"] == {"type": "array", "minItems": 1} + + def test_optional_link_regexes_not_required(self) -> None: + optional_link_regexes = {"related": "^rel__.+$"} + result = _build_local_validator({}, {}, {}, optional_link_regexes) + assert "related" not in result["required"] + assert result["properties"]["related"] == {"type": "array"} + + def test_combined_fields_and_links(self) -> None: + mandatory = {"status": "^valid$"} + optional = {"comment": "^.*$"} + mandatory_link_re = {"satisfies": "^req__.+$"} + optional_link_re = {"related": "^rel__.+$"} + result = _build_local_validator( + mandatory, optional, mandatory_link_re, optional_link_re + ) + assert set(result["required"]) == {"status", "satisfies"} + assert set(result["properties"].keys()) == { + "status", + "comment", + "satisfies", + "related", + } + + def test_empty_inputs(self) -> None: + result = _build_local_validator({}, {}, {}, {}) + assert result["properties"] == {} + assert result["required"] == [] + + def test_array_field_in_mandatory(self) -> None: + mandatory = {"tags": "^(safety|security)$"} + result = _build_local_validator(mandatory, {}, {}, {}) + assert result["properties"]["tags"]["type"] == "array" + assert "items" in result["properties"]["tags"] + + def test_mandatory_link_targets_required_with_min_items(self) -> None: + mandatory_link_targets = {"satisfies": ["comp", "sw_unit"]} + result = _build_local_validator({}, {}, {}, {}, mandatory_link_targets) + assert "satisfies" in result["required"] + assert result["properties"]["satisfies"] == {"type": "array", "minItems": 1} + + +# ============================================================================= +# Tests for _build_need_type_schema +# 
============================================================================= + + +def _make_need_type(**overrides: Any) -> ScoreNeedType: + """Helper to create a ScoreNeedType-like dict.""" + base: dict[str, Any] = { + "directive": "test_type", + "title": "Test Type", + "prefix": "TT_", + } + base.update(overrides) + return cast(ScoreNeedType, base) + + +class TestBuildNeedTypeSchema: + def test_returns_none_for_no_constraints(self) -> None: + need_type = _make_need_type() + assert _build_need_type_schema(need_type) is None + + def test_returns_none_for_empty_constraints(self) -> None: + need_type = _make_need_type( + mandatory_options={}, + optional_options={}, + mandatory_links={}, + optional_links={}, + ) + assert _build_need_type_schema(need_type) is None + + def test_schema_has_correct_structure(self) -> None: + need_type = _make_need_type( + mandatory_options={"status": "^valid$"}, + ) + schema = _build_need_type_schema(need_type) + assert schema is not None + assert schema["id"] == "need-type-test_type" + assert schema["severity"] == "violation" + assert "select" in schema + assert schema["select"]["properties"]["type"]["const"] == "test_type" + assert "validate" in schema + assert "local" in schema["validate"] + + def test_mandatory_fields_in_local_validator(self) -> None: + need_type = _make_need_type( + mandatory_options={"status": "^(valid|draft)$"}, + ) + schema = _build_need_type_schema(need_type) + assert schema is not None + local = schema["validate"]["local"] + assert "status" in local["required"] + assert "status" in local["properties"] + + def test_optional_fields_in_local_validator(self) -> None: + need_type = _make_need_type( + optional_options={"comment": "^.*$"}, + ) + schema = _build_need_type_schema(need_type) + assert schema is not None + local = schema["validate"]["local"] + assert "comment" not in local["required"] + assert "comment" in local["properties"] + + def test_mandatory_links_with_regex(self) -> None: + need_type = 
_make_need_type( + mandatory_links={"satisfies": "^req__.+$"}, + ) + schema = _build_need_type_schema(need_type) + assert schema is not None + local = schema["validate"]["local"] + assert "satisfies" in local["required"] + assert local["properties"]["satisfies"] == {"type": "array", "minItems": 1} + + def test_mandatory_links_with_plain_target(self) -> None: + need_type = _make_need_type( + mandatory_links={"satisfies": "comp"}, + ) + schema = _build_need_type_schema(need_type) + assert schema is not None + local = schema["validate"]["local"] + # Mandatory plain-target links get minItems: 1 in local validator + assert "satisfies" in local["required"] + assert local["properties"]["satisfies"] == {"type": "array", "minItems": 1} + + def test_optional_links_with_regex(self) -> None: + need_type = _make_need_type( + optional_links={"related": "^rel__.+$"}, + ) + schema = _build_need_type_schema(need_type) + assert schema is not None + local = schema["validate"]["local"] + assert "related" not in local["required"] + assert local["properties"]["related"] == {"type": "array"} + + +# ============================================================================= +# Tests for write_sn_schemas +# ============================================================================= + + +class TestWriteSnSchemas: + def test_writes_json_file(self, tmp_path: Path) -> None: + app = MagicMock() + app.confdir = str(tmp_path) + app.config = MagicMock() + + need_type: dict[str, Any] = { + "directive": "req", + "title": "Requirement", + "prefix": "REQ_", + "mandatory_options": {"status": "^valid$"}, + } + metamodel = MagicMock() + metamodel.needs_types = [need_type] + + write_sn_schemas(app, metamodel) + + output_path: Path = tmp_path / "schemas.json" + assert output_path.exists() + data = json.loads(output_path.read_text(encoding="utf-8")) + assert "schemas" in data + assert len(data["schemas"]) == 1 + assert data["schemas"][0]["id"] == "need-type-req" + + def test_sets_config_value(self, 
tmp_path: Path) -> None: + app = MagicMock() + app.confdir = str(tmp_path) + app.config = MagicMock() + + metamodel = MagicMock() + metamodel.needs_types = [] + + write_sn_schemas(app, metamodel) + + assert app.config.needs_schema_definitions_from_json == "schemas.json" + + def test_skips_need_types_without_constraints(self, tmp_path: Path) -> None: + app = MagicMock() + app.confdir = str(tmp_path) + app.config = MagicMock() + + need_type_with: dict[str, Any] = { + "directive": "req", + "title": "Requirement", + "prefix": "REQ_", + "mandatory_options": {"status": "^valid$"}, + } + need_type_without: dict[str, Any] = { + "directive": "info", + "title": "Info", + "prefix": "INF_", + } + metamodel = MagicMock() + metamodel.needs_types = [need_type_with, need_type_without] + + write_sn_schemas(app, metamodel) + + output_path: Path = tmp_path / "schemas.json" + data = json.loads(output_path.read_text(encoding="utf-8")) + assert len(data["schemas"]) == 1 + assert data["schemas"][0]["id"] == "need-type-req" + + def test_writes_valid_json_with_multiple_types(self, tmp_path: Path) -> None: + app = MagicMock() + app.confdir = str(tmp_path) + app.config = MagicMock() + + need_types: list[dict[str, Any]] = [ + { + "directive": "req", + "title": "Requirement", + "prefix": "REQ_", + "mandatory_options": {"status": "^valid$"}, + }, + { + "directive": "spec", + "title": "Specification", + "prefix": "SPEC_", + "optional_options": {"comment": "^.*$"}, + }, + ] + metamodel = MagicMock() + metamodel.needs_types = need_types + + write_sn_schemas(app, metamodel) + + output_path: Path = tmp_path / "schemas.json" + data = json.loads(output_path.read_text(encoding="utf-8")) + assert len(data["schemas"]) == 2 + ids = {s["id"] for s in data["schemas"]} + assert ids == {"need-type-req", "need-type-spec"} + + +# ============================================================================= +# Tests for validate.network schema generation +# 
============================================================================= + + +class TestNetworkValidation: + def test_single_mandatory_target_type(self) -> None: + need_type = _make_need_type( + mandatory_links={"satisfies": "comp"}, + ) + schema = _build_need_type_schema(need_type) + assert schema is not None + network = schema["validate"].get("network") + assert network is not None + assert "satisfies" in network + entry = network["satisfies"] + assert entry["type"] == "array" + assert entry["items"]["local"]["properties"]["type"]["const"] == "comp" + assert entry["items"]["local"]["required"] == ["type"] + # minItems is in local validator, not network + assert "minItems" not in entry + + def test_optional_target_types_excluded_from_network(self) -> None: + """Optional links are not validated via network schema. + + The Python validate_links() treats optional link type violations as + informational (treat_as_info=True). Since schemas use a single severity + per need type, including optional links would escalate info-level issues + to errors. 
+ """ + need_type = _make_need_type( + optional_links={"implements": "logic_arc_int, real_arc_int_op"}, + ) + schema = _build_need_type_schema(need_type) + assert schema is not None + assert "network" not in schema["validate"] + + def test_mandatory_and_optional_combined(self) -> None: + """Only mandatory links appear in network; optional links are excluded.""" + need_type = _make_need_type( + mandatory_links={"satisfies": "comp"}, + optional_links={"implements": "logic_arc_int, real_arc_int_op"}, + ) + schema = _build_need_type_schema(need_type) + assert schema is not None + network = schema["validate"].get("network") + assert network is not None + # Only mandatory links in network + assert set(network.keys()) == {"satisfies"} + assert ( + network["satisfies"]["items"]["local"]["properties"]["type"]["const"] + == "comp" + ) + + def test_mandatory_plain_target_gets_local_min_items(self) -> None: + need_type = _make_need_type( + mandatory_links={"satisfies": "comp"}, + ) + schema = _build_need_type_schema(need_type) + assert schema is not None + local = schema["validate"]["local"] + assert "satisfies" in local["required"] + assert local["properties"]["satisfies"] == {"type": "array", "minItems": 1} + + def test_optional_plain_target_no_local_min_items(self) -> None: + need_type = _make_need_type( + optional_links={"implements": "logic_arc_int"}, + ) + schema = _build_need_type_schema(need_type) + assert schema is not None + local = schema["validate"]["local"] + assert "implements" not in local.get("required", []) + + def test_no_network_when_only_regex_links(self) -> None: + need_type = _make_need_type( + mandatory_links={"includes": "^logic_arc_int__.+$"}, + ) + schema = _build_need_type_schema(need_type) + assert schema is not None + assert "network" not in schema["validate"] + + def test_mixed_regex_and_plain_skips_network(self) -> None: + """When a field mixes regex and plain targets, no network entry is generated. 
+ + The items schema would require ALL linked needs to match the plain type, + but some legitimately match the regex instead. Validated by Python checks. + """ + need_type = _make_need_type( + optional_links={"complies": "std_wp, ^std_req__aspice_40__iic.*$"}, + ) + schema = _build_need_type_schema(need_type) + assert schema is not None + # Regex part goes to local validator + local = schema["validate"]["local"] + assert local["properties"]["complies"] == {"type": "array"} + # No network entry for mixed fields + assert "network" not in schema["validate"] diff --git a/src/extensions/score_metamodel/tests/test_sn_schemas_integration.py b/src/extensions/score_metamodel/tests/test_sn_schemas_integration.py new file mode 100644 index 00000000..2b7f3463 --- /dev/null +++ b/src/extensions/score_metamodel/tests/test_sn_schemas_integration.py @@ -0,0 +1,536 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* +# pyright: reportPrivateUsage=false +"""Integration tests for schema generation against the real SCORE metamodel. + +Test Design +----------- + +Objective: + Verify that the schemas generated by ``sn_schemas.py`` from the real + ``metamodel.yaml`` correctly accept valid needs and reject invalid ones. + This proves the schema-based validation (sphinx-needs 6) is a faithful + translation of the metamodel rules previously enforced only by Python + checks in ``checks/check_options.py``. + +Approach: + 1. 
Load the S-Core ``metamodel.yaml`` via ``load_metamodel_data()``. + 2. Generate schemas for every need type via ``_build_need_type_schema()``. + 3. Validate sample needs against the generated schemas using + ``jsonschema_rs.Draft7Validator``, the same JSON Schema engine + sphinx-needs uses at build time. + +Test categories: + + **Structural sweep** (``TestAllSchemasStructural``) + Iterates over ALL need types from the S-Core metamodel and verifies: + - Every generated schema is a valid JSON Schema (constructable). + - Schema structure matches the sphinx-needs contract + (id, severity, select, validate.local). + - The ``select`` schema matches only the correct need type. + - Fields in ``IGNORE_FIELDS`` (e.g. ``content``) are excluded. + - Every ``mandatory_options`` field appears in ``required``. + - Every ``optional_options`` field appears in ``properties`` + but NOT in ``required``. + - Regex patterns in schemas match the metamodel definitions exactly. + - Types without any constraints produce no schema. + + **Representative type tests** (``TestFeatReqSchema``, ``TestCompSchema``, + ``TestFeatSchema``) + For a curated set of need types, construct valid and invalid need + dicts and assert the schema accepts or rejects them. This covers + the constraint categories that exist in the metamodel: + + - ``feat_req``: Mandatory fields with regex patterns + (reqtype, security, safety, status), optional fields with patterns + (reqcovered, testcovered), mandatory link with plain target + (satisfies -> stkh_req, no local link validation), and ``content`` + in ``IGNORE_FIELDS``. + - ``comp``: Mandatory fields only, no mandatory links. + - ``feat``: Mandatory link with regex pattern + (includes: ``^logic_arc_int(_op)*__.+$``), producing a local + ``minItems: 1`` constraint. 
+ +Validation helpers: + ``assert_schema_valid`` / ``assert_schema_invalid`` replicate the + two-step sphinx-needs validation: first match the ``select`` schema + (ensures the schema applies to the need's type), then validate against + ``validate.local``. + +Limitations: + - Graph checks (safety level decomposition, prohibited words) remain in + Python code and are outside the scope of schema-based validation. + - The ``content`` field is excluded via ``IGNORE_FIELDS`` because it is + not yet available in ubCode; this exclusion is explicitly tested. +""" + +from typing import Any + +import jsonschema_rs +import pytest + +from src.extensions.score_metamodel.metamodel_types import ScoreNeedType +from src.extensions.score_metamodel.sn_schemas import ( + IGNORE_FIELDS, + _build_need_type_schema, +) +from src.extensions.score_metamodel.yaml_parser import ( + MetaModelData, + load_metamodel_data, +) + +# ============================================================================= +# Fixtures +# ============================================================================= + + +@pytest.fixture(scope="module") +def metamodel() -> MetaModelData: + """Load the S-Core metamodel.yaml once for all tests in this module.""" + return load_metamodel_data() + + +@pytest.fixture(scope="module") +def schemas_by_type(metamodel: MetaModelData) -> dict[str, dict[str, Any]]: + """Generate sphinx-needs schemas for all need types and index by directive name.""" + result: dict[str, dict[str, Any]] = {} + for need_type in metamodel.needs_types: + schema = _build_need_type_schema(need_type) + if schema is not None: + result[need_type["directive"]] = schema + return result + + +@pytest.fixture(scope="module") +def need_types_by_directive(metamodel: MetaModelData) -> dict[str, ScoreNeedType]: + """Index need types by directive name for easy lookup.""" + return {nt["directive"]: nt for nt in metamodel.needs_types} + + +# 
============================================================================= +# Helpers +# ============================================================================= + + +def assert_schema_valid(need_dict: dict[str, Any], schema: dict[str, Any]) -> None: + """Assert that a need dict passes the schema's local validator.""" + select_validator = jsonschema_rs.Draft7Validator(schema["select"]) + assert select_validator.is_valid(need_dict), ( + f"Need type '{need_dict.get('type')}' did not match schema selector" + ) + local_validator = jsonschema_rs.Draft7Validator(schema["validate"]["local"]) + # raises ValidationError with details on failure + local_validator.validate(need_dict) + + +def assert_schema_invalid(need_dict: dict[str, Any], schema: dict[str, Any]) -> None: + """Assert that a need dict FAILS the schema's local validator.""" + select_validator = jsonschema_rs.Draft7Validator(schema["select"]) + assert select_validator.is_valid(need_dict), ( + f"Need type '{need_dict.get('type')}' did not match schema selector" + ) + local_validator = jsonschema_rs.Draft7Validator(schema["validate"]["local"]) + assert not local_validator.is_valid(need_dict), ( + f"Expected validation to fail for need: {need_dict}" + ) + + +# ============================================================================= +# Structural sweep over all types +# ============================================================================= + + +class TestAllSchemasStructural: + """Verify every schema generated from the real metamodel is well-formed.""" + + def test_at_least_one_schema_generated( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + assert len(schemas_by_type) > 0 + + def test_all_schemas_are_valid_json_schemas( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + """Every schema's select and validate.local must be constructable.""" + for schema in schemas_by_type.values(): + jsonschema_rs.Draft7Validator(schema["select"]) + 
jsonschema_rs.Draft7Validator(schema["validate"]["local"]) + + def test_every_schema_has_required_structure( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + for type_name, schema in schemas_by_type.items(): + assert schema["id"] == f"need-type-{type_name}" + assert "severity" in schema + assert "select" in schema + assert "local" in schema["validate"] + + def test_select_matches_correct_type_only( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + for type_name, schema in schemas_by_type.items(): + selector = jsonschema_rs.Draft7Validator(schema["select"]) + assert selector.is_valid({"type": type_name}) + assert not selector.is_valid({"type": f"NOT_{type_name}"}) + + def test_ignored_fields_never_in_schemas( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + for type_name, schema in schemas_by_type.items(): + local = schema["validate"]["local"] + for field in IGNORE_FIELDS: + assert field not in local.get("properties", {}), ( + f"{type_name}: '{field}' should be ignored" + ) + assert field not in local.get("required", []), ( + f"{type_name}: '{field}' should be ignored" + ) + + def test_mandatory_options_are_required( + self, + schemas_by_type: dict[str, dict[str, Any]], + need_types_by_directive: dict[str, ScoreNeedType], + ) -> None: + for type_name, schema in schemas_by_type.items(): + need_type = need_types_by_directive[type_name] + local = schema["validate"]["local"] + for field in need_type.get("mandatory_options", {}): + if field in IGNORE_FIELDS: + continue + assert field in local["required"], ( + f"{type_name}: mandatory field '{field}' missing from required" + ) + + def test_optional_options_not_required( + self, + schemas_by_type: dict[str, dict[str, Any]], + need_types_by_directive: dict[str, ScoreNeedType], + ) -> None: + for type_name, schema in schemas_by_type.items(): + need_type = need_types_by_directive[type_name] + local = schema["validate"]["local"] + for field in need_type.get("optional_options", 
{}): + if field in IGNORE_FIELDS: + continue + assert field in local["properties"], ( + f"{type_name}: optional field '{field}' missing from properties" + ) + assert field not in local["required"], ( + f"{type_name}: optional field '{field}' should not be required" + ) + + def test_mandatory_option_patterns_match_metamodel( + self, + schemas_by_type: dict[str, dict[str, Any]], + need_types_by_directive: dict[str, ScoreNeedType], + ) -> None: + for type_name, schema in schemas_by_type.items(): + need_type = need_types_by_directive[type_name] + local = schema["validate"]["local"] + for field, pattern in need_type.get("mandatory_options", {}).items(): + if field in IGNORE_FIELDS: + continue + prop = local["properties"][field] + if prop.get("type") == "array": + assert prop["items"]["pattern"] == pattern, ( + f"{type_name}.{field}: pattern mismatch" + ) + else: + assert prop["pattern"] == pattern, ( + f"{type_name}.{field}: pattern mismatch" + ) + + def test_types_without_constraints_have_no_schema( + self, + metamodel: MetaModelData, + schemas_by_type: dict[str, dict[str, Any]], + ) -> None: + for nt in metamodel.needs_types: + directive = nt["directive"] + has_constraints = bool( + nt.get("mandatory_options") + or nt.get("optional_options") + or nt.get("mandatory_links") + or nt.get("optional_links") + ) + if not has_constraints: + assert directive not in schemas_by_type, ( + f"{directive} has no constraints but got a schema" + ) + + +# ============================================================================= +# feat_req: mandatory fields, mandatory link (plain target = no local link check) +# ============================================================================= + + +class TestFeatReqSchema: + """Integration tests for feat_req using the real metamodel.""" + + @staticmethod + def _make_valid() -> dict[str, Any]: + return { + "type": "feat_req", + "id": "feat_req__test__001", + "reqtype": "Functional", + "security": "YES", + "safety": "QM", + "status": 
"valid", + "satisfies": ["stkh_req__some_need"], + } + + def test_valid_need_passes( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + assert_schema_valid(self._make_valid(), schemas_by_type["feat_req"]) + + def test_missing_status_fails( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + need = self._make_valid() + del need["status"] + assert_schema_invalid(need, schemas_by_type["feat_req"]) + + def test_missing_safety_fails( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + need = self._make_valid() + del need["safety"] + assert_schema_invalid(need, schemas_by_type["feat_req"]) + + def test_wrong_status_pattern_fails( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + need = self._make_valid() + need["status"] = "approved" # not in ^(valid|invalid)$ + assert_schema_invalid(need, schemas_by_type["feat_req"]) + + def test_wrong_safety_pattern_fails( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + need = self._make_valid() + need["safety"] = "ASIL_D" # not in ^(QM|ASIL_B)$ + assert_schema_invalid(need, schemas_by_type["feat_req"]) + + def test_wrong_reqtype_pattern_fails( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + need = self._make_valid() + need["reqtype"] = "Performance" # not in ^(Functional|Interface|...)$ + assert_schema_invalid(need, schemas_by_type["feat_req"]) + + def test_content_not_validated( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + """content is in IGNORE_FIELDS — missing content must not fail.""" + need = self._make_valid() + # no 'content' key at all — should still pass + assert_schema_valid(need, schemas_by_type["feat_req"]) + + def test_invalid_optional_field_fails( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + need = self._make_valid() + need["reqcovered"] = "MAYBE" # not in ^(YES|NO)$ + assert_schema_invalid(need, schemas_by_type["feat_req"]) + + def test_valid_optional_field_passes( + self, 
schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + need = self._make_valid() + need["reqcovered"] = "YES" + assert_schema_valid(need, schemas_by_type["feat_req"]) + + def test_extra_unknown_fields_pass( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + need = self._make_valid() + need["unknown_field"] = "anything" + assert_schema_valid(need, schemas_by_type["feat_req"]) + + +# ============================================================================= +# comp: mandatory fields, no mandatory links +# ============================================================================= + + +class TestCompSchema: + """Integration tests for comp using the real metamodel.""" + + @staticmethod + def _make_valid() -> dict[str, Any]: + return { + "type": "comp", + "id": "comp__my_component", + "security": "YES", + "safety": "QM", + "status": "valid", + } + + def test_valid_need_passes( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + assert_schema_valid(self._make_valid(), schemas_by_type["comp"]) + + def test_missing_security_fails( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + need = self._make_valid() + del need["security"] + assert_schema_invalid(need, schemas_by_type["comp"]) + + def test_wrong_security_pattern_fails( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + need = self._make_valid() + need["security"] = "MAYBE" # not in ^(YES|NO)$ + assert_schema_invalid(need, schemas_by_type["comp"]) + + +# ============================================================================= +# feat: mandatory link with regex (includes: ^logic_arc_int(_op)*__.+$) +# ============================================================================= + + +class TestFeatSchema: + """Integration tests for feat — has a mandatory link with regex pattern.""" + + @staticmethod + def _make_valid() -> dict[str, Any]: + return { + "type": "feat", + "id": "feat__my_feature", + "security": "YES", + "safety": "QM", + "status": 
"valid", + "includes": ["logic_arc_int__something"], + "consists_of": ["comp__some_component"], + } + + def test_valid_need_passes( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + assert_schema_valid(self._make_valid(), schemas_by_type["feat"]) + + def test_missing_mandatory_link_fails( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + need = self._make_valid() + del need["includes"] + assert_schema_invalid(need, schemas_by_type["feat"]) + + def test_empty_mandatory_link_fails( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + need = self._make_valid() + need["includes"] = [] # minItems: 1 violated + assert_schema_invalid(need, schemas_by_type["feat"]) + + +# ============================================================================= +# Network validation: plain type targets produce validate.network entries +# ============================================================================= + + +class TestNetworkValidation: + """Verify validate.network schemas for types with plain-target links.""" + + def test_mandatory_link_has_network_entry( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + """feat_req: satisfies -> stkh_req produces a network entry.""" + schema = schemas_by_type["feat_req"] + network = schema["validate"].get("network") + assert network is not None + assert "satisfies" in network + entry = network["satisfies"] + assert entry["type"] == "array" + assert entry["items"]["local"]["properties"]["type"]["const"] == "stkh_req" + assert entry["items"]["local"]["required"] == ["type"] + + def test_mandatory_link_has_local_min_items( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + """feat_req: mandatory satisfies link gets minItems: 1 in local validator.""" + schema = schemas_by_type["feat_req"] + local = schema["validate"]["local"] + assert "satisfies" in local["required"] + assert local["properties"]["satisfies"] == {"type": "array", "minItems": 1} + + def 
test_optional_link_excluded_from_network( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + """tool_req: satisfies is optional, so no network entry is generated. + + Optional link type violations are treated as informational by the Python + validate_links() check (treat_as_info=True). Since schemas use a single + severity per need type, optional links are excluded from network to avoid + escalating info-level issues to errors. + """ + schema = schemas_by_type["tool_req"] + network = schema["validate"].get("network", {}) + assert "satisfies" not in network + + def test_network_validates_linked_need_type( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + """The local schema inside items validates a linked need's type field.""" + schema = schemas_by_type["feat_req"] + network = schema["validate"]["network"] + local_schema = network["satisfies"]["items"]["local"] + validator = jsonschema_rs.Draft7Validator(local_schema) + # Valid linked need + assert validator.is_valid({"type": "stkh_req"}) + # Invalid linked need type + assert not validator.is_valid({"type": "comp_req"}) + + def test_all_mandatory_plain_links_have_local_and_network( + self, + schemas_by_type: dict[str, dict[str, Any]], + need_types_by_directive: dict[str, ScoreNeedType], + ) -> None: + """Structural sweep: every mandatory plain-target link has both entries. + + Fields that mix regex and plain targets are excluded from network + validation (the items schema would incorrectly require ALL linked + needs to match the plain type). 
+ """ + for type_name, schema in schemas_by_type.items(): + need_type = need_types_by_directive[type_name] + local = schema["validate"]["local"] + network = schema["validate"].get("network", {}) + for link_field, link_value in need_type.get("mandatory_links", {}).items(): + assert isinstance(link_value, str) # before postprocess_need_links + values = [v.strip() for v in link_value.split(",")] + plain_targets = [v for v in values if not v.startswith("^")] + has_regex = any(v.startswith("^") for v in values) + if not plain_targets: + continue + # Must have local minItems: 1 + assert link_field in local["required"], ( + f"{type_name}.{link_field}: missing from local required" + ) + assert local["properties"][link_field] == { + "type": "array", + "minItems": 1, + }, f"{type_name}.{link_field}: wrong local properties" + # Network type constraint only for non-mixed fields + if has_regex: + assert link_field not in network, ( + f"{type_name}.{link_field}: mixed field should NOT be in " + "network" + ) + else: + assert link_field in network, ( + f"{type_name}.{link_field}: missing from network" + ) diff --git a/src/extensions/score_metamodel/yaml_parser.py b/src/extensions/score_metamodel/yaml_parser.py index 64916a90..8c83b4e5 100644 --- a/src/extensions/score_metamodel/yaml_parser.py +++ b/src/extensions/score_metamodel/yaml_parser.py @@ -119,7 +119,7 @@ def _parse_need_type( # Ensure ID regex is set if "id" not in t["mandatory_options"]: prefix = t["prefix"] - t["mandatory_options"]["id"] = f"^{prefix}[0-9a-z_]+$" + t["mandatory_options"]["id"] = f"^{prefix}[0-9a-zA-Z_]+$" if "color" in yaml_data: t["color"] = yaml_data["color"] diff --git a/src/extensions/score_source_code_linker/__init__.py b/src/extensions/score_source_code_linker/__init__.py index 094ebf4a..5be814ee 100644 --- a/src/extensions/score_source_code_linker/__init__.py +++ b/src/extensions/score_source_code_linker/__init__.py @@ -318,6 +318,38 @@ def find_need(all_needs: NeedsMutable, id: str) -> NeedItem 
| None: return all_needs.get(id) +def _log_needs_with_existing_links(needs: NeedsMutable) -> None: + """Log needs that already have source_code_link or testlink set.""" + if LOGGER.getEffectiveLevel() >= 10: + for id, need in needs.items(): + if need.get("source_code_link"): + LOGGER.debug( + f"?? Need {id} already has source_code_link: " + f"{need.get('source_code_link')}" + ) + if need.get("testlink"): + LOGGER.debug( + f"?? Need {id} already has testlink: {need.get('testlink')}" + ) + + +def _warn_missing_need(source_code_links: SourceCodeLinks) -> None: + """Log warnings when a need referenced by source/test links is not found.""" + # TODO: print github annotations as in https://github.com/eclipse-score/bazel_registry/blob/7423b9996a45dd0a9ec868e06a970330ee71cf4f/tools/verify_semver_compatibility_level.py#L126-L129 + for n in source_code_links.links.CodeLinks: + LOGGER.warning( + f"{n.file}:{n.line}: Could not find {source_code_links.need} " + "in documentation [CODE LINK]", + type="score_source_code_linker", + ) + for n in source_code_links.links.TestLinks: + LOGGER.warning( + f"{n.file}:{n.line}: Could not find {source_code_links.need} " + "in documentation [TEST LINK]", + type="score_source_code_linker", + ) + + # re-qid: gd_req__req__attr_impl def inject_links_into_needs(app: Sphinx, env: BuildEnvironment) -> None: """ @@ -339,17 +371,7 @@ def inject_links_into_needs(app: Sphinx, env: BuildEnvironment) -> None: ) # TODO: why do we create a copy? Can we also needs_copy = needs[:]? copy(needs)? # Enabled automatically for DEBUGGING - if LOGGER.getEffectiveLevel() >= 10: - for id, need in needs.items(): - if need.get("source_code_link"): - LOGGER.debug( - f"?? Need {id} already has source_code_link: " - f"{need.get('source_code_link')}" - ) - if need.get("testlink"): - LOGGER.debug( - f"?? 
Need {id} already has testlink: {need.get('testlink')}" - ) + _log_needs_with_existing_links(needs) source_code_links_by_need = load_source_code_links_combined_json( get_cache_filename(app.outdir, "score_scl_grouped_cache.json") @@ -358,35 +380,30 @@ def inject_links_into_needs(app: Sphinx, env: BuildEnvironment) -> None: for source_code_links in source_code_links_by_need: need = find_need(needs_copy, source_code_links.need) if need is None: - # TODO: print github annotations as in https://github.com/eclipse-score/bazel_registry/blob/7423b9996a45dd0a9ec868e06a970330ee71cf4f/tools/verify_semver_compatibility_level.py#L126-L129 - for n in source_code_links.links.CodeLinks: - LOGGER.warning( - f"{n.file}:{n.line}: Could not find {source_code_links.need} " - "in documentation [CODE LINK]", - type="score_source_code_linker", - ) - for n in source_code_links.links.TestLinks: - LOGGER.warning( - f"{n.file}:{n.line}: Could not find {source_code_links.need} " - "in documentation [TEST LINK]", - type="score_source_code_linker", - ) + _warn_missing_need(source_code_links) continue need_as_dict = cast(dict[str, object], need) - need_as_dict["source_code_link"] = ", ".join( - f"{get_github_link(n)}<>{n.file}:{n.line}" - for n in source_code_links.links.CodeLinks - ) - need_as_dict["testlink"] = ", ".join( - f"{get_github_link(n)}<>{n.name}" for n in source_code_links.links.TestLinks - ) + modified_need = False + if source_code_links.links.CodeLinks: + modified_need = True + need_as_dict["source_code_link"] = ", ".join( + f"{get_github_link(n)}<>{n.file}:{n.line}" + for n in source_code_links.links.CodeLinks + ) + if source_code_links.links.TestLinks: + modified_need = True + need_as_dict["testlink"] = ", ".join( + f"{get_github_link(n)}<>{n.name}" + for n in source_code_links.links.TestLinks + ) - # NOTE: Removing & adding the need is important to make sure - # the needs gets 're-evaluated'. 
-        Needs_Data.remove_need(need["id"])
-        Needs_Data.add_need(need)
+
+        if modified_need:
+            # NOTE: Removing & adding the need is important to make sure
+            # the need gets 're-evaluated'.
+            Needs_Data.remove_need(need["id"])
+            Needs_Data.add_need(need)


# ╭──────────────────────────────────────╮
diff --git a/src/extensions/score_sync_toml/__init__.py b/src/extensions/score_sync_toml/__init__.py
index 79ebfb7a..72e598e6 100644
--- a/src/extensions/score_sync_toml/__init__.py
+++ b/src/extensions/score_sync_toml/__init__.py
@@ -59,6 +59,12 @@ def setup(app: Sphinx) -> dict[str, str | bool]:
     ]
 
     # TODO remove the suppress_warnings once fixed
+    app.config.needscfg_exclude_vars = [
+        "needs_from_toml",
+        "needs_from_toml_table",
+        # "needs_schema_definitions_from_json",
+    ]
+
     return {
         "version": "0.1",
         "parallel_read_safe": True,