From 72521660b366bb5cd81228ece9c355d3325cc714 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 5 Mar 2026 22:28:57 +0000 Subject: [PATCH 01/14] Initial plan From fc517b20b0a2a4b017335b0cc627cd1a92c140eb Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 5 Mar 2026 22:37:16 +0000 Subject: [PATCH 02/14] Scaffold DAK AI Skill Library infrastructure Create the full directory structure under .github/skills/ with: - Common module (smart_llm_facade.py, prompts.py, ig_errors.py, fsh_utils.py) - BPMN author skill (validators, prompts, actions) - BPMN import skill (validators, prompts, actions) - IG publisher skill (prompts, actions) - DAK authoring skill (classify_issue_action.py with keyword lists) - Stub skills (l1_review, l3_review, translation) - CLI entry point and skills registry - Dockerfile and docker-compose.yml - GitHub Actions workflows (classify-issue, skill-l1/l2/l3/translation, pr-validate-slash) - .env.example and .gitignore updates - Label JSON files for content:L2, content:L3, content:translation Co-authored-by: litlfred <662242+litlfred@users.noreply.github.com> --- .env.example | 11 ++ .github/skills/Dockerfile | 44 +++++ .github/skills/README.md | 109 ++++++++++++ .github/skills/bpmn_author/__init__.py | 0 .../bpmn_author/actions/bpmn_author_action.py | 66 +++++++ .../prompts/create_or_edit_bpmn.md | 33 ++++ .../bpmn_author/prompts/validate_bpmn.md | 31 ++++ .github/skills/bpmn_author/skills.yaml | 23 +++ .../skills/bpmn_author/validators/__init__.py | 0 .../validators/bpmn_xml_validator.py | 98 ++++++++++ .../validators/swimlane_validator.py | 115 ++++++++++++ .github/skills/bpmn_import/__init__.py | 0 .../bpmn_import/actions/bpmn_import_action.py | 53 ++++++ .../prompts/interpret_import_errors.md | 28 +++ .github/skills/bpmn_import/skills.yaml | 18 ++ .../skills/bpmn_import/validators/__init__.py | 0 
.../validators/swimlane_actor_validator.py | 73 ++++++++ .github/skills/cli/dak_skill.py | 75 ++++++++ .github/skills/common/__init__.py | 0 .github/skills/common/fsh_utils.py | 42 +++++ .github/skills/common/ig_errors.py | 76 ++++++++ .github/skills/common/ig_publisher_iface.py | 55 ++++++ .github/skills/common/prompts.py | 65 +++++++ .../skills/common/prompts/actor_context.md | 25 +++ .../skills/common/prompts/bpmn_xml_schema.md | 57 ++++++ .../common/prompts/dak_bpmn_constraints.md | 19 ++ .github/skills/common/smart_llm_facade.py | 116 ++++++++++++ .github/skills/dak_authoring/__init__.py | 0 .../actions/classify_issue_action.py | 168 ++++++++++++++++++ .../actions/dak_authoring_action.py | 53 ++++++ .../dak_authoring/prompts/change_proposal.md | 22 +++ .../dak_authoring/prompts/classify_issue.md | 29 +++ .../dak_authoring/prompts/l2_authoring.md | 36 ++++ .github/skills/dak_authoring/skills.yaml | 17 ++ .github/skills/docker-compose.yml | 27 +++ .github/skills/ig_publisher/__init__.py | 0 .../ig_publisher/actions/build_ig_action.py | 40 +++++ .../actions/interpret_errors_action.py | 51 ++++++ .../actions/validate_dak_action.py | 63 +++++++ .../actions/validate_ig_action.py | 40 +++++ .../prompts/interpret_ig_errors.md | 38 ++++ .../ig_publisher/prompts/validate_dak.md | 34 ++++ .github/skills/ig_publisher/skills.yaml | 22 +++ .../l1_review/actions/l1_review_action.py | 22 +++ .github/skills/l1_review/skills.yaml | 9 + .../l3_review/actions/l3_review_action.py | 22 +++ .github/skills/l3_review/skills.yaml | 9 + .github/skills/skills_registry.yaml | 47 +++++ .../translation/actions/translation_action.py | 17 ++ .github/skills/translation/skills.yaml | 9 + .github/workflows/classify-issue.yml | 32 ++++ .github/workflows/pr-validate-slash.yml | 55 ++++++ .github/workflows/skill-l1-review.yml | 26 +++ .github/workflows/skill-l2-dak.yml | 27 +++ .github/workflows/skill-l3-review.yml | 26 +++ .github/workflows/skill-translation.yml | 24 +++ .gitignore | 4 + 
labels/content_L2.json | 1 + labels/content_L3.json | 1 + labels/content_translation.json | 1 + 60 files changed, 2204 insertions(+) create mode 100644 .env.example create mode 100644 .github/skills/Dockerfile create mode 100644 .github/skills/README.md create mode 100644 .github/skills/bpmn_author/__init__.py create mode 100644 .github/skills/bpmn_author/actions/bpmn_author_action.py create mode 100644 .github/skills/bpmn_author/prompts/create_or_edit_bpmn.md create mode 100644 .github/skills/bpmn_author/prompts/validate_bpmn.md create mode 100644 .github/skills/bpmn_author/skills.yaml create mode 100644 .github/skills/bpmn_author/validators/__init__.py create mode 100644 .github/skills/bpmn_author/validators/bpmn_xml_validator.py create mode 100644 .github/skills/bpmn_author/validators/swimlane_validator.py create mode 100644 .github/skills/bpmn_import/__init__.py create mode 100644 .github/skills/bpmn_import/actions/bpmn_import_action.py create mode 100644 .github/skills/bpmn_import/prompts/interpret_import_errors.md create mode 100644 .github/skills/bpmn_import/skills.yaml create mode 100644 .github/skills/bpmn_import/validators/__init__.py create mode 100644 .github/skills/bpmn_import/validators/swimlane_actor_validator.py create mode 100644 .github/skills/cli/dak_skill.py create mode 100644 .github/skills/common/__init__.py create mode 100644 .github/skills/common/fsh_utils.py create mode 100644 .github/skills/common/ig_errors.py create mode 100644 .github/skills/common/ig_publisher_iface.py create mode 100644 .github/skills/common/prompts.py create mode 100644 .github/skills/common/prompts/actor_context.md create mode 100644 .github/skills/common/prompts/bpmn_xml_schema.md create mode 100644 .github/skills/common/prompts/dak_bpmn_constraints.md create mode 100644 .github/skills/common/smart_llm_facade.py create mode 100644 .github/skills/dak_authoring/__init__.py create mode 100644 .github/skills/dak_authoring/actions/classify_issue_action.py create mode 
100644 .github/skills/dak_authoring/actions/dak_authoring_action.py create mode 100644 .github/skills/dak_authoring/prompts/change_proposal.md create mode 100644 .github/skills/dak_authoring/prompts/classify_issue.md create mode 100644 .github/skills/dak_authoring/prompts/l2_authoring.md create mode 100644 .github/skills/dak_authoring/skills.yaml create mode 100644 .github/skills/docker-compose.yml create mode 100644 .github/skills/ig_publisher/__init__.py create mode 100644 .github/skills/ig_publisher/actions/build_ig_action.py create mode 100644 .github/skills/ig_publisher/actions/interpret_errors_action.py create mode 100644 .github/skills/ig_publisher/actions/validate_dak_action.py create mode 100644 .github/skills/ig_publisher/actions/validate_ig_action.py create mode 100644 .github/skills/ig_publisher/prompts/interpret_ig_errors.md create mode 100644 .github/skills/ig_publisher/prompts/validate_dak.md create mode 100644 .github/skills/ig_publisher/skills.yaml create mode 100644 .github/skills/l1_review/actions/l1_review_action.py create mode 100644 .github/skills/l1_review/skills.yaml create mode 100644 .github/skills/l3_review/actions/l3_review_action.py create mode 100644 .github/skills/l3_review/skills.yaml create mode 100644 .github/skills/skills_registry.yaml create mode 100644 .github/skills/translation/actions/translation_action.py create mode 100644 .github/skills/translation/skills.yaml create mode 100644 .github/workflows/classify-issue.yml create mode 100644 .github/workflows/pr-validate-slash.yml create mode 100644 .github/workflows/skill-l1-review.yml create mode 100644 .github/workflows/skill-l2-dak.yml create mode 100644 .github/workflows/skill-l3-review.yml create mode 100644 .github/workflows/skill-translation.yml create mode 100644 labels/content_L2.json create mode 100644 labels/content_L3.json create mode 100644 labels/content_translation.json diff --git a/.env.example b/.env.example new file mode 100644 index 0000000000..7d7a85f29d --- 
/dev/null +++ b/.env.example @@ -0,0 +1,11 @@ +# DAK Skill Library — local config. Copy to .env (never commit .env). +# +# LLM features (authoring, error interpretation, classification): +# Leave blank → LLM steps skipped, structural validation still runs. +# Billed to YOUR account, not WHO. +# +DAK_LLM_API_KEY= # sk-... (OpenAI) | sk-ant-... (Anthropic) | leave blank +DAK_LLM_MODEL=gpt-4o # gpt-4o | gpt-4o-mini | claude-3-5-sonnet-20241022 | gemini-2.0-flash + +# IG Publisher (usually defaults are fine) +DAK_TX_SERVER= # optional custom terminology server diff --git a/.github/skills/Dockerfile b/.github/skills/Dockerfile new file mode 100644 index 0000000000..08ef8be522 --- /dev/null +++ b/.github/skills/Dockerfile @@ -0,0 +1,44 @@ +# DAK Skill Library — Local Development Image +# Mirrors ghbuild.yml CI environment exactly. +# Base: hl7fhir/ig-publisher-base (Jekyll, Ruby, Java 17, Node.js) + +FROM hl7fhir/ig-publisher-base:latest + +LABEL org.opencontainers.image.title="DAK Skill Library" +LABEL org.opencontainers.image.source="https://github.com/WorldHealthOrganization/smart-base" + +# Python packages — identical to ghbuild.yml +RUN apt-get update && apt-get install -y --no-install-recommends \ + python3 python3-pip python3-venv \ + && ln -sf /usr/bin/python3 /usr/bin/python \ + && pip3 install --break-system-packages \ + "GitPython>=3.1.40" \ + "PyYAML>=6.0" \ + "requests>=2.28.0" \ + "lxml" \ + "litellm>=1.0.0" \ + "pdfplumber" \ + "pandas" \ + && rm -rf /var/lib/apt/lists/* + +# SUSHI — identical to ghbuild.yml +RUN npm install -g fsh-sushi + +# IG Publisher jar — pre-baked so local runs don't need network +# Override: -v /local/publisher.jar:/app/publisher.jar +RUN mkdir -p /app/input-cache \ + && curl -L \ + https://github.com/HL7/fhir-ig-publisher/releases/latest/download/publisher.jar \ + -o /app/input-cache/publisher.jar + +# DAK skill library +COPY . 
/app/skills/ + +# Workspace — mount IG repo here: -v $(pwd):/workspace +WORKDIR /workspace + +ENV PUBLISHER_JAR=/app/input-cache/publisher.jar +ENV DAK_IG_ROOT=/workspace + +ENTRYPOINT ["python3", "/app/skills/cli/dak_skill.py"] +CMD ["--help"] diff --git a/.github/skills/README.md b/.github/skills/README.md new file mode 100644 index 0000000000..9de5f0ad9c --- /dev/null +++ b/.github/skills/README.md @@ -0,0 +1,109 @@ +# DAK Skill Library + +The DAK Skill Library provides AI-assisted and structural validation tools +for authoring WHO Digital Adaptation Kit (DAK) content. + +## Quick Start + +### Local Development (Docker) + +```bash +# 1. Build the image +docker build -t dak-skill .github/skills/ + +# 2. Copy environment template +cp .env.example .env +# Edit .env to add your LLM API key (optional — structural validation works without it) + +# 3. Run skills +docker compose -f .github/skills/docker-compose.yml run --rm validate +docker compose -f .github/skills/docker-compose.yml run --rm validate-ig +docker compose -f .github/skills/docker-compose.yml run --rm import-bpmn +docker compose -f .github/skills/docker-compose.yml run --rm shell + +# Shortcut alias: +alias dak='docker compose -f .github/skills/docker-compose.yml run --rm' +dak validate +dak import-bpmn +``` + +### CI (GitHub Actions) + +Skills run automatically via GitHub Actions workflows: + +| Trigger | Workflow | What it does | +|---|---|---| +| Issue opened/edited | `classify-issue.yml` | Auto-labels issues with `content:L1/L2/L3/translation` | +| Label `content:L1` | `skill-l1-review.yml` | L1 guideline review (placeholder) | +| Label `content:L2` | `skill-l2-dak.yml` | L2 DAK content authoring | +| Label `content:L3` | `skill-l3-review.yml` | L3 adaptation review (placeholder) | +| Label `content:translation` | `skill-translation.yml` | Translation management (placeholder) | +| PR comment `/validate` | `pr-validate-slash.yml` | Structural + IG validation | + +## One-Time Repository Setup + +``` +1. 
Create labels (Issues → Labels → New label): + content:L1 #0075ca "WHO source guideline content" + content:L2 #e4e669 "DAK FHIR assets" + content:L3 #d73a4a "Implementation adaptations" + content:translation #0e8a16 "Translation of any content layer" + +2. Add secret (Settings → Secrets and variables → Actions → New repository secret): + DAK_LLM_API_KEY = sk-... + +3. Add variable (Settings → Secrets and variables → Variables → New variable): + DAK_LLM_MODEL = gpt-4o (or gpt-4o-mini to reduce cost) + +4. Build local Docker image (optional, for local development): + docker build -t dak-skill .github/skills/ +``` + +## Security Model + +- **API keys MUST NOT appear** in dispatch inputs, issue comments, PR comments, or any user-visible UI +- Two legitimate locations only: **repo secret** (CI) or **local `.env` file** (Docker/local) +- LLM steps skip gracefully when no key present — non-LLM validation always runs +- **Zero WHO infrastructure cost; zero WHO AI cost** + +### Graceful Degradation + +| Skill | No key | With key | +|---|---|---| +| BPMN structure validation | ✅ runs | ✅ runs | +| Swimlane ↔ ActorDef validation | ✅ runs | ✅ runs | +| IG Publisher build/validate | ✅ runs | ✅ runs | +| Issue classification | keyword fallback | LLM classification | +| LLM BPMN authoring | ⚠️ skipped | ✅ runs | +| LLM error interpretation | ⚠️ skipped | ✅ runs | + +## Directory Structure + +``` +.github/skills/ +├── Dockerfile # FROM hl7fhir/ig-publisher-base — mirrors CI +├── docker-compose.yml # Service aliases: validate, author, import, shell +├── README.md # This file +├── skills_registry.yaml # All registered skills +├── cli/ +│ └── dak_skill.py # CLI entry point +├── common/ +│ ├── smart_llm_facade.py # LLM interface (attributed from bpmn-assistant) +│ ├── prompts.py # load_prompt() — .md templates with {variable} +│ ├── ig_errors.py # FATAL/ERROR/WARNING/INFORMATION format +│ ├── fsh_utils.py # FSH file utilities +│ ├── ig_publisher_iface.py +│ └── prompts/ # Shared 
prompt templates +├── bpmn_author/ # Author/edit BPMN +├── bpmn_import/ # Import BPMN → FSH, validate lanes +├── ig_publisher/ # IG Publisher validation and build +├── dak_authoring/ # Issue classification and L2 authoring +├── l1_review/ # (placeholder v0.2) +├── l3_review/ # (placeholder v0.3) +└── translation/ # (placeholder v0.3) +``` + +## LLM Attribution + +The `SmartLLMFacade` in `common/smart_llm_facade.py` is copy-lifted with attribution +from [jtlicardo/bpmn-assistant](https://github.com/jtlicardo/bpmn-assistant) (MIT License). diff --git a/.github/skills/bpmn_author/__init__.py b/.github/skills/bpmn_author/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/.github/skills/bpmn_author/actions/bpmn_author_action.py b/.github/skills/bpmn_author/actions/bpmn_author_action.py new file mode 100644 index 0000000000..eb4464a044 --- /dev/null +++ b/.github/skills/bpmn_author/actions/bpmn_author_action.py @@ -0,0 +1,66 @@ +""" +BPMN Author action — creates or edits BPMN files via LLM, +then validates the result structurally. 
+ +Environment variables: + DAK_LLM_API_KEY — LLM API key (optional; LLM steps skipped if absent) + DAK_LLM_MODEL — LLM model name (default: gpt-4o) + GITHUB_TOKEN — GitHub API token for issue/PR interaction + ISSUE_NUMBER — GitHub issue number + ISSUE_TITLE — Issue title + ISSUE_BODY — Issue body text +""" + +import os +import sys +from pathlib import Path + +# Ensure the skills root is on sys.path +_SKILLS_ROOT = Path(__file__).resolve().parent.parent.parent +if str(_SKILLS_ROOT) not in sys.path: + sys.path.insert(0, str(_SKILLS_ROOT)) + +from common.ig_errors import format_issues, has_errors +from bpmn_author.validators.bpmn_xml_validator import validate_bpmn_xml +from bpmn_author.validators.swimlane_validator import validate_swimlanes + + +def main() -> None: + api_key = os.environ.get("DAK_LLM_API_KEY", "") + if not api_key: + print("⚠️ DAK_LLM_API_KEY not set — LLM step skipped (structural validation still runs)") + sys.exit(0) + + from common.smart_llm_facade import SmartLLMFacade + from common.prompts import load_prompt + + issue_title = os.environ.get("ISSUE_TITLE", "") + issue_body = os.environ.get("ISSUE_BODY", "") + model = os.environ.get("DAK_LLM_MODEL", "gpt-4o") + + llm = SmartLLMFacade(api_key=api_key, model=model) + + prompt = load_prompt( + "bpmn_author", "create_or_edit_bpmn", + user_request=f"{issue_title}\n\n{issue_body}", + current_bpmn="(none — creating new BPMN)", + ) + + print(f"🤖 Requesting BPMN from {model}...") + bpmn_xml = llm.call(prompt) + + # Validate the generated BPMN + issues = validate_bpmn_xml(bpmn_xml, filename="generated.bpmn") + issues.extend(validate_swimlanes(bpmn_xml, filename="generated.bpmn")) + + print(format_issues(issues)) + + if has_errors(issues): + print("❌ Generated BPMN has validation errors.") + sys.exit(1) + + print("✅ Generated BPMN passed structural validation.") + + +if __name__ == "__main__": + main() diff --git a/.github/skills/bpmn_author/prompts/create_or_edit_bpmn.md 
b/.github/skills/bpmn_author/prompts/create_or_edit_bpmn.md new file mode 100644 index 0000000000..75d47a6cc3 --- /dev/null +++ b/.github/skills/bpmn_author/prompts/create_or_edit_bpmn.md @@ -0,0 +1,33 @@ +# Create or Edit BPMN + +You are a BPMN 2.0 authoring assistant for WHO Digital Adaptation Kits (DAKs). + +## Your Task + +{user_request} + +## Constraints + +{dak_bpmn_constraints} + +## BPMN XML Schema + +{bpmn_xml_schema} + +## Actor Context + +{actor_context} + +## Current BPMN (if editing) + +```xml +{current_bpmn} +``` + +## Instructions + +1. Generate valid BPMN 2.0 XML following the constraints above. +2. Use meaningful lane IDs that can serve as FSH instance identifiers. +3. Ensure every task is assigned to exactly one lane. +4. Include sequence flows connecting all elements. +5. Return ONLY the BPMN XML — no explanation, no markdown fences. diff --git a/.github/skills/bpmn_author/prompts/validate_bpmn.md b/.github/skills/bpmn_author/prompts/validate_bpmn.md new file mode 100644 index 0000000000..fdc16eb91c --- /dev/null +++ b/.github/skills/bpmn_author/prompts/validate_bpmn.md @@ -0,0 +1,31 @@ +# Validate BPMN + +Review the following BPMN XML for compliance with WHO DAK constraints. + +## BPMN XML + +```xml +{bpmn_xml} +``` + +## Validation Results (structural) + +{validation_results} + +## Instructions + +Summarize the validation findings. For each issue: +1. Explain what is wrong and why it matters for DAK compliance. +2. Suggest a specific fix. + +If there are no issues, confirm the BPMN is valid. 
+Return your analysis as JSON: +```json +{{ + "valid": true/false, + "summary": "...", + "issues": [ + {{"code": "...", "severity": "...", "message": "...", "fix": "..."}} + ] +}} +``` diff --git a/.github/skills/bpmn_author/skills.yaml b/.github/skills/bpmn_author/skills.yaml new file mode 100644 index 0000000000..6e007098b3 --- /dev/null +++ b/.github/skills/bpmn_author/skills.yaml @@ -0,0 +1,23 @@ +# bpmn_author skill +name: bpmn_author +version: "0.1.0" +description: Author and edit standard BPMN 2.0 XML for DAK business processes + +commands: + - name: create-bpmn + description: Create a new BPMN file from a natural-language description + requires_llm: true + - name: edit-bpmn + description: Edit an existing BPMN file based on instructions + requires_llm: true + - name: validate-bpmn + description: Validate BPMN structure and DAK constraints (no LLM needed) + requires_llm: false + +validators: + - bpmn_xml_validator + - swimlane_validator + +prompts: + - create_or_edit_bpmn + - validate_bpmn diff --git a/.github/skills/bpmn_author/validators/__init__.py b/.github/skills/bpmn_author/validators/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/.github/skills/bpmn_author/validators/bpmn_xml_validator.py b/.github/skills/bpmn_author/validators/bpmn_xml_validator.py new file mode 100644 index 0000000000..99bb294687 --- /dev/null +++ b/.github/skills/bpmn_author/validators/bpmn_xml_validator.py @@ -0,0 +1,98 @@ +""" +BPMN XML structural validator. + +Validates that a BPMN file is well-formed XML, uses standard BPMN 2.0 +namespaces (no Zeebe/Camunda extensions), and follows basic structural rules. 
+""" + +from typing import List +from lxml import etree + +from common.ig_errors import Issue, error, warning, info + +BPMN_NS = "http://www.omg.org/spec/BPMN/20100524/MODEL" + +# Vendor namespaces that must NOT appear in DAK BPMN +_FORBIDDEN_NAMESPACES = { + "http://camunda.org/schema/zeebe/1.0": "Zeebe", + "http://camunda.org/schema/1.0/bpmn": "Camunda", + "http://camunda.org/schema/modeler/1.0": "Camunda Modeler", +} + + +def validate_bpmn_xml(bpmn_content: str, *, filename: str = "unknown.bpmn") -> List[Issue]: + """Validate BPMN XML content and return a list of issues. + + Checks: + 1. Well-formed XML + 2. Root element is bpmn:definitions + 3. No forbidden vendor namespaces + 4. At least one process element + 5. No duplicate id attributes + """ + issues: List[Issue] = [] + + # 1. Well-formed XML + try: + tree = etree.fromstring(bpmn_content.encode("utf-8")) + except etree.XMLSyntaxError as exc: + issues.append(error("BPMN-001", f"Malformed XML: {exc}", file=filename)) + return issues # Can't continue + + # 2. Root element check + expected_tag = f"{{{BPMN_NS}}}definitions" + if tree.tag != expected_tag: + issues.append(error( + "BPMN-002", + f"Root element must be , got <{tree.tag}>", + file=filename, + )) + + # 3. Forbidden vendor namespaces + nsmap = tree.nsmap if hasattr(tree, "nsmap") else {} + for uri, vendor in _FORBIDDEN_NAMESPACES.items(): + if uri in nsmap.values(): + issues.append(error( + "BPMN-003", + f"Forbidden {vendor} namespace detected: {uri}", + file=filename, + )) + + # Also check for vendor namespaces on any descendant + for elem in tree.iter(): + for uri, vendor in _FORBIDDEN_NAMESPACES.items(): + if uri in (elem.nsmap or {}).values(): + issues.append(error( + "BPMN-003", + f"Forbidden {vendor} namespace on <{elem.tag}>: {uri}", + file=filename, + )) + break # one per element is enough + + # 4. 
At least one process + processes = tree.findall(f"{{{BPMN_NS}}}process") + if not processes: + issues.append(error( + "BPMN-004", + "No element found", + file=filename, + )) + + # 5. Duplicate IDs + all_ids: dict = {} + for elem in tree.iter(): + eid = elem.get("id") + if eid: + if eid in all_ids: + issues.append(error( + "BPMN-005", + f"Duplicate id '{eid}' (first seen on <{all_ids[eid]}>)", + file=filename, + )) + else: + all_ids[eid] = elem.tag + + if not issues: + issues.append(info("BPMN-000", "BPMN XML structure is valid", file=filename)) + + return issues diff --git a/.github/skills/bpmn_author/validators/swimlane_validator.py b/.github/skills/bpmn_author/validators/swimlane_validator.py new file mode 100644 index 0000000000..e1affbad44 --- /dev/null +++ b/.github/skills/bpmn_author/validators/swimlane_validator.py @@ -0,0 +1,115 @@ +""" +BPMN swimlane validator. + +Validates DAK swimlane constraints: +- Lanes must be present in every process +- No orphan tasks (every flow node referenced by a lane) +- No duplicate lane IDs +- Lane IDs are valid FSH identifiers +""" + +import re +from typing import List +from lxml import etree + +from common.ig_errors import Issue, error, warning, info + +BPMN_NS = "http://www.omg.org/spec/BPMN/20100524/MODEL" + +# Valid FSH identifier pattern +_FSH_ID_RE = re.compile(r"^[A-Za-z0-9.\-]+$") + +# Flow node types that should be assigned to a lane +_FLOW_NODE_TAGS = { + f"{{{BPMN_NS}}}{tag}" + for tag in ( + "task", "userTask", "serviceTask", "sendTask", "receiveTask", + "manualTask", "businessRuleTask", "scriptTask", "callActivity", + "subProcess", "startEvent", "endEvent", "intermediateThrowEvent", + "intermediateCatchEvent", "boundaryEvent", + "exclusiveGateway", "parallelGateway", "inclusiveGateway", + "eventBasedGateway", "complexGateway", + ) +} + + +def validate_swimlanes(bpmn_content: str, *, filename: str = "unknown.bpmn") -> List[Issue]: + """Validate BPMN swimlane structure and return a list of issues. 
+ + Checks: + 1. Every process has a laneSet + 2. Lane IDs are valid FSH identifiers + 3. No orphan flow nodes (every node referenced by a lane) + 4. No duplicate lane IDs across the document + """ + issues: List[Issue] = [] + + try: + tree = etree.fromstring(bpmn_content.encode("utf-8")) + except etree.XMLSyntaxError: + issues.append(error("SWIM-001", "Cannot parse XML", file=filename)) + return issues + + # Collect all lane IDs for duplicate check + seen_lane_ids: dict = {} + + processes = tree.findall(f"{{{BPMN_NS}}}process") + for proc in processes: + proc_id = proc.get("id", "?") + + # 1. laneSet present? + lane_sets = proc.findall(f"{{{BPMN_NS}}}laneSet") + if not lane_sets: + issues.append(error( + "SWIM-002", + f"Process '{proc_id}' has no ", + file=filename, + )) + continue + + # Collect all flowNodeRefs from lanes + all_refs: set = set() + for lane_set in lane_sets: + for lane in lane_set.iter(f"{{{BPMN_NS}}}lane"): + lane_id = lane.get("id", "") + + # 2. Valid FSH ID? + if lane_id and not _FSH_ID_RE.match(lane_id): + issues.append(warning( + "SWIM-003", + f"Lane id '{lane_id}' contains characters invalid for FSH identifiers " + f"(allowed: A-Z, a-z, 0-9, '-', '.')", + file=filename, + )) + + # Duplicate lane ID? + if lane_id: + if lane_id in seen_lane_ids: + issues.append(error( + "SWIM-004", + f"Duplicate lane id '{lane_id}'", + file=filename, + )) + else: + seen_lane_ids[lane_id] = True + + for ref in lane.findall(f"{{{BPMN_NS}}}flowNodeRef"): + if ref.text: + all_refs.add(ref.text.strip()) + + # 3. Orphan flow nodes? 
+ for child in proc: + if child.tag in _FLOW_NODE_TAGS: + node_id = child.get("id", "") + if node_id and node_id not in all_refs: + issues.append(warning( + "SWIM-005", + f"Flow node '{node_id}' ({child.tag.split('}')[-1]}) " + f"is not referenced by any lane in process '{proc_id}'", + file=filename, + )) + + if not issues: + issues.append(info("SWIM-000", "Swimlane structure is valid", file=filename)) + + return issues diff --git a/.github/skills/bpmn_import/__init__.py b/.github/skills/bpmn_import/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/.github/skills/bpmn_import/actions/bpmn_import_action.py b/.github/skills/bpmn_import/actions/bpmn_import_action.py new file mode 100644 index 0000000000..f1926060d0 --- /dev/null +++ b/.github/skills/bpmn_import/actions/bpmn_import_action.py @@ -0,0 +1,53 @@ +""" +BPMN Import action — wraps bpmn_extractor.py and validates lane→actor mapping. + +Environment variables: + DAK_LLM_API_KEY — LLM API key (optional; error interpretation skipped if absent) + DAK_LLM_MODEL — LLM model name (default: gpt-4o) + GITHUB_TOKEN — GitHub API token + DAK_IG_ROOT — IG root directory (default: current directory) +""" + +import glob +import os +import sys +from pathlib import Path + +_SKILLS_ROOT = Path(__file__).resolve().parent.parent.parent +if str(_SKILLS_ROOT) not in sys.path: + sys.path.insert(0, str(_SKILLS_ROOT)) + +from common.ig_errors import format_issues, has_errors +from bpmn_import.validators.swimlane_actor_validator import validate_swimlane_actors + + +def main() -> None: + ig_root = os.environ.get("DAK_IG_ROOT", ".") + bpmn_files = glob.glob(os.path.join(ig_root, "input", "business-processes", "*.bpmn")) + + if not bpmn_files: + print("ℹ️ No BPMN files found in input/business-processes/") + sys.exit(0) + + all_issues = [] + for bpmn_path in bpmn_files: + print(f"📄 Validating: {bpmn_path}") + content = Path(bpmn_path).read_text(encoding="utf-8") + issues = validate_swimlane_actors( + content, + 
ig_root=ig_root, + filename=os.path.basename(bpmn_path), + ) + all_issues.extend(issues) + + print(format_issues(all_issues)) + + if has_errors(all_issues): + print("❌ BPMN import validation found errors.") + sys.exit(1) + + print("✅ All BPMN files passed lane→actor validation.") + + +if __name__ == "__main__": + main() diff --git a/.github/skills/bpmn_import/prompts/interpret_import_errors.md b/.github/skills/bpmn_import/prompts/interpret_import_errors.md new file mode 100644 index 0000000000..05089fa480 --- /dev/null +++ b/.github/skills/bpmn_import/prompts/interpret_import_errors.md @@ -0,0 +1,28 @@ +# Interpret Import Errors + +You are helping a DAK author understand errors from the BPMN import pipeline. + +## Import Output + +{import_output} + +## Errors Found + +{error_list} + +## Instructions + +For each error: +1. Explain what went wrong in plain language. +2. Identify the likely cause (missing actor, malformed BPMN, XSLT issue). +3. Suggest a concrete fix the author can apply. + +Return your analysis as JSON: +```json +{{ + "summary": "...", + "errors": [ + {{"code": "...", "message": "...", "cause": "...", "fix": "..."}} + ] +}} +``` diff --git a/.github/skills/bpmn_import/skills.yaml b/.github/skills/bpmn_import/skills.yaml new file mode 100644 index 0000000000..9ab46f485e --- /dev/null +++ b/.github/skills/bpmn_import/skills.yaml @@ -0,0 +1,18 @@ +# bpmn_import skill +name: bpmn_import +version: "0.1.0" +description: Import BPMN files via bpmn_extractor.py and validate lane-to-actor mapping + +commands: + - name: import-bpmn + description: Run bpmn_extractor.py pipeline and validate output + requires_llm: false + - name: validate-actors + description: Validate that every innermost lane maps to an ActorDefinition + requires_llm: false + +validators: + - swimlane_actor_validator + +prompts: + - interpret_import_errors diff --git a/.github/skills/bpmn_import/validators/__init__.py b/.github/skills/bpmn_import/validators/__init__.py new file mode 100644 
index 0000000000..e69de29bb2 diff --git a/.github/skills/bpmn_import/validators/swimlane_actor_validator.py b/.github/skills/bpmn_import/validators/swimlane_actor_validator.py new file mode 100644 index 0000000000..28d10b668a --- /dev/null +++ b/.github/skills/bpmn_import/validators/swimlane_actor_validator.py @@ -0,0 +1,73 @@ +""" +Swimlane → ActorDefinition validator. + +Checks that every innermost ```` in BPMN files has a +corresponding ``input/fsh/actors/ActorDefinition-DAK.X.fsh`` file. +""" + +import os +from typing import List +from lxml import etree + +from common.ig_errors import Issue, error, warning, info +from common.fsh_utils import instance_exists + +BPMN_NS = "http://www.omg.org/spec/BPMN/20100524/MODEL" + + +def _is_innermost_lane(lane: etree._Element) -> bool: + """Return True if the lane has no nested childLaneSet.""" + return lane.find(f"{{{BPMN_NS}}}childLaneSet") is None + + +def validate_swimlane_actors( + bpmn_content: str, + *, + ig_root: str = ".", + filename: str = "unknown.bpmn", +) -> List[Issue]: + """Validate that every innermost lane has a matching ActorDefinition FSH file. + + Args: + bpmn_content: BPMN XML as a string. + ig_root: Path to the IG root directory. + filename: Source filename for issue reporting. + + Returns: + List of validation issues. 
+ """ + issues: List[Issue] = [] + + try: + tree = etree.fromstring(bpmn_content.encode("utf-8")) + except etree.XMLSyntaxError as exc: + issues.append(error("ACTOR-001", f"Cannot parse BPMN XML: {exc}", file=filename)) + return issues + + for lane in tree.iter(f"{{{BPMN_NS}}}lane"): + if not _is_innermost_lane(lane): + continue + + lane_id = lane.get("id", "") + lane_name = lane.get("name", lane_id) + + if not lane_id: + issues.append(warning( + "ACTOR-002", + f"Lane without id attribute (name='{lane_name}')", + file=filename, + )) + continue + + if not instance_exists(ig_root, lane_id): + issues.append(error( + "ACTOR-003", + f"No ActorDefinition FSH file for lane '{lane_id}' " + f"(expected: input/fsh/actors/ActorDefinition-DAK.{lane_id}.fsh)", + file=filename, + )) + + if not issues: + issues.append(info("ACTOR-000", "All lanes map to ActorDefinition files", file=filename)) + + return issues diff --git a/.github/skills/cli/dak_skill.py b/.github/skills/cli/dak_skill.py new file mode 100644 index 0000000000..2ab4fd4f86 --- /dev/null +++ b/.github/skills/cli/dak_skill.py @@ -0,0 +1,75 @@ +#!/usr/bin/env python3 +""" +dak-skill CLI — entry point for the DAK Skill Library. + +Usage: + dak-skill validate # DAK structural validation (no LLM needed) + dak-skill validate-ig # Full IG Publisher validation + dak-skill build-ig # Full IG Publisher build + dak-skill import-bpmn # Import BPMN files and validate + dak-skill author "..." 
#!/usr/bin/env python3
"""
dak-skill CLI — entry point for the DAK Skill Library.

Dispatches a sub-command to the matching skill action module (see the
``_COMMANDS`` table) and runs its ``main()`` function.

Environment variables:
    DAK_LLM_API_KEY — LLM API key (optional; LLM steps skipped if absent)
    DAK_LLM_MODEL   — LLM model name (default: gpt-4o)
    DAK_IG_ROOT     — IG root directory (default: current directory)
"""

import argparse
import importlib
import sys
from pathlib import Path

# Make sibling skill packages importable when this file runs as a script.
_SKILLS_ROOT = Path(__file__).resolve().parent.parent
if str(_SKILLS_ROOT) not in sys.path:
    sys.path.insert(0, str(_SKILLS_ROOT))

# CLI command -> dotted path of the action module that implements it.
_COMMANDS = {
    "validate": "ig_publisher.actions.validate_dak_action",
    "validate-ig": "ig_publisher.actions.validate_ig_action",
    "build-ig": "ig_publisher.actions.build_ig_action",
    "import-bpmn": "bpmn_import.actions.bpmn_import_action",
    "author": "bpmn_author.actions.bpmn_author_action",
    "classify": "dak_authoring.actions.classify_issue_action",
    "interpret-errors": "ig_publisher.actions.interpret_errors_action",
}


def main() -> None:
    """Parse the command line and delegate to the selected skill module."""
    cli = argparse.ArgumentParser(
        prog="dak-skill",
        description="DAK Skill Library CLI",
    )
    cli.add_argument(
        "command",
        choices=list(_COMMANDS.keys()),
        help="Skill command to run",
    )
    cli.add_argument(
        "args",
        nargs="*",
        help="Additional arguments passed to the skill",
    )
    parsed = cli.parse_args()

    target = _COMMANDS[parsed.command]
    try:
        skill = importlib.import_module(target)
    except ImportError as err:
        print(f"❌ Failed to import skill module '{target}': {err}", file=sys.stderr)
        sys.exit(1)

    if hasattr(skill, "main"):
        skill.main()
    else:
        print(f"❌ Module '{target}' has no main() function", file=sys.stderr)
        sys.exit(1)


if __name__ == "__main__":
    main()
"""
FSH (FHIR Shorthand) utility helpers for DAK skill actions.

Small helpers shared by the BPMN import and DAK authoring skills for
locating, checking, and reading ActorDefinition FSH files.
"""

import re
from pathlib import Path
from typing import Optional

# Anything outside the FSH identifier alphabet is replaced by "-".
_UNSAFE_ID_CHARS = re.compile(r"[^A-Za-z0-9.\-]")

# Matches the "Instance: <id>" header line of a FSH file.
_INSTANCE_RE = re.compile(r"^Instance:\s*(\S+)", re.MULTILINE)


def fsh_id_safe(raw_id: str) -> str:
    """Sanitize a string to a valid FSH instance identifier.

    FSH identifiers allow ``[A-Za-z0-9\\-\\.]``; every character outside
    that set is replaced with ``-``.
    """
    return _UNSAFE_ID_CHARS.sub("-", raw_id)


def actor_fsh_path(ig_root: str, bare_id: str) -> Path:
    """Return the expected path for an ActorDefinition FSH file.

    Convention from ``bpmn2fhirfsh.xsl``:
    ``input/fsh/actors/ActorDefinition-DAK.<bare_id>.fsh``
    """
    return Path(ig_root).joinpath(
        "input", "fsh", "actors", f"ActorDefinition-DAK.{bare_id}.fsh"
    )


def instance_exists(ig_root: str, bare_id: str) -> bool:
    """Check whether an ActorDefinition FSH file exists for *bare_id*."""
    return actor_fsh_path(ig_root, bare_id).is_file()


def read_fsh_instance_id(fsh_path: Path) -> Optional[str]:
    """Extract the ``Instance:`` identifier from a FSH file, if present."""
    if not fsh_path.is_file():
        return None
    found = _INSTANCE_RE.search(fsh_path.read_text(encoding="utf-8"))
    return found.group(1) if found else None
"""
IG Publisher error-level constants and formatting helpers.

All DAK skill validators report findings with these severity levels,
mirroring the IG Publisher output format.

Usage:
    from common.ig_errors import error, warning, info, fatal, format_issues

    issues = [error("BPMN-001", "Zeebe namespace detected", file="test.bpmn"),
              warning("SWIM-002", "Lane has no tasks")]
    print(format_issues(issues))
"""

from dataclasses import dataclass, field
from typing import List, Optional


# Severity constants (match IG Publisher levels)
FATAL = "FATAL"
ERROR = "ERROR"
WARNING = "WARNING"
INFORMATION = "INFORMATION"


@dataclass
class Issue:
    """A single validation finding."""

    severity: str                # one of FATAL / ERROR / WARNING / INFORMATION
    code: str                    # short rule code, e.g. "BPMN-001"
    message: str                 # human-readable description
    file: Optional[str] = None   # source file, when known
    line: Optional[int] = None   # line number, when known

    def __str__(self) -> str:
        # Render "(file[:line])" only when a file is known; a line number
        # without a file is ignored, matching the original formatting.
        if self.file:
            where = f"{self.file}:{self.line}" if self.line else self.file
            return f"[{self.severity}] {self.code}: {self.message} ({where})"
        return f"[{self.severity}] {self.code}: {self.message}"


def fatal(code: str, message: str, **kwargs) -> Issue:
    """Create a FATAL issue."""
    return Issue(FATAL, code, message, **kwargs)


def error(code: str, message: str, **kwargs) -> Issue:
    """Create an ERROR issue."""
    return Issue(ERROR, code, message, **kwargs)


def warning(code: str, message: str, **kwargs) -> Issue:
    """Create a WARNING issue."""
    return Issue(WARNING, code, message, **kwargs)


def info(code: str, message: str, **kwargs) -> Issue:
    """Create an INFORMATION issue."""
    return Issue(INFORMATION, code, message, **kwargs)


def format_issues(issues: List[Issue]) -> str:
    """Format a list of issues as a multi-line string."""
    if not issues:
        return "✅ No issues found."
    return "\n".join(map(str, issues))


def has_errors(issues: List[Issue]) -> bool:
    """Return True if any issue is FATAL or ERROR."""
    blocking = {FATAL, ERROR}
    return any(issue.severity in blocking for issue in issues)
+ """ + ig_root = ig_root or os.environ.get("DAK_IG_ROOT", ".") + ig_root_path = Path(ig_root) + + # Prefer the repo's own runner script if present + runner_script = ig_root_path / "input" / "scripts" / "run_ig_publisher.py" + if runner_script.is_file(): + cmd = [sys.executable, str(runner_script)] + else: + jar = os.environ.get( + "PUBLISHER_JAR", + str(ig_root_path / "input-cache" / "publisher.jar"), + ) + cmd = ["java", "-jar", jar, "-ig", str(ig_root_path)] + + if tx_server: + cmd.extend(["-tx", tx_server]) + if extra_args: + cmd.extend(extra_args) + + logger.info("Running IG Publisher: %s", " ".join(cmd)) + return subprocess.run(cmd, capture_output=True, text=True, cwd=str(ig_root_path)) diff --git a/.github/skills/common/prompts.py b/.github/skills/common/prompts.py new file mode 100644 index 0000000000..b6104cc5a6 --- /dev/null +++ b/.github/skills/common/prompts.py @@ -0,0 +1,65 @@ +""" +Prompt loader for DAK skill actions. + +Prompts are stored as Markdown files with ``{variable}`` placeholders. +``load_prompt()`` reads the file and substitutes variables using +``str.format_map``. + +Usage: + from common.prompts import load_prompt + + prompt = load_prompt("bpmn_author", "create_or_edit_bpmn", + bpmn_xml="", + user_request="Add a pharmacy lane") +""" + +import os +from pathlib import Path +from typing import Any + + +# Root of the skills directory (parent of common/) +_SKILLS_ROOT = Path(__file__).resolve().parent.parent + + +def load_prompt(skill_name: str, prompt_name: str, **variables: Any) -> str: + """Load a ``.md`` prompt template and fill ``{variable}`` placeholders. + + The file is resolved as:: + + .github/skills//prompts/.md + + Falls back to:: + + .github/skills/common/prompts/.md + + Args: + skill_name: Skill directory name (e.g. ``"bpmn_author"``). + prompt_name: Prompt file stem (without ``.md``). + **variables: Substitution values for ``{key}`` placeholders. + + Returns: + The rendered prompt string. 
+ + Raises: + FileNotFoundError: If neither skill-specific nor common prompt exists. + """ + skill_path = _SKILLS_ROOT / skill_name / "prompts" / f"{prompt_name}.md" + common_path = _SKILLS_ROOT / "common" / "prompts" / f"{prompt_name}.md" + + for path in (skill_path, common_path): + if path.is_file(): + template = path.read_text(encoding="utf-8") + return template.format_map(_SafeDict(variables)) + + raise FileNotFoundError( + f"Prompt '{prompt_name}.md' not found in " + f"'{skill_path}' or '{common_path}'" + ) + + +class _SafeDict(dict): + """dict subclass that returns ``{key}`` for missing keys instead of raising.""" + + def __missing__(self, key: str) -> str: + return "{" + key + "}" diff --git a/.github/skills/common/prompts/actor_context.md b/.github/skills/common/prompts/actor_context.md new file mode 100644 index 0000000000..e0af43766b --- /dev/null +++ b/.github/skills/common/prompts/actor_context.md @@ -0,0 +1,25 @@ +# Actor Context + +When working with BPMN lanes and DAK personas, the following mapping applies: + +## Lane ID → ActorDefinition Mapping + +``` +BPMN: +FSH: Instance: X + InstanceOf: $SGActor + Title: "Some Name" +``` + +- The lane `@id` is the bare FSH instance identifier — **no `DAK.` prefix** on the lane itself. +- `bpmn2fhirfsh.xsl` generates the file as `ActorDefinition-DAK.{@id}.fsh` + and sets `* id = "DAK.{@id}"` inside the FSH. +- The lane `@id` in BPMN = the bare instance name. + +## Existing Actors + +{actor_list} + +## Valid Lane ID Characters + +Lane IDs must match `[A-Za-z0-9\-\.]` to be valid FSH instance identifiers. 
diff --git a/.github/skills/common/prompts/bpmn_xml_schema.md b/.github/skills/common/prompts/bpmn_xml_schema.md new file mode 100644 index 0000000000..52b8576ea1 --- /dev/null +++ b/.github/skills/common/prompts/bpmn_xml_schema.md @@ -0,0 +1,57 @@ +# BPMN 2.0 XML Schema Reference + +Standard BPMN 2.0 XML structure for DAK workflows: + +```xml + + + + + + + + + + + Task_1 + + + Task_2 + + + + + + + + + + + + + + +``` + +## Key elements + +| Element | Purpose | +|---|---| +| `` | Top-level container for participants (pools) | +| `` | A pool — references a `` | +| `` | Contains lanes, tasks, events, gateways, flows | +| `` | Container for lanes within a process | +| `` | Swimlane — innermost lanes = DAK personas | +| `` | Generic task | +| `` | Human-performed task | +| `` | System-performed task | +| `` | XOR decision point | +| `` | AND fork/join | +| `` | Process entry point | +| `` | Process termination point | +| `` | Directed edge between flow nodes | diff --git a/.github/skills/common/prompts/dak_bpmn_constraints.md b/.github/skills/common/prompts/dak_bpmn_constraints.md new file mode 100644 index 0000000000..ed8f007eb5 --- /dev/null +++ b/.github/skills/common/prompts/dak_bpmn_constraints.md @@ -0,0 +1,19 @@ +# DAK BPMN Constraints + +When authoring or editing BPMN for a WHO Digital Adaptation Kit (DAK): + +1. **Standard BPMN 2.0 only** — no Zeebe, Camunda, or other vendor extensions. + The root element must be ``. + +2. **Swimlane rules:** + - Every process must have at least one `` with at least one ``. + - Innermost lanes (lanes with no ``) represent DAK **personas** and map to FHIR `ActorDefinition` instances. + - `` — the `id` attribute is the **bare FSH instance identifier**. It must match `[A-Za-z0-9\-\.]`. + - `` — the `name` attribute becomes the human-readable `Title:` in the generated FSH. + - Every ``, ``, ``, etc., must be referenced by exactly one lane via ``. + +3. 
"""
SMART LLM Facade — thin wrapper around LiteLLM for DAK skill actions.

Copy-lifted with gratitude and attribution from:
    https://github.com/jtlicardo/bpmn-assistant (MIT License)

Original: LLMFacade class by jtlicardo
Adapted for WHO SMART Guidelines DAK skill library.

Usage:
    from common.smart_llm_facade import SmartLLMFacade

    llm = SmartLLMFacade(api_key="sk-...", model="gpt-4o")
    answer = llm.call("Explain BPMN swimlanes")
    structured = llm.call(prompt, structured_output=True)
"""

import json
import logging
import os
from typing import Any, Dict, Optional

logger = logging.getLogger(__name__)


class SmartLLMFacade:
    """Minimal LLM facade using LiteLLM for multi-provider support."""

    def __init__(
        self,
        api_key: Optional[str] = None,
        model: Optional[str] = None,
    ) -> None:
        # Fall back to env vars so CI and local runs need no explicit wiring.
        self.api_key = api_key or os.environ.get("DAK_LLM_API_KEY", "")
        self.model = model or os.environ.get("DAK_LLM_MODEL", "gpt-4o")

    def is_available(self) -> bool:
        """Return True if an API key is configured."""
        return bool(self.api_key)

    def call(
        self,
        prompt: str,
        *,
        system_prompt: str = "",
        structured_output: bool = False,
        temperature: float = 0.2,
    ) -> Any:
        """Send a prompt to the configured LLM and return the response.

        Args:
            prompt: User prompt text.
            system_prompt: Optional system-level instruction.
            structured_output: When True, attempt to parse the response as JSON.
            temperature: Sampling temperature.

        Returns:
            str or dict depending on *structured_output*.

        Raises:
            RuntimeError: If no API key is configured.
            ImportError: If litellm is not installed.
        """
        if not self.api_key:
            raise RuntimeError(
                "DAK_LLM_API_KEY not set — cannot call LLM. "
                "Set the key in a repo secret or local .env file."
            )

        try:
            import litellm  # noqa: F811
        except ImportError as exc:
            raise ImportError(
                "litellm is required for LLM features. "
                "Install it with: pip install 'litellm>=1.0.0'"
            ) from exc

        messages = []
        if system_prompt:
            messages.append({"role": "system", "content": system_prompt})
        messages.append({"role": "user", "content": prompt})

        # Rough token estimate (4 chars/token) — for logging only.
        logger.info("LLM request: model=%s tokens≈%d", self.model, len(prompt) // 4)

        response = litellm.completion(
            model=self.model,
            messages=messages,
            temperature=temperature,
            api_key=self.api_key,
        )

        # Some providers return None content (e.g. tool-only responses);
        # treat that as an empty string rather than crashing on .strip().
        content = response.choices[0].message.content or ""
        text: str = content.strip()

        if structured_output:
            return self._parse_json(text)
        return text

    @staticmethod
    def _parse_json(text: str) -> Dict[str, Any]:
        """Best-effort JSON extraction from LLM response text.

        Tries, in order: the whole text, the contents of a ```json fence,
        the contents of a plain ``` fence. A fence with no closing marker
        is parsed through to the end of the text (previously this raised
        an uncaught ValueError from str.index). Falls back to wrapping the
        raw text.
        """
        try:
            return json.loads(text)
        except json.JSONDecodeError:
            pass
        for fence in ("```json", "```"):
            start = text.find(fence)
            if start == -1:
                continue
            start += len(fence)
            end = text.find("```", start)
            candidate = text[start:end] if end != -1 else text[start:]
            try:
                return json.loads(candidate.strip())
            except json.JSONDecodeError:
                pass
        # Last resort: return as dict with raw text
        return {"raw": text}
b/.github/skills/dak_authoring/actions/classify_issue_action.py @@ -0,0 +1,168 @@ +""" +Issue classifier — applies content:L1/L2/L3/translation labels. +Uses LLM when DAK_LLM_API_KEY is set; falls back to keyword matching. +Both paths use the same label application logic. +""" + +import os +import sys +from pathlib import Path + +_SKILLS_ROOT = Path(__file__).resolve().parent.parent.parent +if str(_SKILLS_ROOT) not in sys.path: + sys.path.insert(0, str(_SKILLS_ROOT)) + +# ── Keyword lists ────────────────────────────────────────────────────────── + +L1_KEYWORDS = [ + # WHO guideline source + "recommendation", "who recommendation", "guideline", "who guideline", + "clinical guideline", "evidence", "evidence base", "evidence-based", + "narrative", "who narrative", "source content", "who document", + # Sections of WHO guideline documents + "section 2", "section 3", "section 4", "annex", "appendix", + "executive summary", "background", "scope", "target population", + # Clinical content + "clinical", "intervention", "outcome", "efficacy", "safety", + "contraindication", "dosage", "dose", "regimen", "protocol", + "screening", "diagnosis", "treatment", "management", "referral", + "counselling", "counseling", "antenatal", "postnatal", "maternal", + "newborn", "child", "adolescent", "immunization", "vaccination", + # Process + "new recommendation", "update recommendation", "change guideline", + "outdated", "superseded", "retracted", +] + +L2_KEYWORDS = [ + # BPMN / process + "bpmn", "business process", "swimlane", "swim lane", "workflow", + "process diagram", "process model", "process flow", "flow diagram", + "lane", "pool", "gateway", "sequence flow", "start event", "end event", + "user task", "service task", "business rule", "send task", "receive task", + # Personas / actors + "persona", "actor", "actordefinition", "actor definition", + "health worker", "healthcare worker", "community health worker", "chw", + "clinician", "nurse", "midwife", "physician", "doctor", 
"pharmacist", + "supervisor", "facility", "patient", "client", "caregiver", + # FHIR resources / FSH + "fhir", "fsh", "sushi", "profile", "instance", "extension", + "codesystem", "code system", "valueset", "value set", "conceptmap", + "structuredefinition", "logical model", "implementation guide", "ig", + # DAK components + "questionnaire", "data element", "data dictionary", "decision table", + "decision logic", "cql", "clinical quality language", "library", + "plandefinition", "activitydefinition", "measure", + "requirement", "non-functional", "functional requirement", + # DAK L2 editorial + "dak", "digital adaptation kit", "l2", "component 2", "component 3", + "component 4", "component 5", "component 6", "component 7", "component 8", + "business process", "generic persona", "related persona", + "core data element", "decision support", "scheduling logic", + "indicator", "performance indicator", +] + +L3_KEYWORDS = [ + # Geographic / organizational scope + "national", "country", "country-specific", "country adaptation", + "local", "regional", "district", "sub-national", + "program", "programme", "program-level", "programme-level", + # Adaptation process + "adaptation", "adapt", "localize", "localise", "contextualize", + "contextualise", "customise", "customize", "context-specific", + "l3", "layer 3", "implementation guide", "conformance", + # System / interoperability + "system", "ehr", "emr", "electronic health record", + "health information system", "his", "dhis2", "openemr", "openmrs", + "mapping", "terminology mapping", "code mapping", + "interoperability", "integration", "api", "openapi", + "capability statement", +] + +TRANSLATION_KEYWORDS = [ + # Languages + "translation", "translate", "translated", "translating", + "arabic", "\u0639\u0631\u0628\u064a", "ar", + "chinese", "mandarin", "\u4e2d\u6587", "zh", + "french", "fran\u00e7ais", "francais", "fr", + "russian", "\u0440\u0443\u0441\u0441\u043a\u0438\u0439", "ru", + "spanish", "espa\u00f1ol", "espanol", 
"es", + "portuguese", "portugu\u00eas", "pt", + # Translation tooling + "weblate", "po file", ".po", "pot file", ".pot", "gettext", + "msgstr", "msgid", "locale", "localization", "localisation", + "i18n", "l10n", "internationalization", + # Translation issues + "mistranslation", "mistranslated", "wrong translation", + "translation error", "translation review", "translation update", + "string", "untranslated", "missing translation", +] + + +def classify_by_keywords(title: str, body: str) -> list: + """Keyword-based fallback classifier. Case-insensitive. No LLM needed.""" + text = (title + " " + (body or "")).lower() + labels = [] + if any(k in text for k in L1_KEYWORDS): + labels.append("content:L1") + if any(k in text for k in L2_KEYWORDS): + labels.append("content:L2") + if any(k in text for k in L3_KEYWORDS): + labels.append("content:L3") + if any(k in text for k in TRANSLATION_KEYWORDS): + labels.append("content:translation") + return labels + + +def apply_labels(issue_number: int, labels: list) -> None: + """Apply labels to issue via GitHub REST API using GITHUB_TOKEN.""" + import requests + + token = os.environ["GITHUB_TOKEN"] + repo = os.environ["GITHUB_REPOSITORY"] + if not labels: + return + r = requests.post( + f"https://api.github.com/repos/{repo}/issues/{issue_number}/labels", + headers={ + "Authorization": f"Bearer {token}", + "Accept": "application/vnd.github+json", + }, + json={"labels": labels}, + timeout=10, + ) + r.raise_for_status() + print(f"\u2705 Applied labels: {labels}") + + +def main(): + from common.prompts import load_prompt + from common.smart_llm_facade import SmartLLMFacade + + issue_number = int(os.environ["ISSUE_NUMBER"]) + title = os.environ.get("ISSUE_TITLE", "") + body = os.environ.get("ISSUE_BODY", "") + api_key = os.environ.get("DAK_LLM_API_KEY", "") + + if api_key: + # LLM path + prompt = load_prompt( + "dak_authoring", "classify_issue", + issue_title=title, issue_body=body[:4000], + ) + llm = SmartLLMFacade( + api_key=api_key, 
+ model=os.environ.get("DAK_LLM_MODEL", "gpt-4o-mini"), + ) + result = llm.call(prompt, structured_output=True) + labels = result.get("labels", []) + print(f"LLM classification: {result.get('reasoning')}") + else: + # Keyword fallback — no LLM cost + labels = classify_by_keywords(title, body) + print(f"\u26a0\ufe0f No LLM key \u2014 keyword fallback used. Labels: {labels}") + + apply_labels(issue_number, labels) + + +if __name__ == "__main__": + main() diff --git a/.github/skills/dak_authoring/actions/dak_authoring_action.py b/.github/skills/dak_authoring/actions/dak_authoring_action.py new file mode 100644 index 0000000000..b50341bcb2 --- /dev/null +++ b/.github/skills/dak_authoring/actions/dak_authoring_action.py @@ -0,0 +1,53 @@ +""" +DAK L2 authoring action — processes content:L2 labeled issues. + +Environment variables: + DAK_LLM_API_KEY — LLM API key (optional; LLM steps skipped if absent) + DAK_LLM_MODEL — LLM model name (default: gpt-4o) + GITHUB_TOKEN — GitHub API token + ISSUE_NUMBER — GitHub issue number + ISSUE_TITLE — Issue title + ISSUE_BODY — Issue body text +""" + +import os +import sys +from pathlib import Path + +_SKILLS_ROOT = Path(__file__).resolve().parent.parent.parent +if str(_SKILLS_ROOT) not in sys.path: + sys.path.insert(0, str(_SKILLS_ROOT)) + + +def main() -> None: + api_key = os.environ.get("DAK_LLM_API_KEY", "") + if not api_key: + print("⚠️ DAK_LLM_API_KEY not set — LLM step skipped (structural validation still runs)") + sys.exit(0) + + from common.smart_llm_facade import SmartLLMFacade + from common.prompts import load_prompt + + issue_title = os.environ.get("ISSUE_TITLE", "") + issue_body = os.environ.get("ISSUE_BODY", "") + model = os.environ.get("DAK_LLM_MODEL", "gpt-4o") + + llm = SmartLLMFacade(api_key=api_key, model=model) + + prompt = load_prompt( + "dak_authoring", "l2_authoring", + issue_title=issue_title, + issue_body=issue_body[:4000], + ) + + print(f"🤖 Planning L2 content changes with {model}...") + result = 
def main() -> None:
    """Plan L2 DAK content changes for the current issue via the LLM.

    Exits 0 immediately when no LLM key is configured so CI jobs without
    a key still succeed (structural validation runs separately).
    """
    key = os.environ.get("DAK_LLM_API_KEY", "")
    if not key:
        print("⚠️ DAK_LLM_API_KEY not set — LLM step skipped (structural validation still runs)")
        sys.exit(0)

    from common.smart_llm_facade import SmartLLMFacade
    from common.prompts import load_prompt

    title = os.environ.get("ISSUE_TITLE", "")
    body = os.environ.get("ISSUE_BODY", "")
    model_name = os.environ.get("DAK_LLM_MODEL", "gpt-4o")

    facade = SmartLLMFacade(api_key=key, model=model_name)

    # Issue body is truncated to keep the prompt within context limits.
    prompt = load_prompt(
        "dak_authoring", "l2_authoring",
        issue_title=title,
        issue_body=body[:4000],
    )

    print(f"🤖 Planning L2 content changes with {model_name}...")
    plan = facade.call(prompt, structured_output=True)

    print(f"Summary: {plan.get('summary', 'N/A')}")
    for change in plan.get("changes", []):
        print(f" {change.get('action', '?')}: {change.get('file', '?')}")
        print(f" {change.get('description', '')}")
+ +Return JSON: +```json +{{ + "reasoning": "Brief explanation of classification decision", + "labels": ["content:L1", "content:L2"] +}} +``` diff --git a/.github/skills/dak_authoring/prompts/l2_authoring.md b/.github/skills/dak_authoring/prompts/l2_authoring.md new file mode 100644 index 0000000000..2dbec40b39 --- /dev/null +++ b/.github/skills/dak_authoring/prompts/l2_authoring.md @@ -0,0 +1,36 @@ +# L2 Authoring + +You are a DAK L2 content author for WHO Digital Adaptation Kits. + +## Issue + +**Title:** {issue_title} +**Body:** {issue_body} + +## Available Components + +- BPMN business processes (input/business-processes/*.bpmn) +- Actor definitions (input/fsh/actors/) +- Questionnaires (input/fsh/questionnaires/) +- Decision tables (input/cql/ and input/fsh/plandefinitions/) +- Data elements (input/fsh/models/) + +## Instructions + +Based on the issue, determine what L2 DAK content needs to be created or modified. +Provide a structured plan with specific file changes. + +Return JSON: +```json +{{ + "summary": "...", + "components_affected": ["bpmn", "actors", ...], + "changes": [ + {{ + "file": "input/business-processes/example.bpmn", + "action": "create|modify", + "description": "..." 
+ }} + ] +}} +``` diff --git a/.github/skills/dak_authoring/skills.yaml b/.github/skills/dak_authoring/skills.yaml new file mode 100644 index 0000000000..ca760b431b --- /dev/null +++ b/.github/skills/dak_authoring/skills.yaml @@ -0,0 +1,17 @@ +# dak_authoring skill +name: dak_authoring +version: "0.1.0" +description: Issue classification and DAK L2 content authoring + +commands: + - name: classify-issue + description: Classify a GitHub issue and apply content labels + requires_llm: false # keyword fallback works without LLM + - name: author-l2 + description: Author L2 DAK content from an issue description + requires_llm: true + +prompts: + - classify_issue + - l2_authoring + - change_proposal diff --git a/.github/skills/docker-compose.yml b/.github/skills/docker-compose.yml new file mode 100644 index 0000000000..0613910b6c --- /dev/null +++ b/.github/skills/docker-compose.yml @@ -0,0 +1,27 @@ +# DAK Skills — local compose +# Alias: alias dak='docker compose -f .github/skills/docker-compose.yml run --rm' +# Usage: dak validate | dak validate-ig | dak import-bpmn | dak author "..." 
| dak shell + +x-dak: &dak + image: dak-skill:latest + build: + context: .github/skills + dockerfile: Dockerfile + volumes: + - .:/workspace + - dak-pkg:/var/lib/.fhir + - dak-igcache:/workspace/fhir-package-cache + env_file: [.env] + working_dir: /workspace + +services: + validate: { <<: *dak, command: [validate] } + validate-ig: { <<: *dak, command: [validate-ig] } + import-bpmn: { <<: *dak, command: [import-bpmn] } + build-ig: { <<: *dak, command: [build-ig] } + author: { <<: *dak, command: [author] } + shell: { <<: *dak, entrypoint: /bin/bash } + +volumes: + dak-pkg: + dak-igcache: diff --git a/.github/skills/ig_publisher/__init__.py b/.github/skills/ig_publisher/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/.github/skills/ig_publisher/actions/build_ig_action.py b/.github/skills/ig_publisher/actions/build_ig_action.py new file mode 100644 index 0000000000..000731d307 --- /dev/null +++ b/.github/skills/ig_publisher/actions/build_ig_action.py @@ -0,0 +1,40 @@ +""" +IG Publisher build action — runs the full IG Publisher build. 
def main() -> None:
    """Run the full IG Publisher build and exit non-zero on failure."""
    root = os.environ.get("DAK_IG_ROOT", ".")
    tx = os.environ.get("DAK_TX_SERVER", "")

    print("🏗️ Running full IG Publisher build...")
    # An empty DAK_TX_SERVER means "let the publisher pick its default".
    proc = run_ig_publisher(root, tx_server=tx if tx else None)

    print(proc.stdout)
    if proc.stderr:
        print(proc.stderr, file=sys.stderr)

    if proc.returncode != 0:
        print("❌ IG Publisher build failed.")
        sys.exit(1)

    print("✅ IG Publisher build completed successfully.")
def main() -> None:
    """Explain IG Publisher build errors with the LLM, when a key is set.

    Exits 0 immediately when no LLM key is configured so CI jobs without
    a key still succeed.
    """
    key = os.environ.get("DAK_LLM_API_KEY", "")
    if not key:
        print("⚠️ DAK_LLM_API_KEY not set — LLM step skipped (structural validation still runs)")
        sys.exit(0)

    from common.smart_llm_facade import SmartLLMFacade
    from common.prompts import load_prompt

    model_name = os.environ.get("DAK_LLM_MODEL", "gpt-4o-mini")
    facade = SmartLLMFacade(api_key=key, model=model_name)

    # Build output is handed over from the previous workflow step via env.
    log_text = os.environ.get("BUILD_OUTPUT", "No build output available.")

    prompt = load_prompt(
        "ig_publisher", "interpret_ig_errors",
        build_output=log_text[:8000],
        error_summary="See build output above.",
    )

    print(f"🤖 Interpreting errors with {model_name}...")
    report = facade.call(prompt, structured_output=True)

    print(f"Summary: {report.get('summary', 'N/A')}")
    for finding in report.get("findings", []):
        print(f" [{finding.get('severity')}] {finding.get('message')}")
        print(f" Fix: {finding.get('fix')}")
+ +Environment variables: + GITHUB_TOKEN — GitHub API token + PR_NUMBER — PR number for posting results + DAK_IG_ROOT — IG root directory (default: current directory) +""" + +import glob +import os +import sys +from pathlib import Path + +_SKILLS_ROOT = Path(__file__).resolve().parent.parent.parent +if str(_SKILLS_ROOT) not in sys.path: + sys.path.insert(0, str(_SKILLS_ROOT)) + +from common.ig_errors import Issue, error, warning, info, format_issues, has_errors +from bpmn_author.validators.bpmn_xml_validator import validate_bpmn_xml +from bpmn_author.validators.swimlane_validator import validate_swimlanes +from bpmn_import.validators.swimlane_actor_validator import validate_swimlane_actors + + +def validate_structure(ig_root: str) -> list: + """Run structural validation checks and return issues.""" + issues = [] + root = Path(ig_root) + + # Check required files + if not (root / "sushi-config.yaml").is_file(): + issues.append(warning("DAK-001", "sushi-config.yaml not found")) + + if not (root / "ig.ini").is_file(): + issues.append(warning("DAK-002", "ig.ini not found")) + + # Validate BPMN files + bpmn_files = glob.glob(str(root / "input" / "business-processes" / "*.bpmn")) + for bpmn_path in bpmn_files: + content = Path(bpmn_path).read_text(encoding="utf-8") + fname = os.path.basename(bpmn_path) + issues.extend(validate_bpmn_xml(content, filename=fname)) + issues.extend(validate_swimlanes(content, filename=fname)) + issues.extend(validate_swimlane_actors(content, ig_root=ig_root, filename=fname)) + + return issues + + +def main() -> None: + ig_root = os.environ.get("DAK_IG_ROOT", ".") + issues = validate_structure(ig_root) + print(format_issues(issues)) + + if has_errors(issues): + print("❌ DAK structural validation found errors.") + sys.exit(1) + + print("✅ DAK structural validation passed.") + + +if __name__ == "__main__": + main() diff --git a/.github/skills/ig_publisher/actions/validate_ig_action.py b/.github/skills/ig_publisher/actions/validate_ig_action.py 
new file mode 100644 index 0000000000..f23bf6a7dd --- /dev/null +++ b/.github/skills/ig_publisher/actions/validate_ig_action.py @@ -0,0 +1,40 @@ +""" +IG Publisher validation action — runs the FHIR IG Publisher in validation mode. + +Environment variables: + GITHUB_TOKEN — GitHub API token + DAK_IG_ROOT — IG root directory (default: current directory) + DAK_TX_SERVER — Terminology server URL (optional; default: n/a for offline) +""" + +import os +import sys +from pathlib import Path + +_SKILLS_ROOT = Path(__file__).resolve().parent.parent.parent +if str(_SKILLS_ROOT) not in sys.path: + sys.path.insert(0, str(_SKILLS_ROOT)) + +from common.ig_publisher_iface import run_ig_publisher + + +def main() -> None: + ig_root = os.environ.get("DAK_IG_ROOT", ".") + tx_server = os.environ.get("DAK_TX_SERVER", "n/a") + + print(f"🏗️ Running IG Publisher validation (tx={tx_server})...") + result = run_ig_publisher(ig_root, tx_server=tx_server) + + print(result.stdout) + if result.stderr: + print(result.stderr, file=sys.stderr) + + if result.returncode != 0: + print("❌ IG Publisher validation failed.") + sys.exit(1) + + print("✅ IG Publisher validation passed.") + + +if __name__ == "__main__": + main() diff --git a/.github/skills/ig_publisher/prompts/interpret_ig_errors.md b/.github/skills/ig_publisher/prompts/interpret_ig_errors.md new file mode 100644 index 0000000000..0d05f1eecc --- /dev/null +++ b/.github/skills/ig_publisher/prompts/interpret_ig_errors.md @@ -0,0 +1,38 @@ +# Interpret IG Publisher Errors + +You are helping a FHIR Implementation Guide author understand build errors. + +## Build Output + +{build_output} + +## Error Summary + +{error_summary} + +## Instructions + +For each FATAL or ERROR finding: +1. Explain what went wrong in plain language. +2. Identify the likely cause (missing profile, invalid reference, FSH syntax, etc.). +3. Suggest a concrete fix. + +For WARNINGs, briefly note whether they need attention. 
+ +Return your analysis as JSON: +```json +{{ + "summary": "...", + "fatal_count": 0, + "error_count": 0, + "warning_count": 0, + "findings": [ + {{ + "severity": "ERROR", + "message": "...", + "cause": "...", + "fix": "..." + }} + ] +}} +``` diff --git a/.github/skills/ig_publisher/prompts/validate_dak.md b/.github/skills/ig_publisher/prompts/validate_dak.md new file mode 100644 index 0000000000..b6ff6e02ca --- /dev/null +++ b/.github/skills/ig_publisher/prompts/validate_dak.md @@ -0,0 +1,34 @@ +# Validate DAK Structure + +Review the DAK repository structure for completeness and correctness. + +## Repository Root + +{ig_root} + +## Files Found + +{file_listing} + +## Validation Results + +{validation_results} + +## Instructions + +Check: +1. Required directories exist (input/fsh/, input/business-processes/, etc.) +2. sushi-config.yaml is present and valid +3. All referenced profiles and extensions exist +4. No broken cross-references between BPMN lanes and ActorDefinition files + +Return your analysis as JSON: +```json +{{ + "valid": true/false, + "summary": "...", + "issues": [ + {{"severity": "...", "message": "...", "fix": "..."}} + ] +}} +``` diff --git a/.github/skills/ig_publisher/skills.yaml b/.github/skills/ig_publisher/skills.yaml new file mode 100644 index 0000000000..83fd29dff1 --- /dev/null +++ b/.github/skills/ig_publisher/skills.yaml @@ -0,0 +1,22 @@ +# ig_publisher skill +name: ig_publisher +version: "0.1.0" +description: IG Publisher validation, build, and error interpretation + +commands: + - name: validate-dak + description: Run DAK structural validation (no IG Publisher needed) + requires_llm: false + - name: validate-ig + description: Run full IG Publisher validation + requires_llm: false + - name: build-ig + description: Run full IG Publisher build + requires_llm: false + - name: interpret-errors + description: Interpret IG Publisher errors using LLM + requires_llm: true + +prompts: + - interpret_ig_errors + - validate_dak diff --git 
a/.github/skills/l1_review/actions/l1_review_action.py b/.github/skills/l1_review/actions/l1_review_action.py new file mode 100644 index 0000000000..c41d38c03b --- /dev/null +++ b/.github/skills/l1_review/actions/l1_review_action.py @@ -0,0 +1,22 @@ +""" +L1 review action — placeholder for WHO source guideline review skill. + +This skill will be implemented in a future version. +""" + +import os +import sys + + +def main() -> None: + api_key = os.environ.get("DAK_LLM_API_KEY", "") + if not api_key: + print("⚠️ DAK_LLM_API_KEY not set — LLM step skipped") + sys.exit(0) + + print("ℹ️ L1 review skill is not yet implemented (planned for v0.2)") + sys.exit(0) + + +if __name__ == "__main__": + main() diff --git a/.github/skills/l1_review/skills.yaml b/.github/skills/l1_review/skills.yaml new file mode 100644 index 0000000000..28c9187577 --- /dev/null +++ b/.github/skills/l1_review/skills.yaml @@ -0,0 +1,9 @@ +# l1_review skill (stub — v0.2) +name: l1_review +version: "0.1.0" +description: WHO L1 source guideline review (placeholder for future implementation) + +commands: + - name: review-l1 + description: Review L1 guideline content + requires_llm: true diff --git a/.github/skills/l3_review/actions/l3_review_action.py b/.github/skills/l3_review/actions/l3_review_action.py new file mode 100644 index 0000000000..03c932d79a --- /dev/null +++ b/.github/skills/l3_review/actions/l3_review_action.py @@ -0,0 +1,22 @@ +""" +L3 review action — placeholder for implementation adaptation review skill. + +This skill will be implemented in a future version. 
+""" + +import os +import sys + + +def main() -> None: + api_key = os.environ.get("DAK_LLM_API_KEY", "") + if not api_key: + print("⚠️ DAK_LLM_API_KEY not set — LLM step skipped") + sys.exit(0) + + print("ℹ️ L3 review skill is not yet implemented (planned for v0.3)") + sys.exit(0) + + +if __name__ == "__main__": + main() diff --git a/.github/skills/l3_review/skills.yaml b/.github/skills/l3_review/skills.yaml new file mode 100644 index 0000000000..0e1fe3f344 --- /dev/null +++ b/.github/skills/l3_review/skills.yaml @@ -0,0 +1,9 @@ +# l3_review skill (stub — v0.3) +name: l3_review +version: "0.1.0" +description: Implementation adaptation review (placeholder for future implementation) + +commands: + - name: review-l3 + description: Review L3 implementation adaptations + requires_llm: true diff --git a/.github/skills/skills_registry.yaml b/.github/skills/skills_registry.yaml new file mode 100644 index 0000000000..94ee4d153f --- /dev/null +++ b/.github/skills/skills_registry.yaml @@ -0,0 +1,47 @@ +# DAK Skill Library — Registry +# +# Lists all registered skills with their entry points and capabilities. +# Used by the CLI and GitHub Actions to discover available skills. 
+ +skills: + - name: bpmn_author + version: "0.1.0" + description: Author and edit standard BPMN 2.0 XML for DAK business processes + commands: [create-bpmn, edit-bpmn, validate-bpmn] + requires_llm_for: [create-bpmn, edit-bpmn] + + - name: bpmn_import + version: "0.1.0" + description: Import BPMN files and validate lane-to-actor mapping + commands: [import-bpmn, validate-actors] + requires_llm_for: [] + + - name: ig_publisher + version: "0.1.0" + description: IG Publisher validation, build, and error interpretation + commands: [validate-dak, validate-ig, build-ig, interpret-errors] + requires_llm_for: [interpret-errors] + + - name: dak_authoring + version: "0.1.0" + description: Issue classification and DAK L2 content authoring + commands: [classify-issue, author-l2] + requires_llm_for: [author-l2] + + - name: l1_review + version: "0.1.0" + description: WHO L1 source guideline review (placeholder) + commands: [review-l1] + requires_llm_for: [review-l1] + + - name: l3_review + version: "0.1.0" + description: Implementation adaptation review (placeholder) + commands: [review-l3] + requires_llm_for: [review-l3] + + - name: translation + version: "0.1.0" + description: Translation management (placeholder) + commands: [manage-translation] + requires_llm_for: [] diff --git a/.github/skills/translation/actions/translation_action.py b/.github/skills/translation/actions/translation_action.py new file mode 100644 index 0000000000..8bd7388edd --- /dev/null +++ b/.github/skills/translation/actions/translation_action.py @@ -0,0 +1,17 @@ +""" +Translation action — placeholder for translation management skill. + +This skill will be implemented in a future version. 
+""" + +import os +import sys + + +def main() -> None: + print("ℹ️ Translation skill is not yet implemented (planned for v0.3)") + sys.exit(0) + + +if __name__ == "__main__": + main() diff --git a/.github/skills/translation/skills.yaml b/.github/skills/translation/skills.yaml new file mode 100644 index 0000000000..a366ee266a --- /dev/null +++ b/.github/skills/translation/skills.yaml @@ -0,0 +1,9 @@ +# translation skill (stub — v0.3) +name: translation +version: "0.1.0" +description: Translation management (placeholder for future implementation) + +commands: + - name: manage-translation + description: Manage translation for DAK content + requires_llm: false diff --git a/.github/workflows/classify-issue.yml b/.github/workflows/classify-issue.yml new file mode 100644 index 0000000000..f6c4a790a5 --- /dev/null +++ b/.github/workflows/classify-issue.yml @@ -0,0 +1,32 @@ +name: Classify Issue + +on: + issues: + types: [opened, edited] + +jobs: + classify: + runs-on: ubuntu-latest + permissions: + issues: write + contents: read + steps: + - uses: actions/checkout@v4 + + - name: Check DAK enabled + id: dak + run: | + [ -f dak.json ] \ + && echo "enabled=true" >> $GITHUB_OUTPUT \ + || echo "enabled=false" >> $GITHUB_OUTPUT + + - name: Classify and label + if: steps.dak.outputs.enabled == 'true' + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + DAK_LLM_API_KEY: ${{ secrets.DAK_LLM_API_KEY }} + DAK_LLM_MODEL: ${{ vars.DAK_LLM_MODEL || 'gpt-4o-mini' }} + ISSUE_NUMBER: ${{ github.event.issue.number }} + ISSUE_TITLE: ${{ github.event.issue.title }} + ISSUE_BODY: ${{ github.event.issue.body }} + run: python3 .github/skills/dak_authoring/actions/classify_issue_action.py diff --git a/.github/workflows/pr-validate-slash.yml b/.github/workflows/pr-validate-slash.yml new file mode 100644 index 0000000000..d0a83f713c --- /dev/null +++ b/.github/workflows/pr-validate-slash.yml @@ -0,0 +1,55 @@ +name: PR Slash-Command Validate + +on: + issue_comment: + types: [created] + +jobs: + 
validate: + if: > + github.event.issue.pull_request != null && + startsWith(github.event.comment.body, '/validate') + runs-on: ubuntu-latest + permissions: + issues: write + pull-requests: read + contents: read + + steps: + - name: Acknowledge + uses: actions/github-script@v7 + with: + script: | + await github.rest.reactions.createForIssueComment({ + owner: context.repo.owner, repo: context.repo.repo, + comment_id: context.payload.comment.id, content: 'eyes', + }); + + - name: Get PR branch + id: pr + uses: actions/github-script@v7 + with: + script: | + const pr = await github.rest.pulls.get({ + owner: context.repo.owner, repo: context.repo.repo, + pull_number: context.issue.number, + }); + core.setOutput('branch', pr.data.head.ref); + + - uses: actions/checkout@v4 + with: + ref: ${{ steps.pr.outputs.branch }} + + - name: Run DAK structural validation (always runs, no key needed) + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + PR_NUMBER: ${{ github.event.issue.number }} + run: python3 .github/skills/ig_publisher/actions/validate_dak_action.py + + - name: Run LLM error interpretation (skipped if no key) + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + DAK_LLM_API_KEY: ${{ secrets.DAK_LLM_API_KEY }} + DAK_LLM_MODEL: ${{ vars.DAK_LLM_MODEL || 'gpt-4o-mini' }} + PR_NUMBER: ${{ github.event.issue.number }} + run: python3 .github/skills/ig_publisher/actions/interpret_errors_action.py diff --git a/.github/workflows/skill-l1-review.yml b/.github/workflows/skill-l1-review.yml new file mode 100644 index 0000000000..e12f1a7b4f --- /dev/null +++ b/.github/workflows/skill-l1-review.yml @@ -0,0 +1,26 @@ +name: L1 Guideline Review Skill + +on: + issues: + types: [labeled] + +jobs: + l1-review: + if: github.event.label.name == 'content:L1' + runs-on: ubuntu-latest + permissions: + issues: write + contents: read + + steps: + - uses: actions/checkout@v4 + + - name: Run L1 review skill + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + DAK_LLM_API_KEY: ${{ 
secrets.DAK_LLM_API_KEY }} + DAK_LLM_MODEL: ${{ vars.DAK_LLM_MODEL || 'gpt-4o' }} + ISSUE_NUMBER: ${{ github.event.issue.number }} + ISSUE_TITLE: ${{ github.event.issue.title }} + ISSUE_BODY: ${{ github.event.issue.body }} + run: python3 .github/skills/l1_review/actions/l1_review_action.py diff --git a/.github/workflows/skill-l2-dak.yml b/.github/workflows/skill-l2-dak.yml new file mode 100644 index 0000000000..2960aff681 --- /dev/null +++ b/.github/workflows/skill-l2-dak.yml @@ -0,0 +1,27 @@ +name: L2 DAK Content Skill + +on: + issues: + types: [labeled] + +jobs: + dak-authoring: + if: github.event.label.name == 'content:L2' + runs-on: ubuntu-latest + permissions: + issues: write + pull-requests: write + contents: write + + steps: + - uses: actions/checkout@v4 + + - name: Run L2 DAK authoring skill + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + DAK_LLM_API_KEY: ${{ secrets.DAK_LLM_API_KEY }} + DAK_LLM_MODEL: ${{ vars.DAK_LLM_MODEL || 'gpt-4o' }} + ISSUE_NUMBER: ${{ github.event.issue.number }} + ISSUE_TITLE: ${{ github.event.issue.title }} + ISSUE_BODY: ${{ github.event.issue.body }} + run: python3 .github/skills/dak_authoring/actions/dak_authoring_action.py diff --git a/.github/workflows/skill-l3-review.yml b/.github/workflows/skill-l3-review.yml new file mode 100644 index 0000000000..9f9c6cdaf4 --- /dev/null +++ b/.github/workflows/skill-l3-review.yml @@ -0,0 +1,26 @@ +name: L3 Implementation Review Skill + +on: + issues: + types: [labeled] + +jobs: + l3-review: + if: github.event.label.name == 'content:L3' + runs-on: ubuntu-latest + permissions: + issues: write + contents: read + + steps: + - uses: actions/checkout@v4 + + - name: Run L3 review skill + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + DAK_LLM_API_KEY: ${{ secrets.DAK_LLM_API_KEY }} + DAK_LLM_MODEL: ${{ vars.DAK_LLM_MODEL || 'gpt-4o' }} + ISSUE_NUMBER: ${{ github.event.issue.number }} + ISSUE_TITLE: ${{ github.event.issue.title }} + ISSUE_BODY: ${{ github.event.issue.body }} + run: python3 
.github/skills/l3_review/actions/l3_review_action.py diff --git a/.github/workflows/skill-translation.yml b/.github/workflows/skill-translation.yml new file mode 100644 index 0000000000..73d043ac7d --- /dev/null +++ b/.github/workflows/skill-translation.yml @@ -0,0 +1,24 @@ +name: Translation Skill + +on: + issues: + types: [labeled] + +jobs: + translation: + if: github.event.label.name == 'content:translation' + runs-on: ubuntu-latest + permissions: + issues: write + contents: read + + steps: + - uses: actions/checkout@v4 + + - name: Run translation skill + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + ISSUE_NUMBER: ${{ github.event.issue.number }} + ISSUE_TITLE: ${{ github.event.issue.title }} + ISSUE_BODY: ${{ github.event.issue.body }} + run: python3 .github/skills/translation/actions/translation_action.py diff --git a/.gitignore b/.gitignore index 41af260c0b..cfe2a07459 100644 --- a/.gitignore +++ b/.gitignore @@ -26,6 +26,10 @@ Thumbs.db ########## __pycache__/ +# Local environment config (API keys, etc.) # +############################################## +.env + # Gettext translation templates — generated at build time. # # .pot files inside translations/ subdirectories are committed so that # # Weblate can pick them up; all other .pot files are ignored. 
# diff --git a/labels/content_L2.json b/labels/content_L2.json new file mode 100644 index 0000000000..bf42bc00a9 --- /dev/null +++ b/labels/content_L2.json @@ -0,0 +1 @@ +{"name": "content:L2", "color": "e4e669", "description": "DAK FHIR assets: BPMN, actors, questionnaires, CQL, data elements"} \ No newline at end of file diff --git a/labels/content_L3.json b/labels/content_L3.json new file mode 100644 index 0000000000..496744ed5f --- /dev/null +++ b/labels/content_L3.json @@ -0,0 +1 @@ +{"name": "content:L3", "color": "d73a4a", "description": "Implementation adaptations: national/program-level customizations"} \ No newline at end of file diff --git a/labels/content_translation.json b/labels/content_translation.json new file mode 100644 index 0000000000..a1bb055a35 --- /dev/null +++ b/labels/content_translation.json @@ -0,0 +1 @@ +{"name": "content:translation", "color": "0e8a16", "description": "Translation of any content layer"} \ No newline at end of file From dd6bca028a168b72be4c45e8413a0eecb680e293 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 5 Mar 2026 22:39:43 +0000 Subject: [PATCH 03/14] Fix BPMN namespace check verbosity and keyword classifier false positives MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Deduplicate Zeebe/Camunda namespace detection to report once per namespace - Add word-boundary matching for short keywords (≤3 chars) in classifier to prevent false positives from language codes like 'ar' matching 'pharmacist' Co-authored-by: litlfred <662242+litlfred@users.noreply.github.com> --- .../validators/bpmn_xml_validator.py | 20 +++++----------- .../actions/classify_issue_action.py | 23 +++++++++++++++---- 2 files changed, 25 insertions(+), 18 deletions(-) diff --git a/.github/skills/bpmn_author/validators/bpmn_xml_validator.py b/.github/skills/bpmn_author/validators/bpmn_xml_validator.py index 99bb294687..338c1a2b03 100644 --- 
a/.github/skills/bpmn_author/validators/bpmn_xml_validator.py +++ b/.github/skills/bpmn_author/validators/bpmn_xml_validator.py @@ -48,27 +48,19 @@ def validate_bpmn_xml(bpmn_content: str, *, filename: str = "unknown.bpmn") -> L file=filename, )) - # 3. Forbidden vendor namespaces - nsmap = tree.nsmap if hasattr(tree, "nsmap") else {} + # 3. Forbidden vendor namespaces (check all unique namespace URIs in document) + seen_ns: set = set() + for elem in tree.iter(): + for uri in (elem.nsmap or {}).values(): + seen_ns.add(uri) for uri, vendor in _FORBIDDEN_NAMESPACES.items(): - if uri in nsmap.values(): + if uri in seen_ns: issues.append(error( "BPMN-003", f"Forbidden {vendor} namespace detected: {uri}", file=filename, )) - # Also check for vendor namespaces on any descendant - for elem in tree.iter(): - for uri, vendor in _FORBIDDEN_NAMESPACES.items(): - if uri in (elem.nsmap or {}).values(): - issues.append(error( - "BPMN-003", - f"Forbidden {vendor} namespace on <{elem.tag}>: {uri}", - file=filename, - )) - break # one per element is enough - # 4. At least one process processes = tree.findall(f"{{{BPMN_NS}}}process") if not processes: diff --git a/.github/skills/dak_authoring/actions/classify_issue_action.py b/.github/skills/dak_authoring/actions/classify_issue_action.py index a25915bfdb..3416b06560 100644 --- a/.github/skills/dak_authoring/actions/classify_issue_action.py +++ b/.github/skills/dak_authoring/actions/classify_issue_action.py @@ -98,17 +98,32 @@ ] +import re + + +def _keyword_in_text(keyword: str, text: str) -> bool: + """Check if *keyword* appears in *text*. + + Keywords of 3 characters or fewer are matched as whole words only + (word-boundary check) to avoid false positives from short language + codes like ``"ar"`` matching inside ``"pharmacist"``. 
+ """ + if len(keyword) <= 3: + return bool(re.search(r'\b' + re.escape(keyword) + r'\b', text)) + return keyword in text + + def classify_by_keywords(title: str, body: str) -> list: """Keyword-based fallback classifier. Case-insensitive. No LLM needed.""" text = (title + " " + (body or "")).lower() labels = [] - if any(k in text for k in L1_KEYWORDS): + if any(_keyword_in_text(k, text) for k in L1_KEYWORDS): labels.append("content:L1") - if any(k in text for k in L2_KEYWORDS): + if any(_keyword_in_text(k, text) for k in L2_KEYWORDS): labels.append("content:L2") - if any(k in text for k in L3_KEYWORDS): + if any(_keyword_in_text(k, text) for k in L3_KEYWORDS): labels.append("content:L3") - if any(k in text for k in TRANSLATION_KEYWORDS): + if any(_keyword_in_text(k, text) for k in TRANSLATION_KEYWORDS): labels.append("content:translation") return labels From 03effd5e3677b61bffe81c5a3d1753ffcb4714f2 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 5 Mar 2026 22:41:11 +0000 Subject: [PATCH 04/14] Address code review feedback - Fix docstring style consistency in classify_issue_action.py - Clarify _SafeDict docstring in prompts.py - Remove unnecessary noqa comment in smart_llm_facade.py - Add comment explaining --break-system-packages in Dockerfile - Document dak.json check in classify-issue.yml workflow - Add return type annotation to main() in classify_issue_action.py Co-authored-by: litlfred <662242+litlfred@users.noreply.github.com> --- .github/skills/Dockerfile | 2 ++ .github/skills/common/prompts.py | 2 +- .github/skills/common/smart_llm_facade.py | 2 +- .github/skills/dak_authoring/actions/classify_issue_action.py | 4 ++-- .github/workflows/classify-issue.yml | 2 ++ 5 files changed, 8 insertions(+), 4 deletions(-) diff --git a/.github/skills/Dockerfile b/.github/skills/Dockerfile index 08ef8be522..0dd1549794 100644 --- a/.github/skills/Dockerfile +++ b/.github/skills/Dockerfile @@ -8,6 +8,8 @@ LABEL 
org.opencontainers.image.title="DAK Skill Library" LABEL org.opencontainers.image.source="https://github.com/WorldHealthOrganization/smart-base" # Python packages — identical to ghbuild.yml +# --break-system-packages is required because the base image uses Debian's +# externally-managed Python; a venv is unnecessary inside a disposable container. RUN apt-get update && apt-get install -y --no-install-recommends \ python3 python3-pip python3-venv \ && ln -sf /usr/bin/python3 /usr/bin/python \ diff --git a/.github/skills/common/prompts.py b/.github/skills/common/prompts.py index b6104cc5a6..969022a5a2 100644 --- a/.github/skills/common/prompts.py +++ b/.github/skills/common/prompts.py @@ -59,7 +59,7 @@ def load_prompt(skill_name: str, prompt_name: str, **variables: Any) -> str: class _SafeDict(dict): - """dict subclass that returns ``{key}`` for missing keys instead of raising.""" + """dict subclass that returns ``{key}`` for missing keys instead of raising KeyError.""" def __missing__(self, key: str) -> str: return "{" + key + "}" diff --git a/.github/skills/common/smart_llm_facade.py b/.github/skills/common/smart_llm_facade.py index 9bf818c0b5..89f7b03b0b 100644 --- a/.github/skills/common/smart_llm_facade.py +++ b/.github/skills/common/smart_llm_facade.py @@ -68,7 +68,7 @@ def call( ) try: - import litellm # noqa: F811 + import litellm except ImportError as exc: raise ImportError( "litellm is required for LLM features. " diff --git a/.github/skills/dak_authoring/actions/classify_issue_action.py b/.github/skills/dak_authoring/actions/classify_issue_action.py index 3416b06560..fcc112c560 100644 --- a/.github/skills/dak_authoring/actions/classify_issue_action.py +++ b/.github/skills/dak_authoring/actions/classify_issue_action.py @@ -102,7 +102,7 @@ def _keyword_in_text(keyword: str, text: str) -> bool: - """Check if *keyword* appears in *text*. + """Check if ``keyword`` appears in ``text``. 
Keywords of 3 characters or fewer are matched as whole words only (word-boundary check) to avoid false positives from short language @@ -149,7 +149,7 @@ def apply_labels(issue_number: int, labels: list) -> None: print(f"\u2705 Applied labels: {labels}") -def main(): +def main() -> None: from common.prompts import load_prompt from common.smart_llm_facade import SmartLLMFacade diff --git a/.github/workflows/classify-issue.yml b/.github/workflows/classify-issue.yml index f6c4a790a5..7d3f13efd7 100644 --- a/.github/workflows/classify-issue.yml +++ b/.github/workflows/classify-issue.yml @@ -14,6 +14,8 @@ jobs: - uses: actions/checkout@v4 - name: Check DAK enabled + # dak.json in the repo root signals that DAK skill features are active. + # See .github/skills/README.md for configuration details. id: dak run: | [ -f dak.json ] \ From d03e79f4241603ad9ecc47cc9b21c9026f57b01c Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 5 Mar 2026 22:43:04 +0000 Subject: [PATCH 05/14] Fix critical security issue in pr-validate-slash.yml Replace direct checkout+run of untrusted PR code with safe dispatch pattern (matching pr-deploy-slash.yml). The workflow now dispatches to ghbuild.yml instead of executing scripts from the PR branch in a privileged issue_comment context. Co-authored-by: litlfred <662242+litlfred@users.noreply.github.com> --- .github/workflows/pr-validate-slash.yml | 53 +++++++++++++++++-------- 1 file changed, 36 insertions(+), 17 deletions(-) diff --git a/.github/workflows/pr-validate-slash.yml b/.github/workflows/pr-validate-slash.yml index d0a83f713c..7521990a9d 100644 --- a/.github/workflows/pr-validate-slash.yml +++ b/.github/workflows/pr-validate-slash.yml @@ -1,22 +1,29 @@ name: PR Slash-Command Validate +# Lets any collaborator post /validate in a PR comment to manually trigger +# DAK structural validation for that PR's branch. 
+# +# Security: This workflow uses the dispatch pattern (like pr-deploy-slash.yml) +# to avoid running untrusted PR code in a privileged issue_comment context. + on: issue_comment: types: [created] jobs: - validate: + dispatch: + # Only run on pull-request comments that start with /validate if: > github.event.issue.pull_request != null && startsWith(github.event.comment.body, '/validate') runs-on: ubuntu-latest permissions: issues: write + actions: write pull-requests: read - contents: read steps: - - name: Acknowledge + - name: Acknowledge the slash command with a reaction uses: actions/github-script@v7 with: script: | @@ -25,7 +32,7 @@ jobs: comment_id: context.payload.comment.id, content: 'eyes', }); - - name: Get PR branch + - name: Get the PR branch name id: pr uses: actions/github-script@v7 with: @@ -36,20 +43,32 @@ jobs: }); core.setOutput('branch', pr.data.head.ref); - - uses: actions/checkout@v4 + - name: Trigger ghbuild.yml for the PR branch + uses: actions/github-script@v7 + # Pass the branch name via env to avoid script injection from untrusted data + env: + BRANCH_REF: ${{ steps.pr.outputs.branch }} with: - ref: ${{ steps.pr.outputs.branch }} + script: | + const branchRef = process.env.BRANCH_REF; + await github.rest.actions.createWorkflowDispatch({ + owner: context.repo.owner, repo: context.repo.repo, + workflow_id: 'ghbuild.yml', + ref: branchRef, + inputs: { do_dak: 'true' }, + }); - - name: Run DAK structural validation (always runs, no key needed) + - name: Post a confirmation comment + uses: actions/github-script@v7 env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - PR_NUMBER: ${{ github.event.issue.number }} - run: python3 .github/skills/ig_publisher/actions/validate_dak_action.py + BRANCH_REF: ${{ steps.pr.outputs.branch }} + with: + script: | + const branch = process.env.BRANCH_REF; + const runsUrl = `https://github.com/${context.repo.owner}/${context.repo.repo}/actions/workflows/ghbuild.yml`; + await github.rest.issues.createComment({ + 
owner: context.repo.owner, repo: context.repo.repo, + issue_number: context.issue.number, + body: `👁️ **Validation triggered** for branch \`${branch}\`.\n\nThe DAK validation build is queued — [watch progress](${runsUrl}).`, + }); - - name: Run LLM error interpretation (skipped if no key) - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - DAK_LLM_API_KEY: ${{ secrets.DAK_LLM_API_KEY }} - DAK_LLM_MODEL: ${{ vars.DAK_LLM_MODEL || 'gpt-4o-mini' }} - PR_NUMBER: ${{ github.event.issue.number }} - run: python3 .github/skills/ig_publisher/actions/interpret_errors_action.py From 60205684383465f80e0f0c47b378d4eaca8d639b Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Fri, 6 Mar 2026 02:32:59 +0000 Subject: [PATCH 06/14] Replace SmartLLMFacade with direct LiteLLM usage via llm_utils.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Remove the custom SmartLLMFacade class. LiteLLM (20k+ GitHub stars, MIT) is already the trusted external library. The new llm_utils.py provides only DAK-specific env var bridging and JSON-extraction helpers on top of litellm.completion() — no custom facade to maintain. 
Rename: smart_llm_facade.py → llm_utils.py Export: dak_completion(), parse_json_response(), is_llm_available(), get_llm_config() Update: all 4 callers (classify, dak_authoring, bpmn_author, interpret_errors) Co-authored-by: litlfred <662242+litlfred@users.noreply.github.com> --- .github/skills/README.md | 11 +- .../bpmn_author/actions/bpmn_author_action.py | 6 +- .github/skills/common/llm_utils.py | 134 ++++++++++++++++++ .github/skills/common/smart_llm_facade.py | 116 --------------- .../actions/classify_issue_action.py | 6 +- .../actions/dak_authoring_action.py | 6 +- .../actions/interpret_errors_action.py | 5 +- 7 files changed, 150 insertions(+), 134 deletions(-) create mode 100644 .github/skills/common/llm_utils.py delete mode 100644 .github/skills/common/smart_llm_facade.py diff --git a/.github/skills/README.md b/.github/skills/README.md index 9de5f0ad9c..f339fbd0da 100644 --- a/.github/skills/README.md +++ b/.github/skills/README.md @@ -88,7 +88,7 @@ Skills run automatically via GitHub Actions workflows: ├── cli/ │ └── dak_skill.py # CLI entry point ├── common/ -│ ├── smart_llm_facade.py # LLM interface (attributed from bpmn-assistant) +│ ├── llm_utils.py # LLM helpers — thin wrappers around LiteLLM │ ├── prompts.py # load_prompt() — .md templates with {variable} │ ├── ig_errors.py # FATAL/ERROR/WARNING/INFORMATION format │ ├── fsh_utils.py # FSH file utilities @@ -103,7 +103,10 @@ Skills run automatically via GitHub Actions workflows: └── translation/ # (placeholder v0.3) ``` -## LLM Attribution +## LLM Provider -The `SmartLLMFacade` in `common/smart_llm_facade.py` is copy-lifted with attribution -from [jtlicardo/bpmn-assistant](https://github.com/jtlicardo/bpmn-assistant) (MIT License). +LLM features use [LiteLLM](https://github.com/BerriAI/litellm) (MIT License) — +a well-maintained multi-provider library (OpenAI, Anthropic, Google, etc.) +with 20k+ GitHub stars. 
The `common/llm_utils.py` module adds only DAK-specific +environment variable bridging and JSON-extraction helpers on top of LiteLLM; +there is no custom LLM facade to maintain. diff --git a/.github/skills/bpmn_author/actions/bpmn_author_action.py b/.github/skills/bpmn_author/actions/bpmn_author_action.py index eb4464a044..cb2036a279 100644 --- a/.github/skills/bpmn_author/actions/bpmn_author_action.py +++ b/.github/skills/bpmn_author/actions/bpmn_author_action.py @@ -31,15 +31,13 @@ def main() -> None: print("⚠️ DAK_LLM_API_KEY not set — LLM step skipped (structural validation still runs)") sys.exit(0) - from common.smart_llm_facade import SmartLLMFacade + from common.llm_utils import dak_completion from common.prompts import load_prompt issue_title = os.environ.get("ISSUE_TITLE", "") issue_body = os.environ.get("ISSUE_BODY", "") model = os.environ.get("DAK_LLM_MODEL", "gpt-4o") - llm = SmartLLMFacade(api_key=api_key, model=model) - prompt = load_prompt( "bpmn_author", "create_or_edit_bpmn", user_request=f"{issue_title}\n\n{issue_body}", @@ -47,7 +45,7 @@ def main() -> None: ) print(f"🤖 Requesting BPMN from {model}...") - bpmn_xml = llm.call(prompt) + bpmn_xml = dak_completion(prompt, api_key=api_key, model=model) # Validate the generated BPMN issues = validate_bpmn_xml(bpmn_xml, filename="generated.bpmn") diff --git a/.github/skills/common/llm_utils.py b/.github/skills/common/llm_utils.py new file mode 100644 index 0000000000..4f28b37ad7 --- /dev/null +++ b/.github/skills/common/llm_utils.py @@ -0,0 +1,134 @@ +""" +DAK LLM utilities — thin helpers around LiteLLM. + +LiteLLM (https://github.com/BerriAI/litellm, MIT License) is the trusted +external library that provides multi-provider LLM support (OpenAI, Anthropic, +Google, etc.) via a single ``completion()`` call. This module adds only +DAK-specific environment-variable bridging and a JSON-extraction helper. 
+ +No custom LLM facade to maintain — callers use ``litellm.completion()`` +directly via the convenience wrapper below. + +Usage: + from common.llm_utils import dak_completion, parse_json_response + + text = dak_completion("Explain BPMN swimlanes") + data = dak_completion(prompt, structured_output=True) +""" + +import json +import logging +import os +from typing import Any, Dict, List, Optional, Union + +logger = logging.getLogger(__name__) + + +def get_llm_config() -> tuple: + """Return ``(api_key, model)`` from DAK environment variables. + + Reads: + ``DAK_LLM_API_KEY`` — LLM provider API key (repo secret or ``.env``). + ``DAK_LLM_MODEL`` — model identifier (default ``gpt-4o``). + """ + api_key = os.environ.get("DAK_LLM_API_KEY", "") + model = os.environ.get("DAK_LLM_MODEL", "gpt-4o") + return api_key, model + + +def is_llm_available() -> bool: + """Return True if a DAK LLM API key is configured.""" + return bool(os.environ.get("DAK_LLM_API_KEY", "")) + + +def dak_completion( + prompt: str, + *, + system_prompt: str = "", + structured_output: bool = False, + temperature: float = 0.2, + api_key: Optional[str] = None, + model: Optional[str] = None, +) -> Union[str, Dict[str, Any]]: + """Call an LLM via LiteLLM with DAK environment defaults. + + This is a thin convenience wrapper around ``litellm.completion()`` + that reads ``DAK_LLM_API_KEY`` / ``DAK_LLM_MODEL`` from the + environment so callers don't repeat the boilerplate. + + Args: + prompt: User prompt text. + system_prompt: Optional system-level instruction. + structured_output: When True, parse the response as JSON. + temperature: Sampling temperature. + api_key: Override for ``DAK_LLM_API_KEY``. + model: Override for ``DAK_LLM_MODEL``. + + Returns: + ``str`` (plain text) or ``dict`` (when *structured_output* is True). + + Raises: + RuntimeError: If no API key is available. + ImportError: If ``litellm`` is not installed. 
+ """ + _api_key = api_key or os.environ.get("DAK_LLM_API_KEY", "") + _model = model or os.environ.get("DAK_LLM_MODEL", "gpt-4o") + + if not _api_key: + raise RuntimeError( + "DAK_LLM_API_KEY not set — cannot call LLM. " + "Set the key in a repo secret or local .env file." + ) + + try: + import litellm + except ImportError as exc: + raise ImportError( + "litellm is required for LLM features. " + "Install it with: pip install 'litellm>=1.0.0'" + ) from exc + + messages: List[Dict[str, str]] = [] + if system_prompt: + messages.append({"role": "system", "content": system_prompt}) + messages.append({"role": "user", "content": prompt}) + + logger.info("LLM request: model=%s tokens≈%d", _model, len(prompt) // 4) + + response = litellm.completion( + model=_model, + messages=messages, + temperature=temperature, + api_key=_api_key, + ) + + text: str = response.choices[0].message.content.strip() + + if structured_output: + return parse_json_response(text) + return text + + +def parse_json_response(text: str) -> Dict[str, Any]: + """Best-effort JSON extraction from LLM response text. + + Handles bare JSON, ``json`` code-fenced blocks, and generic fences. + Falls back to ``{"raw": text}`` when parsing fails. + """ + # Try direct parse first + try: + return json.loads(text) + except json.JSONDecodeError: + pass + # Try extracting from markdown code fence + for fence in ("```json", "```"): + if fence in text: + start = text.index(fence) + len(fence) + end = text.index("```", start) + try: + return json.loads(text[start:end].strip()) + except (json.JSONDecodeError, ValueError): + pass + # Last resort: return as dict with raw text + return {"raw": text} + diff --git a/.github/skills/common/smart_llm_facade.py b/.github/skills/common/smart_llm_facade.py deleted file mode 100644 index 89f7b03b0b..0000000000 --- a/.github/skills/common/smart_llm_facade.py +++ /dev/null @@ -1,116 +0,0 @@ -""" -SMART LLM Facade — thin wrapper around LiteLLM for DAK skill actions. 
- -Copy-lifted with gratitude and attribution from: - https://github.com/jtlicardo/bpmn-assistant (MIT License) - -Original: LLMFacade class by jtlicardo -Adapted for WHO SMART Guidelines DAK skill library. - -Usage: - from common.smart_llm_facade import SmartLLMFacade - - llm = SmartLLMFacade(api_key="sk-...", model="gpt-4o") - answer = llm.call("Explain BPMN swimlanes") - structured = llm.call(prompt, structured_output=True) -""" - -import json -import logging -import os -from typing import Any, Dict, Optional - -logger = logging.getLogger(__name__) - - -class SmartLLMFacade: - """Minimal LLM facade using LiteLLM for multi-provider support.""" - - def __init__( - self, - api_key: Optional[str] = None, - model: Optional[str] = None, - ) -> None: - self.api_key = api_key or os.environ.get("DAK_LLM_API_KEY", "") - self.model = model or os.environ.get("DAK_LLM_MODEL", "gpt-4o") - - def is_available(self) -> bool: - """Return True if an API key is configured.""" - return bool(self.api_key) - - def call( - self, - prompt: str, - *, - system_prompt: str = "", - structured_output: bool = False, - temperature: float = 0.2, - ) -> Any: - """Send a prompt to the configured LLM and return the response. - - Args: - prompt: User prompt text. - system_prompt: Optional system-level instruction. - structured_output: When True, attempt to parse the response as JSON. - temperature: Sampling temperature. - - Returns: - str or dict depending on *structured_output*. - - Raises: - RuntimeError: If no API key is configured. - ImportError: If litellm is not installed. - """ - if not self.api_key: - raise RuntimeError( - "DAK_LLM_API_KEY not set — cannot call LLM. " - "Set the key in a repo secret or local .env file." - ) - - try: - import litellm - except ImportError as exc: - raise ImportError( - "litellm is required for LLM features. 
" - "Install it with: pip install 'litellm>=1.0.0'" - ) from exc - - messages = [] - if system_prompt: - messages.append({"role": "system", "content": system_prompt}) - messages.append({"role": "user", "content": prompt}) - - logger.info("LLM request: model=%s tokens≈%d", self.model, len(prompt) // 4) - - response = litellm.completion( - model=self.model, - messages=messages, - temperature=temperature, - api_key=self.api_key, - ) - - text: str = response.choices[0].message.content.strip() - - if structured_output: - return self._parse_json(text) - return text - - @staticmethod - def _parse_json(text: str) -> Dict[str, Any]: - """Best-effort JSON extraction from LLM response text.""" - # Try direct parse first - try: - return json.loads(text) - except json.JSONDecodeError: - pass - # Try extracting from markdown code fence - for fence in ("```json", "```"): - if fence in text: - start = text.index(fence) + len(fence) - end = text.index("```", start) - try: - return json.loads(text[start:end].strip()) - except (json.JSONDecodeError, ValueError): - pass - # Last resort: return as dict with raw text - return {"raw": text} diff --git a/.github/skills/dak_authoring/actions/classify_issue_action.py b/.github/skills/dak_authoring/actions/classify_issue_action.py index fcc112c560..201b398f63 100644 --- a/.github/skills/dak_authoring/actions/classify_issue_action.py +++ b/.github/skills/dak_authoring/actions/classify_issue_action.py @@ -151,7 +151,7 @@ def apply_labels(issue_number: int, labels: list) -> None: def main() -> None: from common.prompts import load_prompt - from common.smart_llm_facade import SmartLLMFacade + from common.llm_utils import dak_completion issue_number = int(os.environ["ISSUE_NUMBER"]) title = os.environ.get("ISSUE_TITLE", "") @@ -164,11 +164,11 @@ def main() -> None: "dak_authoring", "classify_issue", issue_title=title, issue_body=body[:4000], ) - llm = SmartLLMFacade( + result = dak_completion( + prompt, structured_output=True, api_key=api_key, 
model=os.environ.get("DAK_LLM_MODEL", "gpt-4o-mini"), ) - result = llm.call(prompt, structured_output=True) labels = result.get("labels", []) print(f"LLM classification: {result.get('reasoning')}") else: diff --git a/.github/skills/dak_authoring/actions/dak_authoring_action.py b/.github/skills/dak_authoring/actions/dak_authoring_action.py index b50341bcb2..42f359bc06 100644 --- a/.github/skills/dak_authoring/actions/dak_authoring_action.py +++ b/.github/skills/dak_authoring/actions/dak_authoring_action.py @@ -25,15 +25,13 @@ def main() -> None: print("⚠️ DAK_LLM_API_KEY not set — LLM step skipped (structural validation still runs)") sys.exit(0) - from common.smart_llm_facade import SmartLLMFacade + from common.llm_utils import dak_completion from common.prompts import load_prompt issue_title = os.environ.get("ISSUE_TITLE", "") issue_body = os.environ.get("ISSUE_BODY", "") model = os.environ.get("DAK_LLM_MODEL", "gpt-4o") - llm = SmartLLMFacade(api_key=api_key, model=model) - prompt = load_prompt( "dak_authoring", "l2_authoring", issue_title=issue_title, @@ -41,7 +39,7 @@ def main() -> None: ) print(f"🤖 Planning L2 content changes with {model}...") - result = llm.call(prompt, structured_output=True) + result = dak_completion(prompt, structured_output=True, api_key=api_key, model=model) print(f"Summary: {result.get('summary', 'N/A')}") for change in result.get("changes", []): diff --git a/.github/skills/ig_publisher/actions/interpret_errors_action.py b/.github/skills/ig_publisher/actions/interpret_errors_action.py index 4aecf03dec..9413254459 100644 --- a/.github/skills/ig_publisher/actions/interpret_errors_action.py +++ b/.github/skills/ig_publisher/actions/interpret_errors_action.py @@ -23,11 +23,10 @@ def main() -> None: print("⚠️ DAK_LLM_API_KEY not set — LLM step skipped (structural validation still runs)") sys.exit(0) - from common.smart_llm_facade import SmartLLMFacade + from common.llm_utils import dak_completion from common.prompts import load_prompt model = 
os.environ.get("DAK_LLM_MODEL", "gpt-4o-mini") - llm = SmartLLMFacade(api_key=api_key, model=model) # Read build output from previous step (passed via file or env) build_output = os.environ.get("BUILD_OUTPUT", "No build output available.") @@ -39,7 +38,7 @@ def main() -> None: ) print(f"🤖 Interpreting errors with {model}...") - result = llm.call(prompt, structured_output=True) + result = dak_completion(prompt, structured_output=True, api_key=api_key, model=model) print(f"Summary: {result.get('summary', 'N/A')}") for finding in result.get("findings", []): From e16920878bbaa4bbd93cd45a56f82ce8134f5dea Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Fri, 6 Mar 2026 02:35:01 +0000 Subject: [PATCH 07/14] chore: update translation templates (.pot) [2026-03-06 02:35 UTC] Regenerated via IG Publisher and extract_translations.py. Triggered by: Copilot --- input/images/translations/images.pot | 2 +- input/pagecontent/translations/pages.pot | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/input/images/translations/images.pot b/input/images/translations/images.pot index 054488ff23..3a1c895cd1 100644 --- a/input/images/translations/images.pot +++ b/input/images/translations/images.pot @@ -6,7 +6,7 @@ msgid "" msgstr "" "Project-Id-Version: WHO SMART Guidelines\n" -"POT-Creation-Date: 2026-03-05 22:06+0000\n" +"POT-Creation-Date: 2026-03-06 02:35+0000\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" diff --git a/input/pagecontent/translations/pages.pot b/input/pagecontent/translations/pages.pot index 7baa6dfd74..a47a424a28 100644 --- a/input/pagecontent/translations/pages.pot +++ b/input/pagecontent/translations/pages.pot @@ -6,7 +6,7 @@ msgid "" msgstr "" "Project-Id-Version: WHO SMART Guidelines\n" -"POT-Creation-Date: 2026-03-05 22:06+0000\n" +"POT-Creation-Date: 2026-03-06 02:35+0000\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE 
\n" From 3f3f4be41df0c093bed63f1ad2f81ac4d30b3392 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Fri, 6 Mar 2026 13:28:42 +0000 Subject: [PATCH 08/14] Move labels/ to .github/skills/labels/, improve .env.example docs, clarify L2 skill in README - labels/ moved from repo root into .github/skills/labels/ (reference only) - .env.example now documents per-provider key formats, popular model names, and links to the LiteLLM master model list (https://docs.litellm.ai/docs/providers) - README clarifies that the L2 content review skill is dak_authoring/actions/dak_authoring_action.py, triggered by the content:L2 label via skill-l2-dak.yml Co-authored-by: litlfred <662242+litlfred@users.noreply.github.com> --- .env.example | 32 ++++++++++++++++--- .github/skills/README.md | 10 +++++- .../skills/labels}/content_L1.json | 0 .../skills/labels}/content_L2.json | 0 .../skills/labels}/content_L3.json | 0 .../skills/labels}/content_translation.json | 0 6 files changed, 37 insertions(+), 5 deletions(-) rename {labels => .github/skills/labels}/content_L1.json (100%) rename {labels => .github/skills/labels}/content_L2.json (100%) rename {labels => .github/skills/labels}/content_L3.json (100%) rename {labels => .github/skills/labels}/content_translation.json (100%) diff --git a/.env.example b/.env.example index 7d7a85f29d..a3a48c3de6 100644 --- a/.env.example +++ b/.env.example @@ -4,8 +4,32 @@ # Leave blank → LLM steps skipped, structural validation still runs. # Billed to YOUR account, not WHO. # -DAK_LLM_API_KEY= # sk-... (OpenAI) | sk-ant-... 
(Anthropic) | leave blank -DAK_LLM_MODEL=gpt-4o # gpt-4o | gpt-4o-mini | claude-3-5-sonnet-20241022 | gemini-2.0-flash +# ─── API KEY ──────────────────────────────────────────────────────────── +# Get a key from your LLM provider: +# OpenAI: https://platform.openai.com/api-keys → starts with sk- +# Anthropic: https://console.anthropic.com/settings/keys → starts with sk-ant- +# Google AI: https://aistudio.google.com/app/apikey → starts with AI... +# Azure: Azure Portal → your OpenAI resource → Keys +# +# LiteLLM routes to the right provider based on the model name below. +# Leave blank to skip all LLM steps (structural validation still runs). +DAK_LLM_API_KEY= + +# ─── MODEL ────────────────────────────────────────────────────────────── +# LiteLLM model identifier. Format: [provider/]model-name +# +# Popular options (as of 2025): +# OpenAI: gpt-4o, gpt-4o-mini, gpt-4-turbo, o1-mini +# Anthropic: claude-sonnet-4-20250514, claude-3-5-haiku-20241022 +# Google: gemini/gemini-2.0-flash, gemini/gemini-1.5-pro +# Azure: azure/your-deployment-name +# +# Master list of all supported models and provider prefixes: +# https://docs.litellm.ai/docs/providers +# +# Default: gpt-4o (requires OpenAI key above) +DAK_LLM_MODEL=gpt-4o -# IG Publisher (usually defaults are fine) -DAK_TX_SERVER= # optional custom terminology server +# ─── IG PUBLISHER (optional) ─────────────────────────────────────────── +# Custom FHIR terminology server. Leave blank for default (tx.fhir.org). +DAK_TX_SERVER= diff --git a/.github/skills/README.md b/.github/skills/README.md index f339fbd0da..92f7a53a1a 100644 --- a/.github/skills/README.md +++ b/.github/skills/README.md @@ -48,12 +48,15 @@ Skills run automatically via GitHub Actions workflows: content:L2 #e4e669 "DAK FHIR assets" content:L3 #d73a4a "Implementation adaptations" content:translation #0e8a16 "Translation of any content layer" + (Label definitions also stored in .github/skills/labels/*.json for reference.) 2. 
Add secret (Settings → Secrets and variables → Actions → New repository secret): DAK_LLM_API_KEY = sk-... 3. Add variable (Settings → Secrets and variables → Variables → New variable): DAK_LLM_MODEL = gpt-4o (or gpt-4o-mini to reduce cost) + See .env.example for the full list of supported model identifiers, + or https://docs.litellm.ai/docs/providers for the master list. 4. Build local Docker image (optional, for local development): docker build -t dak-skill .github/skills/ @@ -97,7 +100,12 @@ Skills run automatically via GitHub Actions workflows: ├── bpmn_author/ # Author/edit BPMN ├── bpmn_import/ # Import BPMN → FSH, validate lanes ├── ig_publisher/ # IG Publisher validation and build -├── dak_authoring/ # Issue classification and L2 authoring +├── dak_authoring/ # Issue classification and L2 content review/authoring +│ ├── actions/ +│ │ ├── classify_issue_action.py # Keyword + LLM issue classifier +│ │ └── dak_authoring_action.py # L2 content review skill (→ content:L2 label) +│ └── prompts/ +├── labels/ # GitHub label definitions (JSON, for reference) ├── l1_review/ # (placeholder v0.2) ├── l3_review/ # (placeholder v0.3) └── translation/ # (placeholder v0.3) diff --git a/labels/content_L1.json b/.github/skills/labels/content_L1.json similarity index 100% rename from labels/content_L1.json rename to .github/skills/labels/content_L1.json diff --git a/labels/content_L2.json b/.github/skills/labels/content_L2.json similarity index 100% rename from labels/content_L2.json rename to .github/skills/labels/content_L2.json diff --git a/labels/content_L3.json b/.github/skills/labels/content_L3.json similarity index 100% rename from labels/content_L3.json rename to .github/skills/labels/content_L3.json diff --git a/labels/content_translation.json b/.github/skills/labels/content_translation.json similarity index 100% rename from labels/content_translation.json rename to .github/skills/labels/content_translation.json From 7d0ac8639bedf20948fa256678b493d610e3d9f6 Mon Sep 17 
00:00:00 2001 From: "github-actions[bot]" Date: Fri, 6 Mar 2026 13:29:06 +0000 Subject: [PATCH 09/14] chore: update translation templates (.pot) [2026-03-06 13:29 UTC] Regenerated via IG Publisher and extract_translations.py. Triggered by: litlfred --- input/images/translations/images.pot | 2 +- input/pagecontent/translations/pages.pot | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/input/images/translations/images.pot b/input/images/translations/images.pot index 3a1c895cd1..a45dc9533c 100644 --- a/input/images/translations/images.pot +++ b/input/images/translations/images.pot @@ -6,7 +6,7 @@ msgid "" msgstr "" "Project-Id-Version: WHO SMART Guidelines\n" -"POT-Creation-Date: 2026-03-06 02:35+0000\n" +"POT-Creation-Date: 2026-03-06 13:29+0000\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" diff --git a/input/pagecontent/translations/pages.pot b/input/pagecontent/translations/pages.pot index 694a2a3744..3f0272ff8b 100644 --- a/input/pagecontent/translations/pages.pot +++ b/input/pagecontent/translations/pages.pot @@ -6,7 +6,7 @@ msgid "" msgstr "" "Project-Id-Version: WHO SMART Guidelines\n" -"POT-Creation-Date: 2026-03-06 03:30+0000\n" +"POT-Creation-Date: 2026-03-06 13:29+0000\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" From efb28e44cbc43204ed55143a5bc784115af82694 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Fri, 6 Mar 2026 19:00:22 +0000 Subject: [PATCH 10/14] Merge upstream main: incorporate .pot translation templates and translation infrastructure Co-authored-by: litlfred <662242+litlfred@users.noreply.github.com> --- .../workflows/generate_translation_report.yml | 56 + .github/workflows/generate_weblate_yaml.yml | 56 + .github/workflows/ghbuild.yml | 34 +- .github/workflows/pr-deploy-slash.yml | 7 +- .../register_translation_project.yml | 100 + .gitignore 
| 1 + input/fsh/models/DAK.fsh | 3 - input/images/translations/images.pot | 2 +- input/pagecontent/translations/changes.pot | 32 + input/pagecontent/translations/dak-api.pot | 46 + input/pagecontent/translations/downloads.pot | 25 + .../translations/{pages.pot => index.pot} | 142 +- input/pagecontent/translations/license.pot | 109 + input/scripts/extract_script_strings.py | 247 + input/scripts/extract_translations.py | 150 +- input/scripts/generate_weblate_yaml.py | 274 + input/scripts/pull_crowdin_translations.py | 313 + input/scripts/pull_launchpad_translations.py | 196 + input/scripts/pull_translations.py | 269 + input/scripts/register_all_dak_projects.py | 182 + input/scripts/register_translation_project.py | 335 + input/scripts/run_ig_publisher.py | 257 +- input/scripts/translation_config.py | 442 ++ input/scripts/translation_report.py | 251 + input/scripts/translation_security.py | 174 + input/scripts/translations/scripts.pot | 10 + input/translations/base.pot | 5408 +++++++++++++++++ sushi-config.yaml | 57 + 28 files changed, 8951 insertions(+), 227 deletions(-) create mode 100644 .github/workflows/generate_translation_report.yml create mode 100644 .github/workflows/generate_weblate_yaml.yml create mode 100644 .github/workflows/register_translation_project.yml create mode 100644 input/pagecontent/translations/changes.pot create mode 100644 input/pagecontent/translations/dak-api.pot create mode 100644 input/pagecontent/translations/downloads.pot rename input/pagecontent/translations/{pages.pot => index.pot} (62%) create mode 100644 input/pagecontent/translations/license.pot create mode 100644 input/scripts/extract_script_strings.py create mode 100644 input/scripts/generate_weblate_yaml.py create mode 100644 input/scripts/pull_crowdin_translations.py create mode 100644 input/scripts/pull_launchpad_translations.py create mode 100644 input/scripts/pull_translations.py create mode 100644 input/scripts/register_all_dak_projects.py create mode 100644 
input/scripts/register_translation_project.py create mode 100644 input/scripts/translation_config.py create mode 100644 input/scripts/translation_report.py create mode 100644 input/scripts/translation_security.py create mode 100644 input/scripts/translations/scripts.pot create mode 100644 input/translations/base.pot diff --git a/.github/workflows/generate_translation_report.yml b/.github/workflows/generate_translation_report.yml new file mode 100644 index 0000000000..5e4dd5d6f7 --- /dev/null +++ b/.github/workflows/generate_translation_report.yml @@ -0,0 +1,56 @@ +# Generate translation completeness report (translation-status.md). +# +# Scans all .po files in the repo and generates a Markdown report at +# input/pagecontent/translation-status.md showing per-language, +# per-component translation completeness. +# +# All report logic lives in: +# input/scripts/translation_report.py + +name: Generate Translation Report + +on: + workflow_dispatch: + + workflow_call: + +jobs: + report: + name: Generate translation status report + runs-on: ubuntu-latest + + permissions: + contents: write + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.12' + cache: pip + cache-dependency-path: input/scripts/requirements.txt + + - name: Install Python dependencies + run: pip install -r input/scripts/requirements.txt + + - name: Generate report + run: python input/scripts/translation_report.py --repo-root "." + + - name: Commit report + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + git add -- 'input/pagecontent/translation-status.md' 2>/dev/null || true + + if git diff --cached --quiet; then + echo "No changes to translation report." 
+ else + git commit -m "chore: update translation status report" + git push + fi diff --git a/.github/workflows/generate_weblate_yaml.yml b/.github/workflows/generate_weblate_yaml.yml new file mode 100644 index 0000000000..4377d3ebe1 --- /dev/null +++ b/.github/workflows/generate_weblate_yaml.yml @@ -0,0 +1,56 @@ +# Generate weblate.yaml from sushi-config.yaml and discovered .pot components. +# +# Reads sushi-config.yaml#translations (languages, plural forms, services) and +# scans for .pot files to produce a complete weblate.yaml, eliminating manual +# maintenance. +# +# All generation logic lives in: +# input/scripts/generate_weblate_yaml.py + +name: Generate Weblate Config + +on: + workflow_dispatch: + + workflow_call: + +jobs: + generate: + name: Generate weblate.yaml + runs-on: ubuntu-latest + + permissions: + contents: write + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.12' + cache: pip + cache-dependency-path: input/scripts/requirements.txt + + - name: Install Python dependencies + run: pip install -r input/scripts/requirements.txt + + - name: Generate weblate.yaml + run: python input/scripts/generate_weblate_yaml.py --repo-root "." + + - name: Commit weblate.yaml + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + git add -- weblate.yaml 2>/dev/null || true + + if git diff --cached --quiet; then + echo "No changes to weblate.yaml." 
+ else + git commit -m "chore: regenerate weblate.yaml from sushi-config.yaml" + git push + fi diff --git a/.github/workflows/ghbuild.yml b/.github/workflows/ghbuild.yml index e4bde9359f..efbf4d712c 100644 --- a/.github/workflows/ghbuild.yml +++ b/.github/workflows/ghbuild.yml @@ -436,22 +436,42 @@ jobs: name: qa-json-artifact path: ./output/qa.json # Adjust the path based on where qa.json is located - # The IG Publisher writes .pot translation templates into the output/ - # directory. Since output/ is listed in .gitignore (and is not scanned - # by the artifact upload below), copy any .pot files it produced into - # the input/translations/ tree so they are committed to main, - # overwriting any existing files. - - name: Collect IG Publisher .pot files from output + # The IG Publisher writes translation files to two locations: + # 1. output/*.pot — translation templates (gitignored) + # 2. translations/{lang}/po/*.po — per-resource per-language .po files + # (also gitignored via /translations in .gitignore) + # Copy .pot files from output/ and merge per-resource .po files from + # translations/ into a single base.pot for Weblate integration. + - name: Collect IG Publisher translation files if: success() run: | mkdir -p input/translations found=0 + + # Copy .pot files from output/ for pot in $(find output/ -name '*.pot' 2>/dev/null); do echo "Found IG Publisher .pot: $pot" cp "$pot" input/translations/ found=$((found + 1)) done - echo "Collected $found .pot file(s) from IG Publisher output" + + # Count .po files from translations/ (IG Publisher i18n output) + for po in $(find translations/ -name '*.po' 2>/dev/null); do + echo "Found IG Publisher .po: $po" + found=$((found + 1)) + done + + echo "Found $found translation file(s) from IG Publisher" + + # Merge per-resource .po files into base.pot using the Python helper. 
+ if [ -f "input/scripts/run_ig_publisher.py" ]; then + python3 -c " + import sys, os + sys.path.insert(0, os.path.join('.', 'input', 'scripts')) + from run_ig_publisher import collect_publisher_pot_files + collect_publisher_pot_files('.') + " 2>&1 || echo "Warning: Python merge step failed; .pot files from output/ still collected" + fi # Upload translation template (.pot) files produced by the IG Publisher # so that downstream jobs (e.g. commit-pot) can reuse them without diff --git a/.github/workflows/pr-deploy-slash.yml b/.github/workflows/pr-deploy-slash.yml index f62e50663e..934a3512af 100644 --- a/.github/workflows/pr-deploy-slash.yml +++ b/.github/workflows/pr-deploy-slash.yml @@ -2,6 +2,8 @@ name: PR Slash-Command Deploy # Lets any collaborator post /deploy in a PR comment to manually trigger # the FHIR IG build + GitHub Pages deployment for that PR's branch. +# The /deploy command is recognized when it appears at the start of the comment +# or at the start of any line within the comment (e.g. after "@copilot ..."). on: issue_comment: @@ -9,10 +11,11 @@ on: jobs: dispatch: - # Only run on pull-request comments that start with /deploy + # Run on pull-request comments that contain /deploy at the start of any line if: > github.event.issue.pull_request != null && - startsWith(github.event.comment.body, '/deploy') + (startsWith(github.event.comment.body, '/deploy') || + contains(github.event.comment.body, '\n/deploy')) runs-on: ubuntu-latest permissions: issues: write # to post reactions and reply comments on the PR diff --git a/.github/workflows/register_translation_project.yml b/.github/workflows/register_translation_project.yml new file mode 100644 index 0000000000..491ad46f5e --- /dev/null +++ b/.github/workflows/register_translation_project.yml @@ -0,0 +1,100 @@ +# Register a single DAK IG or all DAK IGs in the org with all enabled +# translation services (Weblate, Launchpad, Crowdin). 
+# +# All registration logic lives in: +# input/scripts/register_translation_project.py (single) +# input/scripts/register_all_dak_projects.py (bulk) +# +# Required repository secrets: +# WEBLATE_API_TOKEN — Weblate API token with project-admin access +# CROWDIN_API_TOKEN — Crowdin API token (if Crowdin enabled) +# LAUNCHPAD_API_TOKEN — Launchpad API token (if Launchpad enabled) +# +# ⚠️ API tokens are NEVER workflow inputs. They are always secrets. + +name: Register Translation Project + +on: + workflow_dispatch: + inputs: + mode: + description: 'Registration mode' + required: true + default: 'single' + type: choice + options: + - single + - all + repo_name: + description: 'Target repo name (mode=single only)' + required: false + default: '' + dry_run: + description: 'List repos without registering (mode=all only)' + required: false + type: boolean + default: false + + repository_dispatch: + types: [dak-ig-registered] + +concurrency: + group: register-translation + cancel-in-progress: false + +jobs: + register: + name: Register translation project(s) + runs-on: ubuntu-latest + + permissions: + contents: read + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.12' + cache: pip + cache-dependency-path: input/scripts/requirements.txt + + - name: Install Python dependencies + run: pip install -r input/scripts/requirements.txt + + - name: Register single project + if: >- + (github.event_name == 'workflow_dispatch' && inputs.mode == 'single') || + github.event_name == 'repository_dispatch' + env: + WEBLATE_API_TOKEN: ${{ secrets.WEBLATE_API_TOKEN }} + CROWDIN_API_TOKEN: ${{ secrets.CROWDIN_API_TOKEN }} + LAUNCHPAD_API_TOKEN: ${{ secrets.LAUNCHPAD_API_TOKEN }} + INPUT_REPO_NAME: ${{ inputs.repo_name || github.event.client_payload.repo_name || '' }} + run: | + # Validate repo_name contains only safe characters (guards against + # malicious repository_dispatch payloads) 
+ if ! echo "$INPUT_REPO_NAME" | grep -qE '^[A-Za-z0-9_-]+$'; then + echo "::error::Invalid repo_name: contains disallowed characters" + exit 1 + fi + python input/scripts/register_translation_project.py \ + --repo-name "${INPUT_REPO_NAME}" \ + --repo-root "." + + - name: Register all DAK projects + if: github.event_name == 'workflow_dispatch' && inputs.mode == 'all' + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + WEBLATE_API_TOKEN: ${{ secrets.WEBLATE_API_TOKEN }} + CROWDIN_API_TOKEN: ${{ secrets.CROWDIN_API_TOKEN }} + LAUNCHPAD_API_TOKEN: ${{ secrets.LAUNCHPAD_API_TOKEN }} + INPUT_DRY_RUN: ${{ inputs.dry_run }} + run: | + DRY_RUN_FLAG="" + if [ "$INPUT_DRY_RUN" = "true" ]; then + DRY_RUN_FLAG="--dry-run" + fi + python input/scripts/register_all_dak_projects.py $DRY_RUN_FLAG diff --git a/.gitignore b/.gitignore index cfe2a07459..abb7604229 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,7 @@ temp template output fsh-generated +/translations # Don't commit this because it's so large # ########################################### diff --git a/input/fsh/models/DAK.fsh b/input/fsh/models/DAK.fsh index d259da3a14..0b0ae04dc6 100644 --- a/input/fsh/models/DAK.fsh +++ b/input/fsh/models/DAK.fsh @@ -24,9 +24,6 @@ Description: "Logical Model for representing a complete Digital Adaptation Kit ( * url 0..1 url "Publisher URL" "URL of the publishing organization" - - - // 9 DAK Components - each component uses a Source type that can be URL, canonical reference, or instance data * healthInterventions 0..* HealthInterventionsSource "Health Interventions and Recommendations" "Overview of the health interventions and WHO, regional or national recommendations included within the DAK" * personas 0..* GenericPersonaSource "Generic Personas" "Depiction of the human and system actors" diff --git a/input/images/translations/images.pot b/input/images/translations/images.pot index a45dc9533c..f048cb6af8 100644 --- a/input/images/translations/images.pot +++ 
b/input/images/translations/images.pot @@ -6,7 +6,7 @@ msgid "" msgstr "" "Project-Id-Version: WHO SMART Guidelines\n" -"POT-Creation-Date: 2026-03-06 13:29+0000\n" +"POT-Creation-Date: 2026-03-06 13:41+0000\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" diff --git a/input/pagecontent/translations/changes.pot b/input/pagecontent/translations/changes.pot new file mode 100644 index 0000000000..9bc7951ca1 --- /dev/null +++ b/input/pagecontent/translations/changes.pot @@ -0,0 +1,32 @@ +# WHO SMART Guidelines Translation Template +# Copyright (C) 2026 World Health Organization +# This file is distributed under the CC-BY-SA-3.0-IGO license. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: WHO SMART Guidelines\n" +"POT-Creation-Date: 2026-03-06 14:39+0000\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language-Team: LANGUAGE \n" +"Language: \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=6; plural=(n==1 ? 0 : n==2 ? 1 : n>=3 && n<=10 ? 2 : n>=11 && n<=99 ? 3 : 4);\n" + +#. Source: input/pagecontent/changes.md:5 +#. URL: http://smart.who.int/base/changes.html +#. URL: https://WorldHealthOrganization.github.io/smart-base/changes.html +#: input/pagecontent/changes.md:5 +msgid "Initial Release" +msgstr "" + +#. Source: input/pagecontent/changes.md:3 +#. URL: http://smart.who.int/base/changes.html +#. 
URL: https://WorldHealthOrganization.github.io/smart-base/changes.html +#: input/pagecontent/changes.md:3 +msgid "Version 1.0.0" +msgstr "" + diff --git a/input/pagecontent/translations/dak-api.pot b/input/pagecontent/translations/dak-api.pot new file mode 100644 index 0000000000..48d4a8db78 --- /dev/null +++ b/input/pagecontent/translations/dak-api.pot @@ -0,0 +1,46 @@ +# WHO SMART Guidelines Translation Template +# Copyright (C) 2026 World Health Organization +# This file is distributed under the CC-BY-SA-3.0-IGO license. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: WHO SMART Guidelines\n" +"POT-Creation-Date: 2026-03-06 14:39+0000\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language-Team: LANGUAGE \n" +"Language: \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=6; plural=(n==1 ? 0 : n==2 ? 1 : n>=3 && n<=10 ? 2 : n>=11 && n<=99 ? 3 : 4);\n" + +#. Source: input/pagecontent/dak-api.md:3 +#. URL: http://smart.who.int/base/dak-api.html +#. URL: https://WorldHealthOrganization.github.io/smart-base/dak-api.html +#: input/pagecontent/dak-api.md:3 +msgid "DAK API Documentation Hub" +msgstr "" + +#. Source: input/pagecontent/dak-api.md:9 +#. URL: http://smart.who.int/base/dak-api.html +#. URL: https://WorldHealthOrganization.github.io/smart-base/dak-api.html +#: input/pagecontent/dak-api.md:9 +msgid "Table of Contents" +msgstr "" + +#. Source: input/pagecontent/dak-api.md:5 +#. URL: http://smart.who.int/base/dak-api.html +#. URL: https://WorldHealthOrganization.github.io/smart-base/dak-api.html +#: input/pagecontent/dak-api.md:5 +msgid "This page provides access to Data Access Kit (DAK) API documentation and schemas." +msgstr "" + +#. Source: input/pagecontent/dak-api.md:12 +#. URL: http://smart.who.int/base/dak-api.html +#. 
URL: https://WorldHealthOrganization.github.io/smart-base/dak-api.html +#: input/pagecontent/dak-api.md:12 +msgid "TOC" +msgstr "" + diff --git a/input/pagecontent/translations/downloads.pot b/input/pagecontent/translations/downloads.pot new file mode 100644 index 0000000000..a42c25dd19 --- /dev/null +++ b/input/pagecontent/translations/downloads.pot @@ -0,0 +1,25 @@ +# WHO SMART Guidelines Translation Template +# Copyright (C) 2026 World Health Organization +# This file is distributed under the CC-BY-SA-3.0-IGO license. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: WHO SMART Guidelines\n" +"POT-Creation-Date: 2026-03-06 14:39+0000\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language-Team: LANGUAGE \n" +"Language: \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=6; plural=(n==1 ? 0 : n==2 ? 1 : n>=3 && n<=10 ? 2 : n>=11 && n<=99 ? 3 : 4);\n" + +#. Source: input/pagecontent/downloads.md:3 +#. URL: http://smart.who.int/base/downloads.html +#. URL: https://WorldHealthOrganization.github.io/smart-base/downloads.html +#: input/pagecontent/downloads.md:3 +msgid "Downloads" +msgstr "" + diff --git a/input/pagecontent/translations/pages.pot b/input/pagecontent/translations/index.pot similarity index 62% rename from input/pagecontent/translations/pages.pot rename to input/pagecontent/translations/index.pot index 3f0272ff8b..ad4447a826 100644 --- a/input/pagecontent/translations/pages.pot +++ b/input/pagecontent/translations/index.pot @@ -6,7 +6,7 @@ msgid "" msgstr "" "Project-Id-Version: WHO SMART Guidelines\n" -"POT-Creation-Date: 2026-03-06 13:29+0000\n" +"POT-Creation-Date: 2026-03-06 16:55+0000\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" @@ -23,13 +23,6 @@ msgstr "" msgid "A Digital Adaptation Kit (DAK) is the primary artefact of WHO SMART Guidelines. 
It is a structured, standardised package of clinical and operational content that represents a WHO health intervention in a computable form. Each DAK contains:" msgstr "" -#. Source: input/pagecontent/license.md:12 -#. URL: http://smart.who.int/base/license.html -#. URL: https://WorldHealthOrganization.github.io/smart-base/license.html -#: input/pagecontent/license.md:12 -msgid "Adapt: Remix, transform, and build upon the material for any purpose, even commercially." -msgstr "" - #. Source: input/pagecontent/index.md:41 #. URL: http://smart.who.int/base/index.html #. URL: https://WorldHealthOrganization.github.io/smart-base/index.html @@ -37,13 +30,6 @@ msgstr "" msgid "All Repositories: Preview URLs use the GitHub Pages pattern for current CI builds." msgstr "" -#. Source: input/pagecontent/license.md:16 -#. URL: http://smart.who.int/base/license.html -#. URL: https://WorldHealthOrganization.github.io/smart-base/license.html -#: input/pagecontent/license.md:16 -msgid "Attribution: You must give appropriate credit, provide a link to the license, and indicate if changes were made. You may do so in any reasonable manner, but not in any way that suggests the licensor endorses you or your use." -msgstr "" - #. Source: input/pagecontent/index.md:43 #. URL: http://smart.who.int/base/index.html #. URL: https://WorldHealthOrganization.github.io/smart-base/index.html @@ -65,13 +51,6 @@ msgstr "" msgid "Core data elements – the data dictionary for the health domain" msgstr "" -#. Source: input/pagecontent/license.md:3 -#. URL: http://smart.who.int/base/license.html -#. URL: https://WorldHealthOrganization.github.io/smart-base/license.html -#: input/pagecontent/license.md:3 -msgid "Creative Commons Attribution 3.0 IGO (CC-BY-3.0-IGO)" -msgstr "" - #. Source: input/pagecontent/index.md:53 #. URL: http://smart.who.int/base/index.html #. 
URL: https://WorldHealthOrganization.github.io/smart-base/index.html @@ -86,13 +65,6 @@ msgstr "" msgid "DAK (Digital Adaptation Kit) URL Handling" msgstr "" -#. Source: input/pagecontent/dak-api.md:3 -#. URL: http://smart.who.int/base/dak-api.html -#. URL: https://WorldHealthOrganization.github.io/smart-base/dak-api.html -#: input/pagecontent/dak-api.md:3 -msgid "DAK API Documentation Hub" -msgstr "" - #. Source: input/pagecontent/index.md:14 #. URL: http://smart.who.int/base/index.html #. URL: https://WorldHealthOrganization.github.io/smart-base/index.html @@ -114,13 +86,6 @@ msgstr "" msgid "Development Branches: Use preview URLs for canonical references and resource identifiers." msgstr "" -#. Source: input/pagecontent/downloads.md:3 -#. URL: http://smart.who.int/base/downloads.html -#. URL: https://WorldHealthOrganization.github.io/smart-base/downloads.html -#: input/pagecontent/downloads.md:3 -msgid "Downloads" -msgstr "" - #. Source: input/pagecontent/index.md:21 #. URL: http://smart.who.int/base/index.html #. URL: https://WorldHealthOrganization.github.io/smart-base/index.html @@ -163,20 +128,6 @@ msgstr "" msgid "Health interventions and recommendations – the clinical guidance from WHO" msgstr "" -#. Source: input/pagecontent/changes.md:5 -#. URL: http://smart.who.int/base/changes.html -#. URL: https://WorldHealthOrganization.github.io/smart-base/changes.html -#: input/pagecontent/changes.md:5 -msgid "Initial Release" -msgstr "" - -#. Source: input/pagecontent/license.md:20 -#. URL: http://smart.who.int/base/license.html -#. URL: https://WorldHealthOrganization.github.io/smart-base/license.html -#: input/pagecontent/license.md:20 -msgid "International Government Organizations (IGOs): This license is specifically designed for works created by International Government Organizations." -msgstr "" - #. Source: input/pagecontent/index.md:61 #. URL: http://smart.who.int/base/index.html #. 
URL: https://WorldHealthOrganization.github.io/smart-base/index.html @@ -184,20 +135,6 @@ msgstr "" msgid "IP Statements" msgstr "" -#. Source: input/pagecontent/license.md:22 -#. URL: http://smart.who.int/base/license.html -#. URL: https://WorldHealthOrganization.github.io/smart-base/license.html -#: input/pagecontent/license.md:22 -msgid "License Text" -msgstr "" - -#. Source: input/pagecontent/license.md:18 -#. URL: http://smart.who.int/base/license.html -#. URL: https://WorldHealthOrganization.github.io/smart-base/license.html -#: input/pagecontent/license.md:18 -msgid "No Additional Restrictions: You may not apply legal terms or technological measures that legally restrict others from doing anything the license permits." -msgstr "" - #. Source: input/pagecontent/index.md:38 #. URL: http://smart.who.int/base/index.html #. URL: https://WorldHealthOrganization.github.io/smart-base/index.html @@ -247,27 +184,6 @@ msgstr "" msgid "See the SMART IG Starter Kit for more information on building and using WHO SMART Guidelines." msgstr "" -#. Source: input/pagecontent/license.md:11 -#. URL: http://smart.who.int/base/license.html -#. URL: https://WorldHealthOrganization.github.io/smart-base/license.html -#: input/pagecontent/license.md:11 -msgid "Share: Copy and redistribute the material in any medium or format." -msgstr "" - -#. Source: input/pagecontent/license.md:5 -#. URL: http://smart.who.int/base/license.html -#. URL: https://WorldHealthOrganization.github.io/smart-base/license.html -#: input/pagecontent/license.md:5 -msgid "Summary" -msgstr "" - -#. Source: input/pagecontent/dak-api.md:9 -#. URL: http://smart.who.int/base/dak-api.html -#. URL: https://WorldHealthOrganization.github.io/smart-base/dak-api.html -#: input/pagecontent/dak-api.md:9 -msgid "Table of Contents" -msgstr "" - #. Source: input/pagecontent/index.md:17 #. URL: http://smart.who.int/base/index.html #. 
URL: https://WorldHealthOrganization.github.io/smart-base/index.html @@ -296,55 +212,6 @@ msgstr "" msgid "This implementation guide contains base conformance resources for use in all WHO SMART Guidelines implementation guides." msgstr "" -#. Source: input/pagecontent/license.md:7 -#. URL: http://smart.who.int/base/license.html -#. URL: https://WorldHealthOrganization.github.io/smart-base/license.html -#: input/pagecontent/license.md:7 -msgid "This is a human-readable summary of the Creative Commons Attribution 3.0 IGO (CC-BY-3.0-IGO) License. This summary is not a substitute for the full license text." -msgstr "" - -#. Source: input/pagecontent/dak-api.md:5 -#. URL: http://smart.who.int/base/dak-api.html -#. URL: https://WorldHealthOrganization.github.io/smart-base/dak-api.html -#: input/pagecontent/dak-api.md:5 -msgid "This page provides access to Data Access Kit (DAK) API documentation and schemas." -msgstr "" - -#. Source: input/pagecontent/license.md:24 -#. URL: http://smart.who.int/base/license.html -#. URL: https://WorldHealthOrganization.github.io/smart-base/license.html -#: input/pagecontent/license.md:24 -msgid "This work is licensed under the Creative Commons Attribution 3.0 IGO (CC-BY-3.0-IGO) License." -msgstr "" - -#. Source: input/pagecontent/license.md:26 -#. URL: http://smart.who.int/base/license.html -#. URL: https://WorldHealthOrganization.github.io/smart-base/license.html -#: input/pagecontent/license.md:26 -msgid "To view a copy of this license, visit https://creativecommons.org/licenses/by/3.0/igo/ or send a letter to Creative Commons, PO Box 1866, Mountain View, CA 94042, USA." -msgstr "" - -#. Source: input/pagecontent/dak-api.md:12 -#. URL: http://smart.who.int/base/dak-api.html -#. URL: https://WorldHealthOrganization.github.io/smart-base/dak-api.html -#: input/pagecontent/dak-api.md:12 -msgid "TOC" -msgstr "" - -#. Source: input/pagecontent/license.md:14 -#. URL: http://smart.who.int/base/license.html -#. 
URL: https://WorldHealthOrganization.github.io/smart-base/license.html -#: input/pagecontent/license.md:14 -msgid "Under the following conditions:" -msgstr "" - -#. Source: input/pagecontent/license.md:9 -#. URL: http://smart.who.int/base/license.html -#. URL: https://WorldHealthOrganization.github.io/smart-base/license.html -#: input/pagecontent/license.md:9 -msgid "Under this license, you are free to:" -msgstr "" - #. Source: input/pagecontent/index.md:11 #. URL: http://smart.who.int/base/index.html #. URL: https://WorldHealthOrganization.github.io/smart-base/index.html @@ -352,13 +219,6 @@ msgstr "" msgid "User scenarios – narrative descriptions of how the guidance is used in practice" msgstr "" -#. Source: input/pagecontent/changes.md:3 -#. URL: http://smart.who.int/base/changes.html -#. URL: https://WorldHealthOrganization.github.io/smart-base/changes.html -#: input/pagecontent/changes.md:3 -msgid "Version 1.0.0" -msgstr "" - #. Source: input/pagecontent/index.md:37 #. URL: http://smart.who.int/base/index.html #. URL: https://WorldHealthOrganization.github.io/smart-base/index.html diff --git a/input/pagecontent/translations/license.pot b/input/pagecontent/translations/license.pot new file mode 100644 index 0000000000..4d078b6d5c --- /dev/null +++ b/input/pagecontent/translations/license.pot @@ -0,0 +1,109 @@ +# WHO SMART Guidelines Translation Template +# Copyright (C) 2026 World Health Organization +# This file is distributed under the CC-BY-SA-3.0-IGO license. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: WHO SMART Guidelines\n" +"POT-Creation-Date: 2026-03-06 14:39+0000\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language-Team: LANGUAGE \n" +"Language: \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=6; plural=(n==1 ? 0 : n==2 ? 1 : n>=3 && n<=10 ? 2 : n>=11 && n<=99 ? 3 : 4);\n" + +#. 
Source: input/pagecontent/license.md:12 +#. URL: http://smart.who.int/base/license.html +#. URL: https://WorldHealthOrganization.github.io/smart-base/license.html +#: input/pagecontent/license.md:12 +msgid "Adapt: Remix, transform, and build upon the material for any purpose, even commercially." +msgstr "" + +#. Source: input/pagecontent/license.md:16 +#. URL: http://smart.who.int/base/license.html +#. URL: https://WorldHealthOrganization.github.io/smart-base/license.html +#: input/pagecontent/license.md:16 +msgid "Attribution: You must give appropriate credit, provide a link to the license, and indicate if changes were made. You may do so in any reasonable manner, but not in any way that suggests the licensor endorses you or your use." +msgstr "" + +#. Source: input/pagecontent/license.md:3 +#. URL: http://smart.who.int/base/license.html +#. URL: https://WorldHealthOrganization.github.io/smart-base/license.html +#: input/pagecontent/license.md:3 +msgid "Creative Commons Attribution 3.0 IGO (CC-BY-3.0-IGO)" +msgstr "" + +#. Source: input/pagecontent/license.md:20 +#. URL: http://smart.who.int/base/license.html +#. URL: https://WorldHealthOrganization.github.io/smart-base/license.html +#: input/pagecontent/license.md:20 +msgid "International Government Organizations (IGOs): This license is specifically designed for works created by International Government Organizations." +msgstr "" + +#. Source: input/pagecontent/license.md:22 +#. URL: http://smart.who.int/base/license.html +#. URL: https://WorldHealthOrganization.github.io/smart-base/license.html +#: input/pagecontent/license.md:22 +msgid "License Text" +msgstr "" + +#. Source: input/pagecontent/license.md:18 +#. URL: http://smart.who.int/base/license.html +#. 
URL: https://WorldHealthOrganization.github.io/smart-base/license.html +#: input/pagecontent/license.md:18 +msgid "No Additional Restrictions: You may not apply legal terms or technological measures that legally restrict others from doing anything the license permits." +msgstr "" + +#. Source: input/pagecontent/license.md:11 +#. URL: http://smart.who.int/base/license.html +#. URL: https://WorldHealthOrganization.github.io/smart-base/license.html +#: input/pagecontent/license.md:11 +msgid "Share: Copy and redistribute the material in any medium or format." +msgstr "" + +#. Source: input/pagecontent/license.md:5 +#. URL: http://smart.who.int/base/license.html +#. URL: https://WorldHealthOrganization.github.io/smart-base/license.html +#: input/pagecontent/license.md:5 +msgid "Summary" +msgstr "" + +#. Source: input/pagecontent/license.md:7 +#. URL: http://smart.who.int/base/license.html +#. URL: https://WorldHealthOrganization.github.io/smart-base/license.html +#: input/pagecontent/license.md:7 +msgid "This is a human-readable summary of the Creative Commons Attribution 3.0 IGO (CC-BY-3.0-IGO) License. This summary is not a substitute for the full license text." +msgstr "" + +#. Source: input/pagecontent/license.md:24 +#. URL: http://smart.who.int/base/license.html +#. URL: https://WorldHealthOrganization.github.io/smart-base/license.html +#: input/pagecontent/license.md:24 +msgid "This work is licensed under the Creative Commons Attribution 3.0 IGO (CC-BY-3.0-IGO) License." +msgstr "" + +#. Source: input/pagecontent/license.md:26 +#. URL: http://smart.who.int/base/license.html +#. URL: https://WorldHealthOrganization.github.io/smart-base/license.html +#: input/pagecontent/license.md:26 +msgid "To view a copy of this license, visit https://creativecommons.org/licenses/by/3.0/igo/ or send a letter to Creative Commons, PO Box 1866, Mountain View, CA 94042, USA." +msgstr "" + +#. Source: input/pagecontent/license.md:14 +#. URL: http://smart.who.int/base/license.html +#. 
URL: https://WorldHealthOrganization.github.io/smart-base/license.html +#: input/pagecontent/license.md:14 +msgid "Under the following conditions:" +msgstr "" + +#. Source: input/pagecontent/license.md:9 +#. URL: http://smart.who.int/base/license.html +#. URL: https://WorldHealthOrganization.github.io/smart-base/license.html +#: input/pagecontent/license.md:9 +msgid "Under this license, you are free to:" +msgstr "" + diff --git a/input/scripts/extract_script_strings.py b/input/scripts/extract_script_strings.py new file mode 100644 index 0000000000..8d6b9b03dc --- /dev/null +++ b/input/scripts/extract_script_strings.py @@ -0,0 +1,247 @@ +#!/usr/bin/env python3 +""" +extract_script_strings.py — Extract translatable strings from Python scripts +in input/scripts/ and produce a .pot file at +input/scripts/translations/scripts.pot. + +Uses Python's ast module to scan *.py files for _(), gettext(), and ngettext() +call patterns and produces a standard Gettext .pot template file. + +Usage: + python extract_script_strings.py [--scripts-dir input/scripts] [--output input/scripts/translations/scripts.pot] + +Exit codes: + 0 .pot file generated successfully + 1 Error + +Author: WHO SMART Guidelines Team +""" + +import argparse +import ast +import datetime +import logging +import re +import sys +from pathlib import Path +from typing import Dict, List, Optional, Set, Tuple + +logger = logging.getLogger(__name__) + + +# --------------------------------------------------------------------------- +# AST-based string extraction +# --------------------------------------------------------------------------- + +# Function names that mark translatable strings +_GETTEXT_FUNCTIONS: Set[str] = {"_", "gettext", "ngettext"} + + +def _extract_from_file(py_path: Path) -> List[Tuple[str, int, str]]: + """ + Extract translatable strings from a Python file using AST parsing. + + Returns list of (file_path_str, line_number, msgid). 
+ """ + try: + source = py_path.read_text(encoding="utf-8") + except OSError as exc: + logger.warning("Cannot read %s: %s", py_path, exc) + return [] + + try: + tree = ast.parse(source, filename=str(py_path)) + except SyntaxError as exc: + logger.warning("Syntax error in %s: %s", py_path, exc) + return [] + + entries: List[Tuple[str, int, str]] = [] + + for node in ast.walk(tree): + if not isinstance(node, ast.Call): + continue + + # Check for function calls: _("..."), gettext("..."), ngettext("...") + func_name = None + if isinstance(node.func, ast.Name): + func_name = node.func.id + elif isinstance(node.func, ast.Attribute): + func_name = node.func.attr + + if func_name not in _GETTEXT_FUNCTIONS: + continue + + if not node.args: + continue + + # Extract the first string argument (msgid) + first_arg = node.args[0] + if isinstance(first_arg, ast.Constant) and isinstance(first_arg.value, str): + entries.append((str(py_path), node.lineno, first_arg.value)) + elif isinstance(first_arg, ast.JoinedStr): + # f-strings are not extractable — warn + logger.warning( + "%s:%d: f-string in %s() is not translatable", + py_path, node.lineno, func_name, + ) + + return entries + + +# Regex patterns for lines that vary only by timestamp in .pot files. +_POT_CREATION_DATE_RE = re.compile(r'^"POT-Creation-Date:.*\\n"\s*$') +_GENERATED_COMMENT_RE = re.compile(r'^# Generated: ') + + +def _normalize_pot_content(content: str) -> str: + """Strip timestamp-varying lines from ``.pot`` content for comparison. + + Removes ``POT-Creation-Date`` header values and ``# Generated:`` + comment lines so that two ``.pot`` files can be compared ignoring + metadata that changes on every regeneration. 
+ """ + lines = content.splitlines(True) + return "".join( + line for line in lines + if not _POT_CREATION_DATE_RE.match(line) + and not _GENERATED_COMMENT_RE.match(line) + ) + + +def _escape_po_string(s: str) -> str: + """Escape a string for use in a .po/.pot file.""" + return ( + s.replace("\\", "\\\\") + .replace('"', '\\"') + .replace("\n", "\\n") + .replace("\t", "\\t") + ) + + +# --------------------------------------------------------------------------- +# POT file generation +# --------------------------------------------------------------------------- + +def generate_pot( + scripts_dir: Path, + output_path: Path, +) -> int: + """ + Scan scripts_dir for *.py files, extract translatable strings, and + write a .pot file to output_path. + + Returns 0 on success, 1 on error. + """ + py_files = sorted(scripts_dir.glob("*.py")) + if not py_files: + logger.info("No Python files found in %s", scripts_dir) + + # Collect all entries: (file, line, msgid) + all_entries: List[Tuple[str, int, str]] = [] + for py_file in py_files: + entries = _extract_from_file(py_file) + all_entries.extend(entries) + + logger.info("Found %d translatable string(s) in %d file(s)", + len(all_entries), len(py_files)) + + # Deduplicate by msgid, collecting all references + msgid_refs: Dict[str, List[Tuple[str, int]]] = {} + for file_path, lineno, msgid in all_entries: + if msgid not in msgid_refs: + msgid_refs[msgid] = [] + msgid_refs[msgid].append((file_path, lineno)) + + # Generate .pot content + now = datetime.datetime.now(datetime.timezone.utc) + timestamp = now.strftime("%Y-%m-%d %H:%M+0000") + + lines: List[str] = [] + # POT header + lines.append('# Translation template for WHO SMART Guidelines Python scripts.') + lines.append(f'# Generated: {timestamp}') + lines.append('#') + lines.append('msgid ""') + lines.append('msgstr ""') + lines.append(f'"POT-Creation-Date: {timestamp}\\n"') + lines.append('"MIME-Version: 1.0\\n"') + lines.append('"Content-Type: text/plain; charset=UTF-8\\n"') + 
lines.append('"Content-Transfer-Encoding: 8bit\\n"') + lines.append('') + + # Entries + for msgid in sorted(msgid_refs.keys()): + refs = msgid_refs[msgid] + for file_path, lineno in refs: + # Make path relative to scripts_dir parent for readability + try: + rel = Path(file_path).relative_to(scripts_dir.parent) + except ValueError: + rel = Path(file_path) + lines.append(f'#: {rel}:{lineno}') + escaped = _escape_po_string(msgid) + lines.append(f'msgid "{escaped}"') + lines.append('msgstr ""') + lines.append('') + + new_content = "\n".join(lines) + "\n" + + # Skip writing when the only differences are timestamp metadata + # (POT-Creation-Date / # Generated) to avoid noisy commits. + if output_path.is_file(): + try: + old_content = output_path.read_text(encoding="utf-8") + if _normalize_pot_content(old_content) == _normalize_pot_content(new_content): + logger.info( + "Skipped %s: only timestamp changed (%d msgids unchanged)", + output_path, len(msgid_refs), + ) + return 0 + except OSError: + pass # fall through to write + + # Write output + output_path.parent.mkdir(parents=True, exist_ok=True) + output_path.write_text(new_content, encoding="utf-8") + logger.info("✓ Written %s (%d entries)", output_path, len(msgid_refs)) + + return 0 + + +# --------------------------------------------------------------------------- +# CLI +# --------------------------------------------------------------------------- + +def main(argv: Optional[List[str]] = None) -> int: + logging.basicConfig( + level=logging.INFO, + format="%(asctime)s %(levelname)s %(message)s", + datefmt="%H:%M:%S", + ) + + parser = argparse.ArgumentParser( + prog="extract_script_strings.py", + description="Extract translatable strings from Python scripts into a .pot file", + ) + parser.add_argument( + "--scripts-dir", default="input/scripts", + help="Directory containing Python scripts (default: input/scripts)", + ) + parser.add_argument( + "--output", default="input/scripts/translations/scripts.pot", + help="Output 
.pot file path (default: input/scripts/translations/scripts.pot)", + ) + args = parser.parse_args(argv) + + scripts_dir = Path(args.scripts_dir).resolve() + output_path = Path(args.output).resolve() + + if not scripts_dir.is_dir(): + logger.error("Scripts directory not found: %s", scripts_dir) + return 1 + + return generate_pot(scripts_dir, output_path) + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/input/scripts/extract_translations.py b/input/scripts/extract_translations.py index 1f4bc1f6fe..4939e7cdb0 100644 --- a/input/scripts/extract_translations.py +++ b/input/scripts/extract_translations.py @@ -11,7 +11,7 @@ - Custom SVG (.svg) in input/images/ - ArchiMate (.archimate) in input/archimate/ - UML diagrams (.svg/.xml) in input/diagrams/ - - Markdown pages (.md) in input/pagecontent/ ← replaces IG Publisher markdown POT + - Markdown pages (.md) in input/pagecontent/ ← one .pot per hand-authored page Note on Prism.js: syntax highlighting in generated pages is provided by Prism.js, which is already bundled by the base FHIR IG Publisher template @@ -39,6 +39,7 @@ import argparse import datetime import glob as glob_module +import io import logging import os import re @@ -168,6 +169,26 @@ """ +# Regex patterns for lines that vary only by timestamp/year in .pot files. +_POT_CREATION_DATE_RE = re.compile(r'^"POT-Creation-Date:.*\\n"\s*$') +_POT_COPYRIGHT_RE = re.compile(r"^# Copyright \(C\) \d{4} ") + + +def _normalize_pot_content(content: str) -> str: + """Strip timestamp-varying lines from ``.pot`` content for comparison. + + Removes ``POT-Creation-Date`` header values and ``# Copyright (C) YYYY`` + comment lines so that two ``.pot`` files can be compared ignoring + metadata that changes on every regeneration. 
+ """ + lines = content.splitlines(True) + return "".join( + line for line in lines + if not _POT_CREATION_DATE_RE.match(line) + and not _POT_COPYRIGHT_RE.match(line) + ) + + def _escape_pot(text: str) -> str: """Escape a string for inclusion in a msgid / msgstr value.""" text = text.replace("\\", "\\\\") @@ -837,49 +858,70 @@ def write_pot( timestamp = now.strftime("%Y-%m-%d %H:%M+0000") year = now.year - with open(output_path, "w", encoding="utf-8") as fh: - fh.write(POT_HEADER.format(year=year, timestamp=timestamp)) - - for msgid, locations in sorted(deduped.items(), key=lambda kv: kv[0].lower()): - # Write source/location comments, deduplicating source locations and URLs. - # The same (source_file, line) can appear more than once when entries for - # both the publication URL and the preview URL are merged together, so we - # suppress duplicate #. Source: lines while still emitting every unique URL. - seen_sources: set = set() - seen_urls: set = set() - for src_file, lineno, ctx_url in locations: - src_key = (src_file, lineno) - if src_key not in seen_sources: - fh.write(f"#. Source: {src_file}:{lineno}\n") - seen_sources.add(src_key) - if ctx_url not in seen_urls: - fh.write(f"#. URL: {ctx_url}\n") - seen_urls.add(ctx_url) - - # Standard gettext file:line reference (deduplicated) - seen_refs: set = set() - for src_file, lineno, _ in locations: - ref_key = (src_file, lineno) - if ref_key not in seen_refs: - fh.write(f"#: {src_file}:{lineno}\n") - seen_refs.add(ref_key) - - # Emit python-brace-format flag when the msgid contains brace - # variables produced by the Liquid {{ }} → {var} transformation. - # Only emit the flag when ALL brace groups are valid Python - # identifiers (possibly dotted) so that msgfmt --check-format - # can validate translations without false positives. 
- brace_vars = _MD_ANY_BRACE_RE.findall(msgid) - if brace_vars and all( - _MD_SIMPLE_BRACE_VAR_RE.fullmatch(v) for v in brace_vars - ): - fh.write("#, python-brace-format\n") - - escaped = _escape_pot(msgid) - fh.write(f'msgid "{escaped}"\n') - fh.write('msgstr ""\n\n') + buf = io.StringIO() + buf.write(POT_HEADER.format(year=year, timestamp=timestamp)) + + for msgid, locations in sorted(deduped.items(), key=lambda kv: kv[0].lower()): + # Write source/location comments, deduplicating source locations and URLs. + # The same (source_file, line) can appear more than once when entries for + # both the publication URL and the preview URL are merged together, so we + # suppress duplicate #. Source: lines while still emitting every unique URL. + seen_sources: set = set() + seen_urls: set = set() + for src_file, lineno, ctx_url in locations: + src_key = (src_file, lineno) + if src_key not in seen_sources: + buf.write(f"#. Source: {src_file}:{lineno}\n") + seen_sources.add(src_key) + if ctx_url not in seen_urls: + buf.write(f"#. URL: {ctx_url}\n") + seen_urls.add(ctx_url) + + # Standard gettext file:line reference (deduplicated) + seen_refs: set = set() + for src_file, lineno, _ in locations: + ref_key = (src_file, lineno) + if ref_key not in seen_refs: + buf.write(f"#: {src_file}:{lineno}\n") + seen_refs.add(ref_key) + + # Emit python-brace-format flag when the msgid contains brace + # variables produced by the Liquid {{ }} → {var} transformation. + # Only emit the flag when ALL brace groups are valid Python + # identifiers (possibly dotted) so that msgfmt --check-format + # can validate translations without false positives. 
+ brace_vars = _MD_ANY_BRACE_RE.findall(msgid) + if brace_vars and all( + _MD_SIMPLE_BRACE_VAR_RE.fullmatch(v) for v in brace_vars + ): + buf.write("#, python-brace-format\n") + + escaped = _escape_pot(msgid) + buf.write(f'msgid "{escaped}"\n') + buf.write('msgstr ""\n\n') + + new_content = buf.getvalue() logger = logging.getLogger(__name__) + + # Skip writing when the only differences are timestamp metadata + # (POT-Creation-Date / Copyright year) to avoid noisy commits. + if os.path.isfile(output_path): + try: + with open(output_path, "r", encoding="utf-8") as fh: + old_content = fh.read() + if _normalize_pot_content(old_content) == _normalize_pot_content(new_content): + logger.info( + f"Skipped {output_path}: only timestamp changed " + f"({len(deduped)} msgids unchanged)" + ) + return + except OSError: + pass # fall through to write + + with open(output_path, "w", encoding="utf-8") as fh: + fh.write(new_content) + logger.info(f"Wrote {len(deduped)} unique msgids to {output_path}") @@ -952,14 +994,24 @@ def _scan( # resources; markdown strings are extracted here via Python instead. # Auto-generated pages (StructureDefinition-*.md, ValueSet-*.md, etc.) are # excluded via _is_autogenerated_pagecontent so that only hand-authored - # narrative pages appear in pages.pot. + # narrative pages are extracted. + # Each hand-authored .md file gets its own .pot file (e.g. index.md → + # input/pagecontent/translations/index.pot) so that each page is a + # separate translation component in Weblate. 
pagecontent_dir = os.path.join(ig_root, "input", "pagecontent") - pagecontent_entries = _scan( - pagecontent_dir, ["*.md"], extract_markdown, - exclude_fn=_is_autogenerated_pagecontent, - ) - if pagecontent_entries is not None: - result[os.path.join(pagecontent_dir, "translations", "pages.pot")] = pagecontent_entries + if os.path.isdir(pagecontent_dir): + for md_path in sorted(glob_module.glob(os.path.join(pagecontent_dir, "*.md"))): + if _is_autogenerated_pagecontent(md_path): + logging.getLogger(__name__).debug( + "Skipping auto-generated file: %s", md_path + ) + continue + rel = os.path.relpath(md_path, ig_root) + entries = extract_markdown(rel, canonical) + if entries: + stem = os.path.splitext(os.path.basename(md_path))[0] + pot_path = os.path.join(pagecontent_dir, "translations", f"{stem}.pot") + result[pot_path] = entries return result diff --git a/input/scripts/generate_weblate_yaml.py b/input/scripts/generate_weblate_yaml.py new file mode 100644 index 0000000000..c80521b6bc --- /dev/null +++ b/input/scripts/generate_weblate_yaml.py @@ -0,0 +1,274 @@ +#!/usr/bin/env python3 +""" +generate_weblate_yaml.py — Generate weblate.yaml from sushi-config.yaml and +discovered .pot components. + +Reads translation configuration from sushi-config.yaml#translations (languages, +plural forms, services) and dynamically discovers .pot files to produce a +complete weblate.yaml that stays in sync with the repo. This eliminates +manual maintenance of weblate.yaml. + +Usage: + python generate_weblate_yaml.py [--repo-root .] 
[--output weblate.yaml] + +Author: WHO SMART Guidelines Team +""" + +import argparse +import logging +import sys +from pathlib import Path +from typing import Any, Dict, List, Optional +from urllib.parse import urlparse + +# Add parent directory to path for sibling imports +sys.path.insert(0, str(Path(__file__).resolve().parent)) + +from translation_config import ( + DakConfig, + DakConfigError, + LanguageEntry, + TranslationComponent, + discover_components, + get_languages, + load_dak_config, +) + +logger = logging.getLogger(__name__) + + +# --------------------------------------------------------------------------- +# YAML serialisation helpers (hand-rolled to avoid PyYAML dependency) +# --------------------------------------------------------------------------- + +def _yaml_scalar(value: Any) -> str: + """Format a scalar value for YAML output.""" + if isinstance(value, bool): + return "true" if value else "false" + if isinstance(value, int): + return str(value) + s = str(value) + # Quote if the string contains special YAML characters + if any(ch in s for ch in (":", "{", "}", "[", "]", ",", "&", "*", "#", + "?", "|", "-", "<", ">", "=", "!", "%", "@", + "\n")): + escaped = s.replace('"', '\\"') + return f'"{escaped}"' + if not s or s in ("true", "false", "null", "yes", "no"): + return f'"{s}"' + return s + + +def _indent(level: int) -> str: + return " " * level + + +# --------------------------------------------------------------------------- +# Component-specific check_flags +# --------------------------------------------------------------------------- + +# Components whose .pot files contain PlantUML source get extra check_flags +# to protect reserved keywords from translation. 
+_PLANTUML_CHECK_FLAGS = ( + "placeholders:^@startuml|^@enduml|participant|control|entity" + "|actor|boundary|database|collections|queue|skinparam" +) + + +def _check_flags_for_component(comp: TranslationComponent) -> Optional[str]: + """Return Weblate check_flags string if the component needs one.""" + # If the component sits under images-source (PlantUML diagrams) add + # PlantUML keyword placeholders. + rel = str(comp.translations_dir) + if "images-source" in rel: + return _PLANTUML_CHECK_FLAGS + return None + + +# --------------------------------------------------------------------------- +# YAML generation +# --------------------------------------------------------------------------- + +def _derive_repo_url(config: DakConfig) -> str: + """Derive the GitHub repo URL from dak.json fields.""" + preview = config.raw.get("previewUrl", "") + # previewUrl is usually https://WorldHealthOrganization.github.io/smart-base + if not preview: + return "" + parsed = urlparse(preview) + # urlparse lowercases hostname; use netloc to preserve original case + netloc = parsed.netloc or "" + hostname_lower = parsed.hostname or "" + # Only accept *.github.io hostnames (not arbitrary URLs containing "github.io") + if not hostname_lower.endswith(".github.io"): + return "" + # netloc preserves case: "WorldHealthOrganization.github.io" + org = netloc.split(".")[0] if "." in netloc else "" + path_parts = [p for p in parsed.path.split("/") if p] + if org and path_parts: + repo = path_parts[0] + return f"https://github.com/{org}/{repo}" + return "" + + +def _derive_publication_url(config: DakConfig) -> str: + """Derive the publication URL from dak.json fields.""" + return config.raw.get("publicationUrl", "") + + +def generate_weblate_yaml( + config: DakConfig, + components: List[TranslationComponent], + repo_root: Path, +) -> str: + """ + Generate the complete weblate.yaml content as a string. + + All values are derived from dak.json and the discovered .pot components. 
+ """ + lines: List[str] = [] + repo_url = _derive_repo_url(config) + pub_url = _derive_publication_url(config) + languages = get_languages(config) + + # Header comment + lines.append("# weblate.yaml — Weblate configuration for " + f"{config.title or config.name}") + lines.append("#") + lines.append("# AUTO-GENERATED by generate_weblate_yaml.py from sushi-config.yaml.") + lines.append("# Do not edit manually — re-run the generator instead:") + lines.append("# python input/scripts/generate_weblate_yaml.py") + lines.append("#") + lines.append(f"# Source: sushi-config.yaml ({config.id} v{config.version})") + lines.append("") + + # Project section + lines.append("project:") + lines.append(f" name: {_yaml_scalar(config.title or config.name)}") + if repo_url: + lines.append(f" url: {repo_url}") + if pub_url: + lines.append(f" web: {pub_url}") + lines.append("") + + # Components section + lines.append("components:") + for comp in components: + rel_dir = str(comp.translations_dir.relative_to(repo_root)) + rel_pot = str(comp.pot_path.relative_to(repo_root)) + lines.append("") + lines.append(f" - name: {_yaml_scalar(comp.slug)}") + lines.append(f" slug: {comp.slug}") + if repo_url: + lines.append(f" repo: {repo_url}") + lines.append(f" push: {repo_url}") + lines.append(" branch: main") + lines.append(f" filemask: {rel_dir}/*.po") + lines.append(f" template: {rel_pot}") + lines.append(f" new_base: {rel_pot}") + lines.append(" file_format: po") + lines.append(" vcs: git") + if pub_url: + lines.append(f" screenshot_url: {pub_url}") + lines.append(" push_on_commit: false") + lines.append(" commit_pending_age: 24") + check_flags = _check_flags_for_component(comp) + if check_flags: + lines.append(f" check_flags: {_yaml_scalar(check_flags)}") + + lines.append("") + + # Workflow section + lines.append("# Quality control workflow") + lines.append("workflow:") + lines.append(" approval_required: true") + lines.append(" suggestion_voting: true") + lines.append(" machine_translation:") 
+ lines.append(" - service: libretranslate") + lines.append("") + + # Languages section + lines.append("# Languages from sushi-config.yaml#translations.languages") + lines.append("languages:") + for lang in languages: + lines.append(f" - code: {lang.code}") + lines.append(f" name: {_yaml_scalar(lang.name)}") + lines.append(f" direction: {lang.direction}") + if lang.plural: + lines.append(f" plural: {_yaml_scalar(lang.plural)}") + lines.append("") + + return "\n".join(lines) + + +# --------------------------------------------------------------------------- +# CLI +# --------------------------------------------------------------------------- + +def main(argv: Optional[List[str]] = None) -> int: + logging.basicConfig( + level=logging.INFO, + format="%(asctime)s %(levelname)s %(message)s", + datefmt="%H:%M:%S", + ) + + parser = argparse.ArgumentParser( + prog="generate_weblate_yaml.py", + description="Generate weblate.yaml from sushi-config.yaml + discovered .pot files", + ) + parser.add_argument( + "--repo-root", default=".", + help="Path to repository root (default: current directory)", + ) + parser.add_argument( + "--output", default=None, + help="Output file path (default: /weblate.yaml)", + ) + parser.add_argument( + "--dry-run", action="store_true", + help="Print to stdout instead of writing file", + ) + args = parser.parse_args(argv) + + repo_root = Path(args.repo_root).resolve() + + # Load config (translations from sushi-config.yaml, identity from dak.json) + try: + config = load_dak_config(repo_root) + except DakConfigError as exc: + logger.error("Configuration error: %s", exc) + return 1 + + if not config.translations: + logger.error("No translations block in sushi-config.yaml or dak.json — nothing to generate") + return 1 + + # Check that Weblate is enabled + weblate_svc = config.translations.services.get("weblate") + if not weblate_svc or not weblate_svc.enabled: + logger.warning("Weblate is not enabled — generating anyway") + + # Discover components + 
components = discover_components(repo_root) + if not components: + logger.warning("No .pot files found — weblate.yaml will have no components") + + logger.info("Discovered %d component(s):", len(components)) + for comp in components: + logger.info(" %s → %s", comp.slug, comp.pot_path.relative_to(repo_root)) + + # Generate YAML + yaml_content = generate_weblate_yaml(config, components, repo_root) + + if args.dry_run: + print(yaml_content) + else: + output_path = Path(args.output) if args.output else repo_root / "weblate.yaml" + output_path.write_text(yaml_content, encoding="utf-8") + logger.info("Wrote %s", output_path) + + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/input/scripts/pull_crowdin_translations.py b/input/scripts/pull_crowdin_translations.py new file mode 100644 index 0000000000..8b1e9f6202 --- /dev/null +++ b/input/scripts/pull_crowdin_translations.py @@ -0,0 +1,313 @@ +#!/usr/bin/env python3 +""" +pull_crowdin_translations.py — Crowdin v2 API translation service adapter. + +Fetches .po files from the Crowdin v2 REST API and writes them into the +repository's translations directories. + +Usage: + python pull_crowdin_translations.py [options] + +Environment variables: + CROWDIN_API_TOKEN Required. Crowdin API token. + +Exit codes: + 0 Success + 1 One or more download errors + +Author: WHO SMART Guidelines Team +""" + +import logging +import os +import sys +import tempfile +from pathlib import Path +from typing import Dict, List, Optional + +try: + import requests +except ImportError: + sys.exit("ERROR: 'requests' package is required. 
Run: pip install requests>=2.31.0") + +sys.path.insert(0, str(Path(__file__).resolve().parent)) + +from translation_config import ( + DakConfigError, + discover_components, + get_language_codes, + load_dak_config, +) +from translation_security import DEFAULT_TIMEOUT_SECONDS, MAX_RESPONSE_BYTES + +logger = logging.getLogger(__name__) + +# Crowdin API v2 base URL +CROWDIN_API_URL = "https://api.crowdin.com/api/v2" + + +def _list_crowdin_files( + session: requests.Session, + project_id: str, +) -> Dict[str, int]: + """ + List all files in the Crowdin project and return a mapping of + filename (stem, lowercased) → file ID. + + The Crowdin v2 ``/projects/{id}/files`` endpoint is paginated; this + helper fetches all pages. + """ + mapping: Dict[str, int] = {} + url = f"{CROWDIN_API_URL}/projects/{project_id}/files" + offset = 0 + limit = 250 + + while True: + params = {"offset": offset, "limit": limit} + try: + resp = session.get(url, params=params, timeout=DEFAULT_TIMEOUT_SECONDS) + except requests.exceptions.RequestException as exc: + logger.error(" Crowdin list-files request failed: %s", exc) + break + + if resp.status_code != 200: + logger.error(" Crowdin list-files HTTP %d: %s", + resp.status_code, resp.text[:500]) + break + + data = resp.json().get("data", []) + if not data: + break + + for item in data: + file_data = item.get("data", {}) + file_id = file_data.get("id") + file_name = file_data.get("name", "") + if file_id and file_name: + # Map by stem (without extension) for component matching + stem = Path(file_name).stem.lower() + mapping[stem] = file_id + # Also map by full filename for exact matches + mapping[file_name.lower()] = file_id + + if len(data) < limit: + break + offset += limit + + return mapping + + +def _build_translation_export( + session: requests.Session, + project_id: str, + file_id: int, + language: str, +) -> Optional[str]: + """ + Request a translation export build from Crowdin and return the download URL. 
+ + Returns the download URL or None on failure. + """ + url = f"{CROWDIN_API_URL}/projects/{project_id}/translations/exports" + payload = { + "targetLanguageId": language, + "fileIds": [file_id], + "format": "gettext", + } + + try: + resp = session.post(url, json=payload, timeout=DEFAULT_TIMEOUT_SECONDS) + except requests.exceptions.RequestException as exc: + logger.error(" Crowdin export request failed: %s", exc) + return None + + if resp.status_code not in (200, 201): + logger.error(" Crowdin export HTTP %d: %s", + resp.status_code, resp.text[:500]) + return None + + data = resp.json().get("data", {}) + return data.get("url") + + +def _download_po( + session: requests.Session, + download_url: str, + language: str, + output_dir: Path, +) -> str: + """ + Download a .po file from a Crowdin export URL. + + Returns one of: "downloaded", "not_found", "error" + """ + logger.info(" GET %s", download_url[:80] + "...") + + try: + resp = session.get(download_url, timeout=DEFAULT_TIMEOUT_SECONDS, stream=True) + except requests.exceptions.RequestException as exc: + logger.error(" Network error: %s", exc) + return "error" + + if resp.status_code == 404: + logger.info(" Not found (404)") + return "not_found" + + if resp.status_code != 200: + logger.error(" Unexpected HTTP %d", resp.status_code) + return "error" + + output_dir.mkdir(parents=True, exist_ok=True) + dest_path = output_dir / f"{language}.po" + + try: + with tempfile.NamedTemporaryFile( + dir=output_dir, suffix=".po.tmp", delete=False + ) as tmp_fh: + tmp_path = Path(tmp_fh.name) + total = 0 + for chunk in resp.iter_content(chunk_size=65536): + total += len(chunk) + if total > MAX_RESPONSE_BYTES: + logger.error(" Response exceeds size limit — aborting") + tmp_path.unlink(missing_ok=True) + return "error" + tmp_fh.write(chunk) + except OSError as exc: + logger.error(" Write error: %s", exc) + tmp_path.unlink(missing_ok=True) + return "error" + + content = tmp_path.read_bytes() + if b"msgid" not in content: + 
logger.warning(" Response is not a valid .po file — skipping") + tmp_path.unlink(missing_ok=True) + return "not_found" + + tmp_path.replace(dest_path) + logger.info(" ✓ Written %s (%d bytes)", dest_path, total) + return "downloaded" + + +def pull_translations( + repo_root: Path, + project_slug: str, + component_filter: Optional[str] = None, + language_filter: Optional[str] = None, +) -> int: + """ + Pull .po files from Crowdin for all applicable components and languages. + + Returns 0 on success, 1 on error. + """ + api_token = os.environ.get("CROWDIN_API_TOKEN", "") + if not api_token: + logger.error("CROWDIN_API_TOKEN not set") + return 1 + + try: + config = load_dak_config(repo_root) + except DakConfigError: + logger.warning("dak.json not found or invalid — skipping") + return 0 + + languages = get_language_codes(config) + if language_filter: + languages = [language_filter] if language_filter in languages else [] + + components = discover_components(repo_root) + if component_filter: + components = [c for c in components if c.slug == component_filter] + + # Get Crowdin project ID from service config + cr_config = ( + config.translations.services.get("crowdin", None) + if config.translations else None + ) + project_id = (cr_config.extra.get("projectId", "") if cr_config else "") or "" + if not project_id: + logger.error("Crowdin projectId not configured in translation config") + return 1 + + session = requests.Session() + session.headers.update({ + "Authorization": f"Bearer {api_token}", + "Content-Type": "application/json", + "User-Agent": "SMART-Base-CI/1.0", + }) + + counts: Dict[str, int] = {"downloaded": 0, "not_found": 0, "error": 0} + + # Resolve Crowdin file IDs via the files API so we can request + # per-file translation exports. 
+ logger.info("Listing Crowdin project files to resolve file IDs…") + file_id_map = _list_crowdin_files(session, project_id) + if not file_id_map: + logger.error("No files found in Crowdin project %s", project_id) + return 1 + logger.info(" Found %d file entries", len(file_id_map)) + + for comp in components: + logger.info("Component: %s", comp.slug) + # Match component to a Crowdin file ID by slug or pot stem + file_id = ( + file_id_map.get(comp.slug) + or file_id_map.get(comp.pot_stem.lower()) + or file_id_map.get(f"{comp.pot_stem}.pot") + ) + if file_id is None: + logger.warning( + " No Crowdin file matched for component %s " + "(tried slug=%r, stem=%r, filename=%r) — skipping", + comp.slug, comp.slug, comp.pot_stem.lower(), + f"{comp.pot_stem}.pot", + ) + counts["not_found"] += len(languages) + continue + + for lang in languages: + logger.info(" Language: %s", lang) + # Build export and get download URL + download_url = _build_translation_export( + session, project_id, file_id, lang, + ) + if not download_url: + counts["error"] += 1 + continue + + result = _download_po( + session, download_url, lang, comp.translations_dir, + ) + counts[result] += 1 + + logger.info( + "Summary: %d downloaded, %d not found, %d errors", + counts["downloaded"], counts["not_found"], counts["error"], + ) + return 1 if counts["error"] > 0 else 0 + + +if __name__ == "__main__": + import argparse + + logging.basicConfig( + level=logging.INFO, + format="%(asctime)s %(levelname)s %(message)s", + datefmt="%H:%M:%S", + ) + + parser = argparse.ArgumentParser(description="Pull translations from Crowdin") + parser.add_argument("--repo-root", default=".") + parser.add_argument("--component", default="") + parser.add_argument("--language", default="") + args = parser.parse_args() + + from translation_config import derive_project_slug_from_env + project_slug = derive_project_slug_from_env(Path(args.repo_root).resolve()) + + sys.exit(pull_translations( + repo_root=Path(args.repo_root).resolve(), + 
project_slug=project_slug, + component_filter=args.component or None, + language_filter=args.language or None, + )) diff --git a/input/scripts/pull_launchpad_translations.py b/input/scripts/pull_launchpad_translations.py new file mode 100644 index 0000000000..429438d443 --- /dev/null +++ b/input/scripts/pull_launchpad_translations.py @@ -0,0 +1,196 @@ +#!/usr/bin/env python3 +""" +pull_launchpad_translations.py — Launchpad translation service adapter. + +Fetches .po files from the Launchpad Translations REST API and writes them +into the repository's translations directories. + +Usage: + python pull_launchpad_translations.py [options] + +Environment variables: + LAUNCHPAD_API_TOKEN Required. Launchpad API token. + +Exit codes: + 0 Success + 1 One or more download errors + +Author: WHO SMART Guidelines Team +""" + +import logging +import os +import sys +import tempfile +from pathlib import Path +from typing import Dict, List, Optional + +try: + import requests +except ImportError: + sys.exit("ERROR: 'requests' package is required. Run: pip install requests>=2.31.0") + +sys.path.insert(0, str(Path(__file__).resolve().parent)) + +from translation_config import ( + DakConfigError, + discover_components, + get_language_codes, + load_dak_config, +) +from translation_security import DEFAULT_TIMEOUT_SECONDS, MAX_RESPONSE_BYTES + +logger = logging.getLogger(__name__) + +# Launchpad API base URL +LAUNCHPAD_API_URL = "https://api.launchpad.net/devel" + + +def _download_po( + session: requests.Session, + project: str, + component_slug: str, + language: str, + output_dir: Path, +) -> str: + """ + Download a .po file from Launchpad for one (component, language) pair. 
+ + Returns one of: "downloaded", "not_found", "error" + """ + # Launchpad translation export URL pattern + url = ( + f"{LAUNCHPAD_API_URL}/{project}/+source/{component_slug}" + f"/+pots/{component_slug}/{language}/+export" + ) + logger.info(" GET %s", url) + + try: + resp = session.get(url, timeout=DEFAULT_TIMEOUT_SECONDS, stream=True) + except requests.exceptions.RequestException as exc: + logger.error(" Network error: %s", exc) + return "error" + + if resp.status_code == 404: + logger.info(" Not found (404)") + return "not_found" + + if resp.status_code != 200: + logger.error(" Unexpected HTTP %d", resp.status_code) + return "error" + + # Stream into temp file with size guard + output_dir.mkdir(parents=True, exist_ok=True) + dest_path = output_dir / f"{language}.po" + + try: + with tempfile.NamedTemporaryFile( + dir=output_dir, suffix=".po.tmp", delete=False + ) as tmp_fh: + tmp_path = Path(tmp_fh.name) + total = 0 + for chunk in resp.iter_content(chunk_size=65536): + total += len(chunk) + if total > MAX_RESPONSE_BYTES: + logger.error(" Response exceeds size limit — aborting") + tmp_path.unlink(missing_ok=True) + return "error" + tmp_fh.write(chunk) + except OSError as exc: + logger.error(" Write error: %s", exc) + tmp_path.unlink(missing_ok=True) + return "error" + + # Validate content + content = tmp_path.read_bytes() + if b"msgid" not in content: + logger.warning(" Response is not a valid .po file — skipping") + tmp_path.unlink(missing_ok=True) + return "not_found" + + tmp_path.replace(dest_path) + logger.info(" ✓ Written %s (%d bytes)", dest_path, total) + return "downloaded" + + +def pull_translations( + repo_root: Path, + project_slug: str, + component_filter: Optional[str] = None, + language_filter: Optional[str] = None, +) -> int: + """ + Pull .po files from Launchpad for all applicable components and languages. + + Returns 0 on success, 1 on error. 
+ """ + api_token = os.environ.get("LAUNCHPAD_API_TOKEN", "") + if not api_token: + logger.error("LAUNCHPAD_API_TOKEN not set") + return 1 + + try: + config = load_dak_config(repo_root) + except DakConfigError: + logger.warning("dak.json not found or invalid — skipping") + return 0 + + languages = get_language_codes(config) + if language_filter: + languages = [language_filter] if language_filter in languages else [] + + components = discover_components(repo_root) + if component_filter: + components = [c for c in components if c.slug == component_filter] + + session = requests.Session() + session.headers.update({ + "Authorization": f"OAuth oauth_token={api_token}", + "User-Agent": "SMART-Base-CI/1.0", + }) + + # Launchpad project name from service config + lp_config = config.translations.services.get("launchpad", None) if config.translations else None + lp_project = (lp_config.extra.get("project", "") if lp_config else "") or project_slug + + counts: Dict[str, int] = {"downloaded": 0, "not_found": 0, "error": 0} + + for comp in components: + logger.info("Component: %s", comp.slug) + for lang in languages: + result = _download_po( + session, lp_project, comp.slug, lang, comp.translations_dir, + ) + counts[result] += 1 + + logger.info( + "Summary: %d downloaded, %d not found, %d errors", + counts["downloaded"], counts["not_found"], counts["error"], + ) + return 1 if counts["error"] > 0 else 0 + + +if __name__ == "__main__": + import argparse + + logging.basicConfig( + level=logging.INFO, + format="%(asctime)s %(levelname)s %(message)s", + datefmt="%H:%M:%S", + ) + + parser = argparse.ArgumentParser(description="Pull translations from Launchpad") + parser.add_argument("--repo-root", default=".") + parser.add_argument("--component", default="") + parser.add_argument("--language", default="") + args = parser.parse_args() + + from translation_config import derive_project_slug_from_env + project_slug = derive_project_slug_from_env(Path(args.repo_root).resolve()) + + 
sys.exit(pull_translations( + repo_root=Path(args.repo_root).resolve(), + project_slug=project_slug, + component_filter=args.component or None, + language_filter=args.language or None, + )) diff --git a/input/scripts/pull_translations.py b/input/scripts/pull_translations.py new file mode 100644 index 0000000000..727c23023e --- /dev/null +++ b/input/scripts/pull_translations.py @@ -0,0 +1,269 @@ +#!/usr/bin/env python3 +""" +pull_translations.py — Multi-service translation pull orchestrator. + +For each enabled service in sushi-config.yaml (or dak.json fallback), calls +the appropriate service adapter script, collects updated .po files, and writes +them to the repo. This is the single entry point called by the +pull_translations.yml workflow; it never contains service-specific logic. + +Usage: + python pull_translations.py [--service weblate|launchpad|crowdin|all] + [--component SLUG] [--language CODE] [--repo-root .] + +Environment variables: + WEBLATE_API_TOKEN Weblate API token (if pulling from Weblate) + CROWDIN_API_TOKEN Crowdin API token (if pulling from Crowdin) + LAUNCHPAD_API_TOKEN Launchpad API token (if pulling from Launchpad) + +Exit codes: + 0 Success (or dak.json not found — warning + skip) + 1 One or more pull errors occurred + 2 Bad arguments + +Author: WHO SMART Guidelines Team +""" + +import argparse +import logging +import os +import sys +from pathlib import Path +from typing import List, Optional + +sys.path.insert(0, str(Path(__file__).resolve().parent)) + +from translation_config import ( + DakConfigError, + derive_project_slug_from_env, + get_enabled_services, + get_language_codes, + get_project_slug, + load_dak_config, + discover_components, +) +from translation_security import ( + assert_no_secret_in_env, + redact_for_log, + sanitize_lang_code, + sanitize_slug, +) + +logger = logging.getLogger(__name__) + + +# --------------------------------------------------------------------------- +# Service adapters (imported on demand to avoid hard 
dependency) +# --------------------------------------------------------------------------- + +def _pull_weblate( + repo_root: Path, + project_slug: str, + component_filter: Optional[str], + language_filter: Optional[str], + weblate_url: str, +) -> int: + """Delegate to pull_weblate_translations.py.""" + from pull_weblate_translations import pull_translations as weblate_pull + + api_token = os.environ.get("WEBLATE_API_TOKEN", "") + if not api_token: + logger.error("WEBLATE_API_TOKEN not set — cannot pull from Weblate") + return 1 + + return weblate_pull( + weblate_url=weblate_url, + project=project_slug, + output_root=repo_root, + component_filter=component_filter, + language_filter=language_filter, + api_token=api_token, + ) + + +def _pull_launchpad( + repo_root: Path, + project_slug: str, + component_filter: Optional[str], + language_filter: Optional[str], +) -> int: + """Delegate to pull_launchpad_translations.py.""" + try: + from pull_launchpad_translations import pull_translations as lp_pull + return lp_pull( + repo_root=repo_root, + project_slug=project_slug, + component_filter=component_filter, + language_filter=language_filter, + ) + except ImportError: + logger.error("pull_launchpad_translations module not available") + return 1 + + +def _pull_crowdin( + repo_root: Path, + project_slug: str, + component_filter: Optional[str], + language_filter: Optional[str], +) -> int: + """Delegate to pull_crowdin_translations.py.""" + try: + from pull_crowdin_translations import pull_translations as cr_pull + return cr_pull( + repo_root=repo_root, + project_slug=project_slug, + component_filter=component_filter, + language_filter=language_filter, + ) + except ImportError: + logger.error("pull_crowdin_translations module not available") + return 1 + + +# --------------------------------------------------------------------------- +# Orchestrator +# --------------------------------------------------------------------------- + +def pull_all( + repo_root: Path, + 
service_filter: str = "all", + component_filter: Optional[str] = None, + language_filter: Optional[str] = None, +) -> int: + """ + Pull translations from all enabled services. + + Returns 0 on success, 1 on error. + """ + try: + config = load_dak_config(repo_root) + except DakConfigError: + logger.warning("Configuration error — skipping translation pull") + return 0 + + enabled = get_enabled_services(config) + if not enabled: + logger.info("No translation services enabled") + return 0 + + # Derive project slug from GITHUB_REPOSITORY or fallback + project_slug = derive_project_slug_from_env(repo_root) + + logger.info("Project slug: %s", project_slug) + logger.info("Enabled services: %s", ", ".join(enabled.keys())) + + errors = 0 + + # Filter services + services_to_pull = ( + enabled if service_filter == "all" + else {service_filter: enabled[service_filter]} + if service_filter in enabled + else {} + ) + + if not services_to_pull: + if service_filter != "all": + logger.warning("Service %r not enabled in translation config", service_filter) + return 0 + + for svc_name, svc_config in services_to_pull.items(): + logger.info("── Pulling from: %s ──", svc_name) + + if svc_name == "weblate": + weblate_url = svc_config.url or "https://hosted.weblate.org" + rc = _pull_weblate( + repo_root, project_slug, + component_filter, language_filter, weblate_url, + ) + elif svc_name == "launchpad": + rc = _pull_launchpad( + repo_root, project_slug, + component_filter, language_filter, + ) + elif svc_name == "crowdin": + rc = _pull_crowdin( + repo_root, project_slug, + component_filter, language_filter, + ) + else: + logger.warning("Unknown service: %s — skipping", svc_name) + continue + + if rc != 0: + errors += 1 + + return 1 if errors > 0 else 0 + + +# --------------------------------------------------------------------------- +# CLI +# --------------------------------------------------------------------------- + +def main(argv: Optional[List[str]] = None) -> int: + 
logging.basicConfig( + level=logging.INFO, + format="%(asctime)s %(levelname)s %(message)s", + datefmt="%H:%M:%S", + ) + + parser = argparse.ArgumentParser( + prog="pull_translations.py", + description="Pull translations from all enabled services", + ) + parser.add_argument( + "--service", default="all", + choices=["all", "weblate", "launchpad", "crowdin"], + help="Service to pull from (default: all enabled)", + ) + parser.add_argument( + "--component", default="", + help="Restrict to one component slug", + ) + parser.add_argument( + "--language", default="", + help="Restrict to one language code", + ) + parser.add_argument( + "--repo-root", default=".", + help="Repository root (default: current directory)", + ) + args = parser.parse_args(argv) + + repo_root = Path(args.repo_root).resolve() + + component_filter = None + if args.component: + try: + component_filter = sanitize_slug(args.component, "component") + except ValueError as exc: + logger.error("%s", exc) + return 2 + + language_filter = None + if args.language: + try: + language_filter = sanitize_lang_code(args.language) + except ValueError as exc: + logger.error("%s", exc) + return 2 + + # Security checks + for token_env in ("WEBLATE_API_TOKEN", "CROWDIN_API_TOKEN", "LAUNCHPAD_API_TOKEN"): + try: + assert_no_secret_in_env(token_env) + except RuntimeError as exc: + logger.error("%s", exc) + return 1 + + return pull_all( + repo_root=repo_root, + service_filter=args.service, + component_filter=component_filter, + language_filter=language_filter, + ) + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/input/scripts/register_all_dak_projects.py b/input/scripts/register_all_dak_projects.py new file mode 100644 index 0000000000..8e5e7e2626 --- /dev/null +++ b/input/scripts/register_all_dak_projects.py @@ -0,0 +1,182 @@ +#!/usr/bin/env python3 +""" +register_all_dak_projects.py — Discover all repos in a GitHub org that contain +a dak.json file, then register each with all enabled translation services. 
def discover_dak_repos(org: str, github_token: str) -> List[str]:
    """
    Use the GitHub Code Search API to find all repos in *org* that contain
    a ``dak.json`` file.

    Follows the ``Link: rel="next"`` pagination header, so organizations
    with more than 100 matching repositories are fully discovered (a single
    ``per_page=100`` request silently truncates larger result sets).

    Args:
        org: GitHub organization login.
        github_token: Token with read access, sent as ``token`` auth.

    Returns:
        A sorted list of repository names (not ``org/name`` full names).
        Returns an empty list on any network or HTTP error.
    """
    session = requests.Session()
    session.headers.update({
        "Authorization": f"token {github_token}",
        "Accept": "application/vnd.github.v3+json",
        "User-Agent": "SMART-Base-CI/1.0",
    })

    query = f"filename:dak.json org:{org}"
    url = "https://api.github.com/search/code"
    params = {"q": query, "per_page": 100, "page": 1}

    repos = set()
    while True:
        try:
            resp = session.get(url, params=params, timeout=DEFAULT_TIMEOUT_SECONDS)
        except requests.exceptions.RequestException as exc:
            logger.error("GitHub API error: %s", exc)
            return []

        if resp.status_code != 200:
            logger.error("GitHub search failed: HTTP %d", resp.status_code)
            return []

        data = resp.json()
        items = data.get("items", [])
        for item in items:
            repo_name = item.get("repository", {}).get("name", "")
            if repo_name:
                repos.add(repo_name)

        # requests parses the Link header into resp.links; absence of a
        # "next" relation (or an empty page) means we have consumed all
        # search results.
        if not items or "next" not in resp.links:
            break
        params["page"] += 1

    return sorted(repos)
+ """ + with tempfile.TemporaryDirectory() as tmpdir: + clone_url = f"https://github.com/{org}/{repo_name}.git" + logger.info("Cloning %s (shallow)...", clone_url) + + result = subprocess.run( + ["git", "clone", "--depth=1", clone_url, tmpdir], + capture_output=True, text=True, timeout=120, + ) + if result.returncode != 0: + logger.error("Clone failed for %s: %s", repo_name, result.stderr[:500]) + return False + + # Call register_translation_project.py + script = Path(__file__).resolve().parent / "register_translation_project.py" + result = subprocess.run( + [sys.executable, str(script), + "--repo-name", repo_name, + "--repo-root", tmpdir, + "--org", org], + timeout=300, + ) + return result.returncode == 0 + + +def main(argv: Optional[List[str]] = None) -> int: + logging.basicConfig( + level=logging.INFO, + format="%(asctime)s %(levelname)s %(message)s", + datefmt="%H:%M:%S", + ) + + parser = argparse.ArgumentParser( + prog="register_all_dak_projects.py", + description="Discover all DAK IG repos and register them with translation services", + ) + parser.add_argument( + "--org", default="WorldHealthOrganization", + help="GitHub organization (default: WorldHealthOrganization)", + ) + parser.add_argument( + "--dry-run", action="store_true", + help="List discovered repos without registering", + ) + args = parser.parse_args(argv) + + # Security checks + for token_env in ("WEBLATE_API_TOKEN", "CROWDIN_API_TOKEN", "LAUNCHPAD_API_TOKEN"): + try: + assert_no_secret_in_env(token_env) + except RuntimeError as exc: + logger.error("%s", exc) + return 1 + + github_token = os.environ.get("GITHUB_TOKEN", "") + if not github_token: + logger.error("GITHUB_TOKEN not set — cannot discover repos") + return 1 + + logger.info("Discovering DAK repos in org: %s", args.org) + repos = discover_dak_repos(args.org, github_token) + + if not repos: + logger.info("No DAK repos found") + return 0 + + logger.info("Found %d DAK repo(s): %s", len(repos), ", ".join(repos)) + + if args.dry_run: + 
logger.info("Dry run — not registering") + return 0 + + errors = 0 + for repo_name in repos: + logger.info("── Registering: %s ──", repo_name) + if not register_single_repo(repo_name, args.org, github_token): + errors += 1 + + if errors: + logger.error("%d/%d registrations failed", errors, len(repos)) + return 1 + + logger.info("All %d registrations completed successfully", len(repos)) + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/input/scripts/register_translation_project.py b/input/scripts/register_translation_project.py new file mode 100644 index 0000000000..568d93c2af --- /dev/null +++ b/input/scripts/register_translation_project.py @@ -0,0 +1,335 @@ +#!/usr/bin/env python3 +""" +register_translation_project.py — Idempotently create or verify the translation +project and all dynamically discovered components for one IG repo on every +enabled translation service. + +Usage: + python register_translation_project.py --repo-name smart-hiv [--repo-root /path] + +Environment variables: + WEBLATE_API_TOKEN Weblate API token (if Weblate enabled) + CROWDIN_API_TOKEN Crowdin API token (if Crowdin enabled) + LAUNCHPAD_API_TOKEN Launchpad API token (if Launchpad enabled) + +Exit codes: + 0 Registration completed (or dak.json not found — warning + skip) + 1 Registration error + +Author: WHO SMART Guidelines Team +""" + +import argparse +import logging +import os +import sys +from pathlib import Path +from typing import List, Optional + +try: + import requests +except ImportError: + sys.exit("ERROR: 'requests' package is required. 
def _register_weblate_project(
    project_slug: str,
    config: DakConfig,
    components: List[TranslationComponent],
    api_token: str,
    weblate_url: str,
    repo_root: Path = Path("."),
) -> bool:
    """
    Idempotently create/verify a Weblate project and its components.

    Probes the project with a GET first; only a 404 triggers creation.
    Components are then registered one by one so a single failure does
    not stop the rest.

    Args:
        project_slug: Weblate project slug.
        config: Parsed dak.json configuration (title/name/publicationUrl
            are used for the project payload).
        components: Components discovered from the repo's .pot files.
        api_token: Weblate API token (sent as ``Token`` auth).
        weblate_url: Base URL of the Weblate instance.
        repo_root: Repo root, used to relativize component paths.

    Returns:
        True on success, False on any error.
    """
    session = requests.Session()
    session.headers.update({
        "Authorization": f"Token {api_token}",
        "Content-Type": "application/json",
        "User-Agent": "SMART-Base-CI/1.0",
    })

    # Check if project exists
    project_url = f"{weblate_url}/api/projects/{project_slug}/"
    try:
        resp = session.get(project_url, timeout=DEFAULT_TIMEOUT_SECONDS)
    except requests.exceptions.RequestException as exc:
        logger.error("Network error checking project: %s", exc)
        return False

    if resp.status_code == 404:
        # Create project
        logger.info("Creating Weblate project: %s", project_slug)
        create_url = f"{weblate_url}/api/projects/"
        payload = {
            "name": config.title or config.name,
            "slug": project_slug,
            "web": config.raw.get("publicationUrl", ""),
        }
        try:
            resp = session.post(
                create_url, json=payload, timeout=DEFAULT_TIMEOUT_SECONDS
            )
            if resp.status_code not in (200, 201):
                logger.error(
                    "Failed to create project %s: HTTP %d %s",
                    project_slug, resp.status_code, resp.text[:500],
                )
                return False
            logger.info("✓ Created project: %s", project_slug)
        except requests.exceptions.RequestException as exc:
            logger.error("Network error creating project: %s", exc)
            return False
    elif resp.status_code == 200:
        logger.info("✓ Project already exists: %s", project_slug)
    else:
        logger.error(
            "Unexpected HTTP %d checking project %s", resp.status_code, project_slug
        )
        return False

    # Register each component
    all_ok = True
    for comp in components:
        ok = _register_weblate_component(
            session, weblate_url, project_slug, comp, repo_root
        )
        if not ok:
            all_ok = False

    return all_ok


def _register_weblate_component(
    session: requests.Session,
    weblate_url: str,
    project_slug: str,
    comp: TranslationComponent,
    repo_root: Path,
) -> bool:
    """Idempotently register one Weblate component.

    Paths sent to Weblate (``filemask``, ``new_base``, ``template``) are
    repo-relative and normalized with ``Path.as_posix()`` — ``str(Path)``
    would produce backslashes on Windows, which Weblate rejects.
    """
    comp_url = f"{weblate_url}/api/components/{project_slug}/{comp.slug}/"

    try:
        resp = session.get(comp_url, timeout=DEFAULT_TIMEOUT_SECONDS)
    except requests.exceptions.RequestException as exc:
        logger.error("Network error checking component %s: %s", comp.slug, exc)
        return False

    if resp.status_code == 200:
        logger.info("  ✓ Component already exists: %s", comp.slug)
        return True

    if resp.status_code == 404:
        logger.info("  Creating component: %s", comp.slug)
        create_url = f"{weblate_url}/api/projects/{project_slug}/components/"

        # Weblate expects repo-relative paths, not absolute filesystem paths.
        try:
            rel_translations_dir = comp.translations_dir.relative_to(repo_root)
        except ValueError:
            rel_translations_dir = comp.translations_dir
        try:
            rel_pot_path = comp.pot_path.relative_to(repo_root)
        except ValueError:
            rel_pot_path = comp.pot_path

        payload = {
            "name": comp.slug,
            "slug": comp.slug,
            "file_format": "po",
            "filemask": f"{rel_translations_dir.as_posix()}/*.po",
            # new_base: used by Weblate to create new translation files.
            # template: used by Weblate to locate the source .pot file.
            # Both point to the same POT file per Weblate component config.
            "new_base": rel_pot_path.as_posix(),
            "template": rel_pot_path.as_posix(),
            "vcs": "github",
        }
        try:
            resp = session.post(
                create_url, json=payload, timeout=DEFAULT_TIMEOUT_SECONDS
            )
            if resp.status_code in (200, 201):
                logger.info("  ✓ Created component: %s", comp.slug)
                return True
            else:
                logger.error(
                    "  Failed to create component %s: HTTP %d %s",
                    comp.slug, resp.status_code, resp.text[:500],
                )
                return False
        except requests.exceptions.RequestException as exc:
            logger.error("Network error creating component %s: %s", comp.slug, exc)
            return False
    else:
        logger.error(
            "  Unexpected HTTP %d checking component %s",
            resp.status_code, comp.slug,
        )
        return False
+ """ + # Load configuration + try: + config = load_dak_config(repo_root) + except DakConfigError: + logger.warning("dak.json not found or invalid at %s — skipping", repo_root) + return 0 # REG-002: missing dak.json → warning + exit 0 + + # Discover components + components = discover_components(repo_root) + if not components: + logger.info("No .pot files found — no components to register") + + project_slug = get_project_slug(github_org, repo_name) + logger.info("Project slug: %s", project_slug) + + enabled_services = get_enabled_services(config) + if not enabled_services: + logger.info("No translation services enabled") + return 0 + + errors = False + + # Weblate + if "weblate" in enabled_services: + api_token = os.environ.get("WEBLATE_API_TOKEN", "") + if not api_token: + logger.error("WEBLATE_API_TOKEN not set — cannot register with Weblate") + errors = True + else: + weblate_url = enabled_services["weblate"].url or "https://hosted.weblate.org" + try: + weblate_url = sanitize_url(weblate_url, "weblate_url") + except ValueError as exc: + logger.error("Invalid Weblate URL: %s", exc) + errors = True + else: + logger.info( + "Registering with Weblate (token: %s)", + redact_for_log(api_token), + ) + ok = _register_weblate_project( + project_slug, config, components, api_token, weblate_url, + repo_root=repo_root, + ) + if not ok: + errors = True + + # Launchpad (stub — logs info about what would be registered) + if "launchpad" in enabled_services: + api_token = os.environ.get("LAUNCHPAD_API_TOKEN", "") + if not api_token: + logger.error("LAUNCHPAD_API_TOKEN not set — cannot register with Launchpad") + errors = True + else: + logger.info("Launchpad registration: project=%s (%d components)", + project_slug, len(components)) + # Launchpad API integration would go here + logger.info("✓ Launchpad registration completed (stub)") + + # Crowdin (stub — logs info about what would be registered) + if "crowdin" in enabled_services: + api_token = 
os.environ.get("CROWDIN_API_TOKEN", "") + if not api_token: + logger.error("CROWDIN_API_TOKEN not set — cannot register with Crowdin") + errors = True + else: + logger.info("Crowdin registration: project=%s (%d components)", + project_slug, len(components)) + # Crowdin API integration would go here + logger.info("✓ Crowdin registration completed (stub)") + + return 1 if errors else 0 + + +# --------------------------------------------------------------------------- +# CLI +# --------------------------------------------------------------------------- + +def main(argv: Optional[List[str]] = None) -> int: + logging.basicConfig( + level=logging.INFO, + format="%(asctime)s %(levelname)s %(message)s", + datefmt="%H:%M:%S", + ) + + parser = argparse.ArgumentParser( + prog="register_translation_project.py", + description="Register one IG repo with all enabled translation services", + ) + parser.add_argument( + "--repo-name", required=True, + help="GitHub repository name (e.g. smart-hiv)", + ) + parser.add_argument( + "--repo-root", default=".", + help="Path to repository root (default: current directory)", + ) + parser.add_argument( + "--org", default="worldhealthorganization", + help="GitHub organization (default: worldhealthorganization)", + ) + args = parser.parse_args(argv) + + repo_root = Path(args.repo_root).resolve() + repo_name = sanitize_slug(args.repo_name, "repo-name") + + # Security: verify tokens are from secrets, not workflow inputs + for token_env in ("WEBLATE_API_TOKEN", "CROWDIN_API_TOKEN", "LAUNCHPAD_API_TOKEN"): + try: + assert_no_secret_in_env(token_env) + except RuntimeError as exc: + logger.error("%s", exc) + return 1 + + return register_project(repo_name, repo_root, github_org=args.org) + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/input/scripts/run_ig_publisher.py b/input/scripts/run_ig_publisher.py index 1740b276b0..f4cd9d362d 100644 --- a/input/scripts/run_ig_publisher.py +++ b/input/scripts/run_ig_publisher.py @@ -59,7 +59,7 @@ 
# Module logger (idempotent re-binding; getLogger returns the same instance).
logger = logging.getLogger(__name__)


def collect_publisher_pot_files(ig_root: str) -> None:
    """Collect translation files produced by the IG Publisher.

    The FHIR IG Publisher writes translation files to two locations:

    1. ``output/`` — may contain ``.pot`` files (gitignored).
    2. ``translations/`` — contains ``.po``, ``.xliff``, and ``.json``
       files organised by language and format (also gitignored).

    This function copies any ``.pot`` files found in ``output/`` to
    ``input/translations/``, then merges all per-resource ``.po`` files
    from ``translations/`` into a single ``base.pot`` template suitable
    for Weblate.

    Individual per-resource ``.po`` files are **not** copied into
    ``input/translations/`` — only the merged ``base.pot`` is written
    there.

    Args:
        ig_root: Repository root directory.
    """
    dest_dir = os.path.join(ig_root, "input", "translations")
    os.makedirs(dest_dir, exist_ok=True)

    # 1. Copy .pot files from output/ (original behaviour).
    output_dir = os.path.join(ig_root, "output")
    if os.path.isdir(output_dir):
        for pot_file in glob_module.glob(
            os.path.join(output_dir, "**", "*.pot"), recursive=True
        ):
            dest_name = os.path.basename(pot_file)
            dest_path = os.path.join(dest_dir, dest_name)
            if os.path.exists(dest_path):
                logger.info(f"Overwriting existing {dest_path} with {pot_file}")
            try:
                shutil.copy2(pot_file, dest_path)
                logger.info(f"Copied IG Publisher .pot: {pot_file} -> {dest_path}")
            except Exception as exc:
                logger.warning(f"Failed to copy {pot_file} to {dest_path}: {exc}")

    # 2. Merge .po files from translations/ into input/translations/base.pot.
    #    The publisher writes to translations/po/*.po (single target lang)
    #    or translations/{lang}/po/*.po (multiple target langs).
    translations_dir = os.path.join(ig_root, "translations")
    if os.path.isdir(translations_dir):
        po_files: List[str] = []
        for po_file in glob_module.glob(
            os.path.join(translations_dir, "**", "*.po"), recursive=True
        ):
            po_files.append(po_file)
            logger.info(f"Found IG Publisher .po: {po_file}")

        if po_files:
            _merge_po_to_base_pot(po_files, dest_dir)
        else:
            logger.info("No .po files found in %s", translations_dir)
    else:
        logger.info("translations/ directory not found at %s", ig_root)


def _merge_po_to_base_pot(po_files: List[str], dest_dir: str) -> None:
    """Merge per-resource ``.po`` files into a single ``base.pot``.

    The IG Publisher produces one ``.po`` file per FHIR resource per
    target language. Weblate expects a single ``base.pot`` template
    containing all translatable strings. This helper reads every
    ``.po`` file, deduplicates entries by ``msgid``, and writes the
    merged result to ``base.pot``.

    When the same ``msgid`` appears in multiple ``.po`` files the
    source references (``#:`` comments) from all occurrences are kept.

    Args:
        po_files: Absolute paths to ``.po`` files to merge.
        dest_dir: Directory in which to write ``base.pot``.
    """
    # Use only one language's files to avoid duplicates across languages.
    # Pick the first language subdirectory found, or use all if flat layout.
    first_lang_files = _select_first_language_po_files(po_files)

    # Parse entries: dict[msgid] -> list of #: reference lines
    entries: Dict[str, List[str]] = {}  # msgid -> list of reference strings
    for po_path in first_lang_files:
        _parse_po_entries(po_path, entries)

    if not entries:
        logger.info("No translatable entries found in IG Publisher .po files.")
        return

    base_pot_path = os.path.join(dest_dir, "base.pot")
    try:
        with open(base_pot_path, "w", encoding="utf-8") as fh:
            fh.write(_pot_header())
            for msgid in sorted(entries.keys()):
                refs = entries[msgid]
                for ref in refs:
                    fh.write(f"#: {ref}\n")
                fh.write(f"msgid {_po_escape(msgid)}\n")
                fh.write('msgstr ""\n\n')
        logger.info(
            f"Merged {len(entries)} entries from {len(first_lang_files)} "
            f".po file(s) into {base_pot_path}"
        )
    except Exception as exc:
        logger.warning(f"Failed to write {base_pot_path}: {exc}")


def _select_first_language_po_files(po_files: List[str]) -> List[str]:
    """Select .po files from only the first target language.

    When multiple target languages are configured the IG Publisher
    creates per-language directories (``translations/{lang}/po/``).
    All languages contain the same ``msgid`` entries so we only need
    one set. This helper picks the first language alphabetically to
    avoid duplicates.

    For a flat layout (``translations/po/``) all files are returned.
    """
    # Group by parent directory two levels up (the lang dir).
    by_lang: Dict[str, List[str]] = {}
    for path in po_files:
        parent = os.path.dirname(path)       # .../po
        lang_dir = os.path.dirname(parent)   # .../{lang} or .../translations
        by_lang.setdefault(lang_dir, []).append(path)

    if not by_lang:
        return []

    # Return files from the first language directory (sorted for determinism).
    first_key = sorted(by_lang.keys())[0]
    return sorted(by_lang[first_key])


def _parse_po_entries(po_path: str, entries: Dict[str, List[str]]) -> None:
    """Parse a ``.po`` file and add entries to *entries* dict.

    Args:
        po_path: Path to a ``.po`` file.
        entries: Dict mapping ``msgid`` strings to lists of ``#:``
            reference strings. Updated in place.
    """
    try:
        with open(po_path, "r", encoding="utf-8") as fh:
            lines = fh.readlines()
    except Exception as exc:
        logger.warning(f"Cannot read {po_path}: {exc}")
        return

    current_refs: List[str] = []
    current_msgid: List[str] = []
    in_msgid = False
    in_msgstr = False

    for line in lines:
        stripped = line.strip()

        if stripped.startswith("#:"):
            # Slice after the two-char "#:" marker; the previous [3:] slice
            # dropped the first reference character when no space followed.
            current_refs.append(stripped[2:].strip())
            continue

        if stripped.startswith("#"):
            continue

        if stripped.startswith("msgid "):
            in_msgid = True
            in_msgstr = False
            current_msgid = [_po_unescape(stripped[6:])]
            continue

        if stripped.startswith("msgstr "):
            in_msgid = False
            in_msgstr = True
            # Flush the entry.
            msgid_text = "".join(current_msgid)
            if msgid_text:  # Skip the empty header msgid.
                if msgid_text not in entries:
                    entries[msgid_text] = []
                # Add source file as reference.
                if current_refs:
                    for ref in current_refs:
                        if ref not in entries[msgid_text]:
                            entries[msgid_text].append(ref)
                elif os.path.basename(po_path) not in entries[msgid_text]:
                    entries[msgid_text].append(os.path.basename(po_path))
            current_refs = []
            current_msgid = []
            continue

        if in_msgid and stripped.startswith('"'):
            current_msgid.append(_po_unescape(stripped))
            continue

        if in_msgstr and stripped.startswith('"'):
            continue  # Ignore msgstr continuation lines.

        # Reset on blank or unrecognised lines.
        if not stripped:
            in_msgid = False
            in_msgstr = False


def _po_escape(text: str) -> str:
    """Escape a string for use as a PO/POT ``msgid`` or ``msgstr`` value."""
    escaped = text.replace("\\", "\\\\").replace('"', '\\"').replace("\n", "\\n")
    return f'"{escaped}"'


def _po_unescape(quoted: str) -> str:
    """Unescape a PO/POT quoted string value.

    Escape sequences are resolved in a single left-to-right scan. The
    previous chained ``str.replace`` implementation processed ``\\n``
    before ``\\\\``, so an escaped backslash followed by ``n`` was
    corrupted into a real newline and no longer round-tripped through
    :func:`_po_escape`.
    """
    s = quoted.strip()
    if len(s) >= 2 and s.startswith('"') and s.endswith('"'):
        s = s[1:-1]

    out: List[str] = []
    i = 0
    while i < len(s):
        ch = s[i]
        if ch == "\\" and i + 1 < len(s):
            nxt = s[i + 1]
            if nxt == "n":
                out.append("\n")
                i += 2
                continue
            if nxt == '"':
                out.append('"')
                i += 2
                continue
            if nxt == "\\":
                out.append("\\")
                i += 2
                continue
        out.append(ch)
        i += 1
    return "".join(out)


def _pot_header() -> str:
    """Return a standard ``.pot`` file header."""
    now = datetime.datetime.now(datetime.timezone.utc).strftime(
        "%Y-%m-%d %H:%M+0000"
    )
    return (
        "# FHIR Resource Translation Template\n"
        "# Generated from IG Publisher output.\n"
        "#\n"
        "#, fuzzy\n"
        'msgid ""\n'
        'msgstr ""\n'
        f'"POT-Creation-Date: {now}\\n"\n'
        '"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\\n"\n'
        '"Last-Translator: FULL NAME \\n"\n'
        '"Language-Team: LANGUAGE \\n"\n'
        '"Language: \\n"\n'
        '"MIME-Version: 1.0\\n"\n'
        '"Content-Type: text/plain; charset=UTF-8\\n"\n'
        '"Content-Transfer-Encoding: 8bit\\n"\n'
        "\n"
    )
- # Copy them to input/translations/ so they can be committed. + # 4. Collect translation files from the IG Publisher. + # The IG Publisher writes .pot files into output/ and .po files + # into translations/ — both are gitignored. Copy .pot files and + # merge .po files into input/translations/base.pot so they can be + # committed. collect_publisher_pot_files(ig_root) if skip_commit: diff --git a/input/scripts/translation_config.py b/input/scripts/translation_config.py new file mode 100644 index 0000000000..15c2bf74f1 --- /dev/null +++ b/input/scripts/translation_config.py @@ -0,0 +1,442 @@ +#!/usr/bin/env python3 +""" +translation_config.py — Single authoritative module for reading translation +configuration from sushi-config.yaml (primary) or dak.json (fallback), +discovering translation components, and providing configuration to all other +translation scripts. + +Translation configuration (languages, plural forms, services) is defined in +sushi-config.yaml under a top-level ``translations`` key. For backward +compatibility, the module also checks dak.json#translations as a fallback. + +This module eliminates all hardcoded language and component lists. Every script +that needs language codes or component paths MUST import from this module. + +Usage as library: + from translation_config import load_dak_config, get_languages, discover_components + +Usage standalone (prints discovered config): + python translation_config.py [--repo-root .] +""" + +import gettext as gettext_module +import json +import logging +import os +import re +import sys +from dataclasses import dataclass, field +from pathlib import Path +from typing import Any, Callable, Dict, List, Optional + +try: + import yaml # PyYAML — expected in CI; optional for local dev +except ImportError: # pragma: no cover + yaml = None # type: ignore[assignment] + +logger = logging.getLogger(__name__) + +# Default GitHub organization for project slug derivation. 
DEFAULT_GITHUB_ORG = "worldhealthorganization"

# Module logger (idempotent re-binding; getLogger returns the same instance).
logger = logging.getLogger(__name__)


# ---------------------------------------------------------------------------
# Data classes
# ---------------------------------------------------------------------------

@dataclass
class LanguageEntry:
    """One target language from sushi-config.yaml#translations.languages."""
    code: str
    name: str
    direction: str  # "ltr" or "rtl"
    plural: str = ""


@dataclass
class ServiceConfig:
    """Configuration for one translation service."""
    enabled: bool = False
    url: str = ""
    extra: Dict[str, Any] = field(default_factory=dict)


@dataclass
class TranslationsConfig:
    """Parsed translations block from sushi-config.yaml (or dak.json fallback)."""
    source_language: str = "en"
    languages: List[LanguageEntry] = field(default_factory=list)
    services: Dict[str, ServiceConfig] = field(default_factory=dict)


@dataclass
class DakConfig:
    """Top-level dak.json parsed configuration."""
    resource_type: str = ""
    id: str = ""
    name: str = ""
    title: str = ""
    version: str = ""
    status: str = ""
    translations: Optional[TranslationsConfig] = None
    raw: Dict[str, Any] = field(default_factory=dict)


@dataclass
class TranslationComponent:
    """A translation component derived from a .pot file in the repo."""
    slug: str               # e.g. "fsh-base"
    pot_path: Path          # absolute path to the .pot file
    translations_dir: Path  # directory containing .po files
    pot_stem: str           # e.g. "base"


# ---------------------------------------------------------------------------
# Exceptions
# ---------------------------------------------------------------------------

class DakConfigError(Exception):
    """Raised when dak.json is missing, malformed, or has invalid fields."""
    pass


# ---------------------------------------------------------------------------
# Configuration loading
# ---------------------------------------------------------------------------

def load_dak_config(repo_root: Path) -> DakConfig:
    """
    Load DAK identity from dak.json and translation config from
    sushi-config.yaml (with dak.json as fallback).

    Translation configuration is read from *sushi-config.yaml#translations*
    first. If that key is absent, *dak.json#translations* is tried for
    backward compatibility.

    Raises:
        DakConfigError: if dak.json is missing, unparseable, or lacks
            required fields.
    """
    dak_path = repo_root / "dak.json"
    if not dak_path.is_file():
        raise DakConfigError(f"dak.json not found at {dak_path}")

    try:
        raw = json.loads(dak_path.read_text(encoding="utf-8"))
    except (json.JSONDecodeError, OSError) as exc:
        raise DakConfigError(f"Cannot parse dak.json: {exc}") from exc

    if not isinstance(raw, dict):
        raise DakConfigError("dak.json must be a JSON object")

    # Required top-level fields — fail on the first one that is absent.
    for required in ("id", "name", "version", "status"):
        if required not in raw:
            raise DakConfigError(f"dak.json missing required field: {required}")

    parsed = DakConfig(
        resource_type=raw.get("resourceType", ""),
        id=raw.get("id", ""),
        name=raw.get("name", ""),
        title=raw.get("title", ""),
        version=raw.get("version", ""),
        status=raw.get("status", ""),
        raw=raw,
    )

    # ── Translation config: sushi-config.yaml first, dak.json fallback ──
    translations_block = _load_translations_from_sushi(repo_root)
    if translations_block is None:
        # Fallback to dak.json#translations for backward compatibility
        translations_block = raw.get("translations")
        if translations_block and isinstance(translations_block, dict):
            logger.debug("Loaded translations from dak.json (fallback)")
    else:
        logger.debug("Loaded translations from sushi-config.yaml")

    if translations_block and isinstance(translations_block, dict):
        parsed.translations = _parse_translations(translations_block)

    return parsed


def _load_translations_from_sushi(repo_root: Path) -> Optional[Dict[str, Any]]:
    """Read the ``translations`` block from sushi-config.yaml, if present.

    Returns the raw dict or *None* when:
    - sushi-config.yaml does not exist
    - PyYAML is not installed
    - the file has no ``translations`` key
    """
    sushi_path = repo_root / "sushi-config.yaml"
    if not sushi_path.is_file():
        return None

    if yaml is None:
        logger.debug("PyYAML not available — skipping sushi-config.yaml")
        return None

    try:
        parsed_yaml = yaml.safe_load(sushi_path.read_text(encoding="utf-8"))
    except (yaml.YAMLError, OSError) as exc:
        logger.warning("Cannot parse sushi-config.yaml: %s", exc)
        return None

    if not isinstance(parsed_yaml, dict):
        return None

    block = parsed_yaml.get("translations")
    return block if (block and isinstance(block, dict)) else None


def _parse_translations(tr_raw: Dict[str, Any]) -> TranslationsConfig:
    """Parse a translations block (from sushi-config.yaml or dak.json)."""
    parsed = TranslationsConfig(source_language=tr_raw.get("sourceLanguage", "en"))

    raw_languages = tr_raw.get("languages", [])
    if not isinstance(raw_languages, list):
        raise DakConfigError("translations.languages must be an array")

    for lang in raw_languages:
        if not isinstance(lang, dict):
            raise DakConfigError("Each language entry must be a JSON object")
        lang_code = lang.get("code", "")
        if not lang_code:
            raise DakConfigError("Language entry missing required field: code")
        parsed.languages.append(LanguageEntry(
            code=lang_code,
            name=lang.get("name", ""),
            direction=lang.get("direction", "ltr"),
            plural=lang.get("plural", ""),
        ))

    raw_services = tr_raw.get("services", {})
    if isinstance(raw_services, dict):
        parsed.services = {
            svc_name: ServiceConfig(
                enabled=bool(svc_data.get("enabled", False)),
                url=svc_data.get("url", ""),
                extra={k: v for k, v in svc_data.items()
                       if k not in ("enabled", "url")},
            )
            for svc_name, svc_data in raw_services.items()
            if isinstance(svc_data, dict)
        }

    return parsed


# ---------------------------------------------------------------------------
# Accessors
# ---------------------------------------------------------------------------

def get_languages(config: DakConfig) -> List[LanguageEntry]:
    """Return target language list from translations config."""
    translations = config.translations
    return [] if translations is None else list(translations.languages)


def get_language_codes(config: DakConfig) -> List[str]:
    """Return just the language code strings."""
    return [entry.code for entry in get_languages(config)]


def get_enabled_services(config: DakConfig) -> Dict[str, ServiceConfig]:
    """Return dict of enabled translation services and their config."""
    if config.translations is None:
        return {}
    enabled: Dict[str, ServiceConfig] = {}
    for svc_name, svc in config.translations.services.items():
        if svc.enabled:
            enabled[svc_name] = svc
    return enabled


def get_project_slug(github_org: str, repo_name: str) -> str:
    """Derive translation service project slug: '{github_org}-{repo_name}' (lowercase)."""
    return "-".join((github_org, repo_name)).lower()


def derive_project_slug_from_env(repo_root: Optional[Path] = None) -> str:
    """Derive project slug from ``GITHUB_REPOSITORY`` env var, falling back to
    *DEFAULT_GITHUB_ORG* and the repo directory name."""
    env_repo = os.environ.get("GITHUB_REPOSITORY", "")
    if env_repo and "/" in env_repo:
        org, repo_name = env_repo.split("/", 1)
    else:
        org = DEFAULT_GITHUB_ORG
        repo_name = (repo_root or Path(".")).resolve().name
    return get_project_slug(org, repo_name)
--------------------------------------------------------------------------- + +def _derive_component_slug(pot_path: Path, repo_root: Path) -> str: + """ + Derive a component slug from a .pot file path. + + Algorithm: take the path segments between 'input/' and '/translations/', + plus the .pot stem, joined with '-'. Lowercase, non-alphanumeric → '-'. + + Example: input/fsh/translations/base.pot → fsh-base + """ + rel = pot_path.relative_to(repo_root) + parts = rel.parts # e.g. ('input', 'fsh', 'translations', 'base.pot') + + # Find 'input' and 'translations' indices + try: + input_idx = list(parts).index("input") + except ValueError: + # fallback: use parent dir name + stem + return re.sub(r"[^a-z0-9]+", "-", + f"{pot_path.parent.parent.name}-{pot_path.stem}".lower()).strip("-") + + try: + trans_idx = list(parts).index("translations") + except ValueError: + trans_idx = len(parts) - 1 + + # Segments between input/ and translations/ + middle = parts[input_idx + 1:trans_idx] + stem = pot_path.stem + + raw_slug = "-".join(list(middle) + [stem]) + slug = re.sub(r"[^a-z0-9]+", "-", raw_slug.lower()).strip("-") + return slug + + +def discover_components(repo_root: Path) -> List[TranslationComponent]: + """ + Scan repo_root for all *.pot files in translations/ directories and derive + component definitions. + + Returns components sorted by pot_path for deterministic ordering. 
+ """ + components: List[TranslationComponent] = [] + + for pot_path in sorted(repo_root.rglob("**/translations/*.pot")): + # Skip any paths in output/temp/fsh-generated directories + rel = str(pot_path.relative_to(repo_root)) + if any(rel.startswith(skip) for skip in + ("output/", "temp/", "fsh-generated/", "node_modules/")): + continue + + slug = _derive_component_slug(pot_path, repo_root) + components.append(TranslationComponent( + slug=slug, + pot_path=pot_path, + translations_dir=pot_path.parent, + pot_stem=pot_path.stem, + )) + + return components + + +def get_component_map(repo_root: Path) -> Dict[str, str]: + """ + Return a dict mapping component slug → repo-relative translations directory. + This provides backward-compatibility with the old COMPONENT_MAP constant. + """ + components = discover_components(repo_root) + return { + comp.slug: str(comp.translations_dir.relative_to(repo_root)) + for comp in components + } + + +# --------------------------------------------------------------------------- +# Gettext setup helper for script translation +# --------------------------------------------------------------------------- + +def setup_gettext( + script_file: str, + domain: str = "scripts", + lang: Optional[str] = None, +) -> Callable[[str], str]: + """ + Set up gettext for a script file, looking for .mo files in the + translations/ sibling directory. + + Args: + script_file: Path to the calling script (typically ``__file__``). + domain: Gettext domain name. + lang: Language code to load (e.g. ``"fr"``). When *None* the + ``LANGUAGE`` environment variable is consulted, falling back + to ``"en"`` (source strings returned as-is). + + Returns: + A callable translation function (the ``_`` function). 
+ + Usage in scripts:: + + from translation_config import setup_gettext + _ = setup_gettext(__file__) # default / env + _ = setup_gettext(__file__, lang="fr") # explicit French + """ + script_dir = Path(script_file).resolve().parent + locale_dir = script_dir / "translations" + + if lang is None: + lang = os.environ.get("LANGUAGE", "en") + + try: + translation = gettext_module.translation( + domain, localedir=str(locale_dir), languages=[lang], + ) + return translation.gettext + except FileNotFoundError: + return gettext_module.gettext + + +# --------------------------------------------------------------------------- +# CLI (standalone diagnostics) +# --------------------------------------------------------------------------- + +def main() -> int: + import argparse + logging.basicConfig(level=logging.INFO, format="%(levelname)s %(message)s") + + parser = argparse.ArgumentParser( + description="Show translation configuration from sushi-config.yaml / dak.json") + parser.add_argument("--repo-root", default=".", help="Repository root") + args = parser.parse_args() + + repo_root = Path(args.repo_root).resolve() + try: + config = load_dak_config(repo_root) + except DakConfigError as exc: + logger.error("Configuration error: %s", exc) + return 1 + + print(f"DAK: {config.id} ({config.name})") + + if config.translations: + print(f"Source language: {config.translations.source_language}") + print("Target languages:") + for lang in config.translations.languages: + print(f" {lang.code} - {lang.name} ({lang.direction})") + print("Services:") + for name, svc in config.translations.services.items(): + status = "enabled" if svc.enabled else "disabled" + print(f" {name}: {status}" + (f" ({svc.url})" if svc.url else "")) + else: + print("No translations block found in sushi-config.yaml or dak.json") + + print("\nDiscovered components:") + components = discover_components(repo_root) + if components: + for comp in components: + print(f" {comp.slug} → 
{comp.pot_path.relative_to(repo_root)}") + else: + print(" (no .pot files found)") + + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/input/scripts/translation_report.py b/input/scripts/translation_report.py new file mode 100644 index 0000000000..626c5ebbde --- /dev/null +++ b/input/scripts/translation_report.py @@ -0,0 +1,251 @@ +#!/usr/bin/env python3 +""" +translation_report.py — Generate a translation completeness report. + +Scans all .po files in the repo and produces a Markdown report at +input/pagecontent/translation-status.md showing per-language, per-component +translation completeness percentages. + +Usage: + python translation_report.py [--repo-root .] [--output input/pagecontent/translation-status.md] + +Exit codes: + 0 Report generated successfully + 1 Error + +Author: WHO SMART Guidelines Team +""" + +import argparse +import logging +import sys +from pathlib import Path +from typing import Dict, List, Optional, Tuple + +sys.path.insert(0, str(Path(__file__).resolve().parent)) + +from translation_config import ( + DakConfigError, + TranslationComponent, + discover_components, + get_language_codes, + load_dak_config, +) + +logger = logging.getLogger(__name__) + + +# --------------------------------------------------------------------------- +# PO file statistics +# --------------------------------------------------------------------------- + +def _count_po_stats(po_path: Path) -> Tuple[int, int, int]: + """ + Count msgid entries in a .po file. 
+ + Returns: (total_messages, translated_messages, fuzzy_messages) + """ + if not po_path.is_file(): + return (0, 0, 0) + + try: + content = po_path.read_text(encoding="utf-8", errors="replace") + except OSError: + return (0, 0, 0) + + total = 0 + translated = 0 + fuzzy = 0 + is_fuzzy = False + + # Simple PO parser — count msgid/msgstr pairs + lines = content.split("\n") + i = 0 + while i < len(lines): + line = lines[i].strip() + + if line.startswith("#,") and "fuzzy" in line: + is_fuzzy = True + + if line.startswith("msgid "): + # Skip the header entry (empty msgid at the start) + msgid_text = line[6:].strip().strip('"') + # Collect continuation lines + j = i + 1 + while j < len(lines) and lines[j].strip().startswith('"'): + msgid_text += lines[j].strip().strip('"') + j += 1 + + if msgid_text: # Non-empty msgid (skip header) + total += 1 + + # Find corresponding msgstr + while j < len(lines): + sline = lines[j].strip() + if sline.startswith("msgstr "): + msgstr_text = sline[7:].strip().strip('"') + k = j + 1 + while k < len(lines) and lines[k].strip().startswith('"'): + msgstr_text += lines[k].strip().strip('"') + k += 1 + if msgstr_text: + if is_fuzzy: + fuzzy += 1 + else: + translated += 1 + break + j += 1 + + is_fuzzy = False + + i += 1 + + return (total, translated, fuzzy) + + +# --------------------------------------------------------------------------- +# Report generation +# --------------------------------------------------------------------------- + +def generate_report( + repo_root: Path, + output_path: Path, +) -> int: + """ + Generate translation-status.md report. + + Returns 0 on success, 1 on error. 
+ """ + try: + config = load_dak_config(repo_root) + except DakConfigError: + logger.warning("dak.json not found — generating minimal report") + config = None + + components = discover_components(repo_root) + languages = get_language_codes(config) if config else [] + + if not components: + logger.info("No translation components found") + + if not languages: + logger.info("No target languages configured") + + # Collect statistics + # stats[component_slug][lang_code] = (total, translated, fuzzy) + stats: Dict[str, Dict[str, Tuple[int, int, int]]] = {} + + for comp in components: + stats[comp.slug] = {} + for lang in languages: + po_path = comp.translations_dir / f"{lang}.po" + stats[comp.slug][lang] = _count_po_stats(po_path) + + # Build Markdown report + lines: List[str] = [] + lines.append("# Translation Status Report") + lines.append("") + lines.append("Auto-generated by `translation_report.py`. Do not edit manually.") + lines.append("") + + if not components or not languages: + lines.append("No translation components or languages configured.") + _write_report(output_path, lines) + return 0 + + # Summary table + lines.append("## Summary") + lines.append("") + + # Header row + header = "| Component |" + separator = "|-----------|" + for lang in languages: + header += f" {lang} |" + separator += "------|" + lines.append(header) + lines.append(separator) + + # Data rows + for comp in components: + row = f"| `{comp.slug}` |" + for lang in languages: + total, translated, fuzzy = stats[comp.slug].get(lang, (0, 0, 0)) + if total == 0: + pct = "—" + else: + pct_val = (translated / total) * 100 + pct = f"{pct_val:.0f}%" + row += f" {pct} |" + lines.append(row) + + lines.append("") + + # Detailed breakdown + lines.append("## Details") + lines.append("") + + for comp in components: + lines.append(f"### {comp.slug}") + lines.append("") + lines.append(f"- Template: `{comp.pot_path.relative_to(repo_root)}`") + lines.append("") + lines.append("| Language | Total | Translated | 
Fuzzy | Complete |") + lines.append("|----------|-------|------------|-------|----------|") + + for lang in languages: + total, translated, fuzzy = stats[comp.slug].get(lang, (0, 0, 0)) + if total == 0: + pct = "—" + else: + pct = f"{(translated / total) * 100:.1f}%" + lines.append( + f"| {lang} | {total} | {translated} | {fuzzy} | {pct} |" + ) + + lines.append("") + + _write_report(output_path, lines) + return 0 + + +def _write_report(output_path: Path, lines: List[str]) -> None: + """Write report lines to output path.""" + output_path.parent.mkdir(parents=True, exist_ok=True) + output_path.write_text("\n".join(lines) + "\n", encoding="utf-8") + logger.info("✓ Report written to %s", output_path) + + +# --------------------------------------------------------------------------- +# CLI +# --------------------------------------------------------------------------- + +def main(argv: Optional[List[str]] = None) -> int: + logging.basicConfig( + level=logging.INFO, + format="%(asctime)s %(levelname)s %(message)s", + datefmt="%H:%M:%S", + ) + + parser = argparse.ArgumentParser( + prog="translation_report.py", + description="Generate translation completeness report", + ) + parser.add_argument( + "--repo-root", default=".", + help="Repository root (default: current directory)", + ) + parser.add_argument( + "--output", default="input/pagecontent/translation-status.md", + help="Output path for the report (default: input/pagecontent/translation-status.md)", + ) + args = parser.parse_args(argv) + + repo_root = Path(args.repo_root).resolve() + output_path = repo_root / args.output + + return generate_report(repo_root, output_path) + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/input/scripts/translation_security.py b/input/scripts/translation_security.py new file mode 100644 index 0000000000..e38ac07a52 --- /dev/null +++ b/input/scripts/translation_security.py @@ -0,0 +1,174 @@ +#!/usr/bin/env python3 +""" +translation_security.py — Centralised input sanitisation 
and secret protection +for all translation-related scripts. + +Imported by any script that handles external inputs (API tokens, slugs, URLs, +language codes) to enforce consistent security policies. + +Key principles: + - All values received from environment variables that originated from + workflow_dispatch inputs MUST be sanitised before use. + - API tokens MUST NEVER be logged, echoed, or included in exception messages. + - HTTP requests MUST use connection timeouts and response-size guards. + +Author: WHO SMART Guidelines Team +""" + +import logging +import os +import re +from urllib.parse import urlparse + +logger = logging.getLogger(__name__) + +# --------------------------------------------------------------------------- +# Constants +# --------------------------------------------------------------------------- + +#: Default connection timeout for HTTP requests (seconds). +DEFAULT_TIMEOUT_SECONDS = 60 + +#: Maximum response body size (bytes) — 10 MiB. +MAX_RESPONSE_BYTES = 10 * 1024 * 1024 + +#: Pattern for valid slug values (component names, project names, etc.). +_SLUG_PATTERN = re.compile(r"^[a-z0-9][a-z0-9_-]{0,127}$") + +#: Pattern for valid BCP-47 language codes (simplified: 2-3 letter codes with +# optional subtag). +_LANG_CODE_PATTERN = re.compile(r"^[a-z]{2,3}(-[A-Za-z0-9]{1,8})*$") + + +# --------------------------------------------------------------------------- +# Sanitisation functions +# --------------------------------------------------------------------------- + +def sanitize_slug(value: str, field_name: str) -> str: + """ + Validate and return a slug string (lowercase alphanumeric + hyphens/underscores). + + Args: + value: The value to validate. + field_name: Human-readable field name for error messages. + + Raises: + ValueError: if the value does not match the expected pattern. + """ + cleaned = value.strip().lower() + if not _SLUG_PATTERN.match(cleaned): + raise ValueError( + f"Invalid {field_name}: {cleaned!r}. 
" + "Only lowercase alphanumerics, hyphens, and underscores are allowed " + "(1-128 characters, must start with alphanumeric)." + ) + return cleaned + + +def sanitize_url( + value: str, + field_name: str, + allowed_schemes: tuple = ("https",), +) -> str: + """ + Validate URL scheme and structure. + + Args: + value: The URL to validate. + field_name: Human-readable field name for error messages. + allowed_schemes: Tuple of allowed URL schemes. + + Raises: + ValueError: if the URL scheme or structure is invalid. + """ + cleaned = value.strip().rstrip("/") + parsed = urlparse(cleaned) + + if parsed.scheme not in allowed_schemes: + raise ValueError( + f"Invalid {field_name}: scheme {parsed.scheme!r} not in " + f"allowed schemes {allowed_schemes}." + ) + + if not parsed.netloc: + raise ValueError( + f"Invalid {field_name}: no hostname found in {cleaned!r}." + ) + + return cleaned + + +def sanitize_lang_code(value: str) -> str: + """ + Validate a BCP-47 language code format. + + Args: + value: The language code to validate. + + Raises: + ValueError: if the value is not a valid language code. + """ + cleaned = value.strip() + if not _LANG_CODE_PATTERN.match(cleaned): + raise ValueError( + f"Invalid language code: {cleaned!r}. " + "Expected BCP-47 format (e.g. 'en', 'fr', 'zh-Hans')." + ) + return cleaned + + +def redact_for_log(value: str, visible_chars: int = 4) -> str: + """ + Return a redacted version of a value for safe log output. + + Shows only the first N characters followed by '***'. + + Args: + value: The value to redact. + visible_chars: Number of leading characters to show. + + Returns: + Redacted string, e.g. 'wlu_***'. + """ + if not value: + return "(empty)" + if len(value) <= visible_chars: + return "***" + return value[:visible_chars] + "***" + + +def assert_no_secret_in_env(env_var: str) -> None: + """ + Guard against accidentally passing secret values as workflow_dispatch inputs. 
+ + GitHub Actions exposes workflow_dispatch inputs to composite/reusable steps + as environment variables with an ``INPUT_`` prefix (uppercased, with hyphens + replaced by underscores). This function checks whether the given env var + name matches any ``INPUT_*`` variable, which would indicate a misconfigured + workflow that passes a secret as a plaintext input. + + Args: + env_var: The environment variable name that holds a secret. + + Raises: + RuntimeError: if the secret appears to have been passed as a workflow input. + """ + # GITHUB_EVENT_PATH is only set inside GitHub Actions; skip the check elsewhere + event_path = os.environ.get("GITHUB_EVENT_PATH", "") + if not event_path: + return # Not running in GitHub Actions + + # Normalise: TOKEN_NAME → token_name + normalised = env_var.lower().replace("-", "_") + + # Scan env vars for workflow inputs (exposed with the INPUT_ prefix) + inputs_prefix = "INPUT_" + for key in os.environ: + if key.startswith(inputs_prefix): + input_name = key[len(inputs_prefix):].lower().replace("-", "_") + if input_name == normalised: + raise RuntimeError( + f"Security violation: {env_var!r} appears to be passed as a " + f"workflow_dispatch input ({key}). Secrets MUST be configured " + "as GitHub Actions secrets, never as workflow inputs." + ) diff --git a/input/scripts/translations/scripts.pot b/input/scripts/translations/scripts.pot new file mode 100644 index 0000000000..8ee726879e --- /dev/null +++ b/input/scripts/translations/scripts.pot @@ -0,0 +1,10 @@ +# Translation template for WHO SMART Guidelines Python scripts. 
+# Generated: 2026-03-06 16:54+0000 +# +msgid "" +msgstr "" +"POT-Creation-Date: 2026-03-06 16:54+0000\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + diff --git a/input/translations/base.pot b/input/translations/base.pot new file mode 100644 index 0000000000..24a07ae98a --- /dev/null +++ b/input/translations/base.pot @@ -0,0 +1,5408 @@ +# FHIR Resource Translation Template +# Generated from IG Publisher output. +# +#, fuzzy +msgid "" +msgstr "" +"POT-Creation-Date: 2026-03-06 17:53+0000\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language-Team: LANGUAGE \n" +"Language: \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +#: ActorDefinition.description +#: ImplementationGuide.definition.resource.description +msgid "\"A digital system for detecting, monitoring, investigating, and responding to" +msgstr "" + +#: ActorDefinition.description +#: ImplementationGuide.definition.resource.description +msgid "\"A digital system that creates, maintains, and provides authoritative unique" +msgstr "" + +#: ActorDefinition.description +#: ImplementationGuide.definition.resource.description +msgid "\"A digital system that manages the complete lifecycle of laboratory test orders," +msgstr "" + +#: ActorDefinition.description +#: ImplementationGuide.definition.resource.description +msgid "\"A digital system that manages the health supply chain from quantification" +msgstr "" + +#: ActorDefinition.description +#: ImplementationGuide.definition.resource.description +msgid "\"A digital system used to collect, process, report, and use aggregate health data" +msgstr "" + +#: ActorDefinition.description +#: ImplementationGuide.definition.resource.description +msgid "\"A frontline member of the health workforce who delivers health interventions" +msgstr "" + +#: ActorDefinition.description +#: 
ImplementationGuide.definition.resource.description +msgid "\"A member of the health workforce who delivers health interventions. This group" +msgstr "" + +#: ActorDefinition.description +#: ImplementationGuide.definition.resource.description +msgid "\"A member of the public who is a potential or current user of health services," +msgstr "" + +#: ActorDefinition.description +#: ImplementationGuide.definition.resource.description +msgid "\"A middleware system or shared infrastructure that enables health data exchange" +msgstr "" + +#: ActorDefinition.description +#: ImplementationGuide.definition.resource.description +msgid "\"A professional involved in the administration and oversight of health systems." +msgstr "" + +#: ActorDefinition.description +#: ImplementationGuide.definition.resource.description +msgid "\"A professional who manages, analyses, and disseminates health data to support" +msgstr "" + +#: ActorDefinition.description +#: ImplementationGuide.definition.resource.description +msgid "\"A secure, digital system that holds information about people's health and" +msgstr "" + +#: CodeSystem.description +#: ImplementationGuide.definition.resource.description +msgid "\"CodeSystem for SMART Guidelines tasks which are specializations of the Business Process Modeling Notatiton (BPMN) tasks, which are included in this codesystem" +msgstr "" + +#: CodeSystem.description +#: ImplementationGuide.definition.resource.description +msgid "\"CodeSystem for the Classification of Digital Health Services and Application Types v2," +msgstr "" + +#: CodeSystem.description +#: ImplementationGuide.definition.resource.description +msgid "\"CodeSystem for the Classification of Digital Interventions, Services and Applications in Health" +msgstr "" + +#: ConceptMap.description +#: ImplementationGuide.definition.resource.description +msgid "\"Mapping from the Classification of Digital Health Interventions v1 (CDHI v1, 2018)" +msgstr "" + +#: ConceptMap.description +#: 
ImplementationGuide.definition.resource.description +msgid "\"Mapping from the Classification of Digital Health System Categories v1 (CDSCv1, 2018)" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.BusinessProcessWorkflowSource/definition +msgid "\"Source reference for Business Process Workflow - exactly one of the following must be provided:" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.CoreDataElementSource/definition +msgid "\"Source reference for Core Data Element - exactly one of the following must be provided:" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.DecisionSupportLogicSource/definition +msgid "\"Source reference for Decision Support Logic - exactly one of the following must be provided:" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.GenericPersonaSource/definition +msgid "\"Source reference for Generic Persona - exactly one of the following must be provided:" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.HealthInterventionsSource/definition +msgid "\"Source reference for Health Interventions - exactly one of the following must be provided:" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.ProgramIndicatorSource/definition +msgid "\"Source reference for Program Indicator - exactly one of the following must be provided:" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.RequirementsSource/definition +msgid "\"Source reference for Requirements - exactly one of the following must be provided:" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.TestScenarioSource/definition +msgid "\"Source reference for Test Scenario - exactly one of the 
following must be provided:" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.UserScenarioSource/definition +msgid "\"Source reference for User Scenario - exactly one of the following must be provided:" +msgstr "" + +#: CodeSystem.concept.display +msgid "(Health) Facility registries" +msgstr "" + +#: CodeSystem.concept.definition +msgid "A Business Rule Task provides a mechanism for the Process to provide input to a Business Rules Engine and to get the output of calculations that the Business Rules Engine might provide." +msgstr "" + +#: CodeSystem.concept.definition +msgid "A Manual Task is a Task that is expected to be performed without the aid of any business process execution engine or any application. An example of this could be a telephone technician installing a telephone at a customer location." +msgstr "" + +#: CodeSystem.concept.definition +msgid "A Receive Task is a simple Task that is designed to wait for a Message to arrive from an external Participant (relative to the Process). Once the Message has been received, the Task is completed." +msgstr "" + +#: CodeSystem.concept.definition +msgid "A Script Task is executed by a business process engine. The modeler or implementer defines a script in a language that the engine can interpret. When the Task is ready to start, the engine will execute the script. When the script is completed, the Task will also be completed." +msgstr "" + +#: CodeSystem.concept.definition +msgid "A Send Task is a simple Task that is designed to send a Message to an external Participant (relative to the Process). Once the Message has been sent, the Task is completed." +msgstr "" + +#: CodeSystem.concept.definition +msgid "A Service Task is a Task that uses some sort of service, which could be a Web service or an automated application. : The Service Task has exactly one set of inputs and at most one set of outputs." 
+msgstr "" + +#: CodeSystem.concept.definition +msgid "A Task is an atomic Activity within a Process flow. A Task is used when the work in the Process cannot be broken down to a finer level of detail. Generally, an end-user and/or applications are used to perform the Task when it is executed" +msgstr "" + +#: CodeSystem.concept.definition +msgid "A User Task is a typical “workflow” Task where a human performer performs the Task with the assistance of a software application and is scheduled through a task list manager of some sort." +msgstr "" + +#: CodeSystem.concept.definition +msgid "A digital personal health record is a record of an individual's health information in a structured digital format, over which the person has agency." +msgstr "" + +#: StructureDefinition.element.DublinCore.language/definition +msgid "A language of the resource" +msgstr "" + +#: StructureDefinition.element.DublinCore.title/definition +msgid "A name given to the resource" +msgstr "" + +#: StructureDefinition.element.DublinCore.date/definition +msgid "A point or period of time associated with an event in the lifecycle of the resource" +msgstr "" + +#: StructureDefinition.element.DublinCore.relation/definition +msgid "A related resource" +msgstr "" + +#: StructureDefinition.element.DublinCore.source/definition +msgid "A related resource from which the described resource is derived" +msgstr "" + +#: CodeSystem.concept.definition +msgid "A secure, online system that holds information about people's health and clinical care, managed by healthcare providers. Also known as electronic health record (EHR)." 
+msgstr "" + +#: CodeSystem.concept.display +msgid "Access by client to own medical records" +msgstr "" + +#: CodeSystem.concept.display +msgid "Access by the individual to own medical or summary health records" +msgstr "" + +#: CodeSystem.concept.display +msgid "Access by the individual to verifiable documentation of a health event or health status" +msgstr "" + +#: CodeSystem.concept.display +msgid "Active data capture/ documentation by client" +msgstr "" + +#: CodeSystem.concept.display +msgid "Active data capture/documentation by an individual" +msgstr "" + +#: StructureDefinition.element.FunctionalRequirement.activity/short +msgid "Activity" +msgstr "" + +#: StructureDefinition.element.FunctionalRequirement.actor/short +msgid "Actor" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.meta/definition +msgid "Additional metadata for the IG" +msgstr "" + +#: CodeSystem.concept.display +msgid "Administrative and Commercial Managers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Advertising and Public Relations Managers" +msgstr "" + +#: Questionnaire.item.text +msgid "Age" +msgstr "" + +#: CodeSystem.concept.display +msgid "Agricultural, Forestry and Fishery Labourers" +msgstr "" + +#: StructureDefinition.element.DublinCore.description/definition +msgid "An account of the resource" +msgstr "" + +#: StructureDefinition.element.DublinCore.contributor/definition +msgid "An entity responsible for making contributions to the resource" +msgstr "" + +#: StructureDefinition.element.DublinCore.creator/definition +msgid "An entity responsible for making the resource" +msgstr "" + +#: StructureDefinition.element.DublinCore.publisher/definition +msgid "An entity responsible for making the resource available" +msgstr "" + +#: StructureDefinition.element.BusinessProcessWorkflow.id/definition +msgid "An identifier for the business process workflow" +msgstr "" + +#: StructureDefinition.element.DecisionSupportLogic.id/definition +msgid "An identifier for 
the decision support logic" +msgstr "" + +#: StructureDefinition.element.HealthInterventions.id/definition +msgid "An identifier for the health intervention" +msgstr "" + +#: StructureDefinition.element.UserScenario.id/definition +msgid "An identifier for the user scenario" +msgstr "" + +#: StructureDefinition.element.Persona.id/definition +msgid "An identifier or code for the persona" +msgstr "" + +#: StructureDefinition.element.FunctionalRequirement.id/definition +#: StructureDefinition.element.NonFunctionalRequirement.id/definition +msgid "An identifier or code for the requirement" +msgstr "" + +#: StructureDefinition.element.DublinCore.identifier/definition +msgid "An unambiguous reference to the resource within a given context" +msgstr "" + +#: CodeSystem.concept.display +msgid "Analytics Systems" +msgstr "" + +#: CodeSystem.concept.display +msgid "Annotation" +msgstr "" + +#: CodeSystem.concept.definition +msgid "Applications that facilitate data collection and use at the community level, utilised by community-based workers who provide health promotion and disease prevention activities." 
+msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.applyExtensionMetadataToRoot/short +msgid "Apply Extension Metadata" +msgstr "" + +#: CodeSystem.concept.display +msgid "Architects, Planners, Surveyors and Designers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Armed Forces Occupations" +msgstr "" + +#: CodeSystem.concept.display +msgid "Assemblers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Assess capacity of health worker(s)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Assess capacity of healthcare provider(s)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Assess health facilities" +msgstr "" + +#: CodeSystem.concept.display +msgid "Audiologists and Speech Therapists" +msgstr "" + +#: CodeSystem.concept.display +msgid "Authentication and authorisation" +msgstr "" + +#: ImplementationGuide.definition.resource.description +msgid "Auto-generated questionnaire for decision table DAK.DT.IMMZ.D2.DT.BCG" +msgstr "" + +#: CodeSystem.concept.display +msgid "Automated analysis of data to generate new information or predictions on future events" +msgstr "" + +#: ImplementationGuide.name +msgid "Base" +msgstr "" + +#: ImplementationGuide.description +msgid "Base SMART Guidelines implementation guide to be used as the base dependency for all SMART Guidelines IGs" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.FHIRSchemaBase/definition +msgid "Base logical model providing the common schema metadata interface inherited by all SMART Guidelines logical models. Every SMART Guidelines logical model schema derives from this base, which documents the shared FHIR and JSON-LD metadata properties used by the JSON Schema generation pipeline." 
+msgstr "" + +#: StructureDefinition.element.FunctionalRequirement.benefit[x]/short +msgid "Benefit" +msgstr "" + +#: StructureDefinition.element.FunctionalRequirement.benefit[x]/definition +msgid "Benefit to an actor fulfilling the requirement (so that)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Biologists, Botanists, Zoologists and Related Professionals" +msgstr "" + +#: CodeSystem.concept.display +msgid "Blood bank information management systems" +msgstr "" + +#: CodeSystem.concept.display +msgid "Bricklayers and Related Workers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Building Architects" +msgstr "" + +#: CodeSystem.concept.display +msgid "Building Finishers and Related Trades Workers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Building Frame and Related Trades Workers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Building and Housekeeping Supervisors" +msgstr "" + +#: CodeSystem.concept.display +msgid "Building and Related Trades Workers, Excluding Electricians" +msgstr "" + +#: StructureDefinition.element.BusinessProcessWorkflow.id/short +msgid "Business Process ID" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +#: StructureDefinition.element.BusinessProcessWorkflow/short +msgid "Business Process Workflow (DAK)" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +#: StructureDefinition.element.BusinessProcessWorkflowSource/short +msgid "Business Process Workflow Source" +msgstr "" + +#: CodeSystem.concept.display +msgid "Business Rule Task" +msgstr "" + +#: CodeSystem.concept.display +msgid "Business Services and Administration Managers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Business Services and Administration Managers Not Elsewhere Classified" +msgstr "" + +#: CodeSystem.concept.display +msgid "Business and Administration Associate Professionals" +msgstr "" + +#: CodeSystem.concept.display +msgid "Business and 
Administration Professionals" +msgstr "" + +#: StructureDefinition.element.DAK.businessProcesses/definition +msgid "Business processes and workflows for achieving health programme objectives" +msgstr "" + +#: StructureDefinition.name +msgid "BusinessProcessWorkflow" +msgstr "" + +#: StructureDefinition.name +msgid "BusinessProcessWorkflowSource" +msgstr "" + +#: CodeSystem.name +#: ValueSet.name +msgid "CDHIv1" +msgstr "" + +#: ValueSet.name +msgid "CDHIv1.1" +msgstr "" + +#: ValueSet.name +msgid "CDHIv1.2" +msgstr "" + +#: ValueSet.name +msgid "CDHIv1.3" +msgstr "" + +#: ValueSet.name +msgid "CDHIv1.4" +msgstr "" + +#: ConceptMap.name +msgid "CDHIv1Hierarchy" +msgstr "" + +#: ConceptMap.name +msgid "CDHIv1toCDHIv2" +msgstr "" + +#: CodeSystem.name +#: ValueSet.name +msgid "CDHIv2" +msgstr "" + +#: ValueSet.name +msgid "CDHIv2.1" +msgstr "" + +#: ValueSet.name +msgid "CDHIv2.2" +msgstr "" + +#: ValueSet.name +msgid "CDHIv2.3" +msgstr "" + +#: ValueSet.name +msgid "CDHIv2.4" +msgstr "" + +#: ConceptMap.name +msgid "CDHIv2Hierarchy" +msgstr "" + +#: CodeSystem.name +#: ValueSet.name +msgid "CDSCv1" +msgstr "" + +#: ConceptMap.name +msgid "CDSCv1toCDSCv2" +msgstr "" + +#: CodeSystem.name +#: ValueSet.name +msgid "CDSCv2" +msgstr "" + +#: ValueSet.name +msgid "CDSCv2.A" +msgstr "" + +#: ValueSet.name +msgid "CDSCv2.B" +msgstr "" + +#: ValueSet.name +msgid "CDSCv2.C" +msgstr "" + +#: ValueSet.name +msgid "CDSCv2.D" +msgstr "" + +#: ValueSet.name +msgid "CDSCv2.E" +msgstr "" + +#: StructureDefinition.element.BusinessProcessWorkflowSource.canonical/short +#: StructureDefinition.element.CoreDataElement.canonical/short +#: StructureDefinition.element.CoreDataElementSource.canonical/short +#: StructureDefinition.element.DecisionSupportLogicSource.canonical/short +#: StructureDefinition.element.GenericPersonaSource.canonical/short +#: StructureDefinition.element.HealthInterventionsSource.canonical/short +#: StructureDefinition.element.ProgramIndicatorSource.canonical/short 
+#: StructureDefinition.element.Requirements.functional.canonical/short +#: StructureDefinition.element.Requirements.nonfunctional.canonical/short +#: StructureDefinition.element.RequirementsSource.canonical/short +#: StructureDefinition.element.TestScenarioSource.canonical/short +#: StructureDefinition.element.UserScenarioSource.canonical/short +msgid "Canonical" +msgstr "" + +#: StructureDefinition.element.FHIRSchemaBase.resourceDefinition/definition +msgid "Canonical URI of the FHIR StructureDefinition that defines this logical model" +msgstr "" + +#: StructureDefinition.element.BusinessProcessWorkflowSource.canonical/definition +msgid "Canonical URI pointing to the BusinessProcessWorkflow definition" +msgstr "" + +#: StructureDefinition.element.CoreDataElementSource.canonical/definition +msgid "Canonical URI pointing to the CoreDataElement definition" +msgstr "" + +#: StructureDefinition.element.DecisionSupportLogicSource.canonical/definition +msgid "Canonical URI pointing to the DecisionSupportLogic definition" +msgstr "" + +#: StructureDefinition.element.Requirements.functional.canonical/definition +msgid "Canonical URI pointing to the FunctionalRequirement definition" +msgstr "" + +#: StructureDefinition.element.GenericPersonaSource.canonical/definition +msgid "Canonical URI pointing to the GenericPersona definition" +msgstr "" + +#: StructureDefinition.element.HealthInterventionsSource.canonical/definition +msgid "Canonical URI pointing to the HealthInterventions definition" +msgstr "" + +#: StructureDefinition.element.Requirements.nonfunctional.canonical/definition +msgid "Canonical URI pointing to the NonFunctionalRequirement definition" +msgstr "" + +#: StructureDefinition.element.ProgramIndicatorSource.canonical/definition +msgid "Canonical URI pointing to the ProgramIndicator definition" +msgstr "" + +#: StructureDefinition.element.RequirementsSource.canonical/definition +msgid "Canonical URI pointing to the Requirements definition" +msgstr "" + +#: 
StructureDefinition.element.TestScenarioSource.canonical/definition +msgid "Canonical URI pointing to the TestScenario definition" +msgstr "" + +#: StructureDefinition.element.UserScenarioSource.canonical/definition +msgid "Canonical URI pointing to the UserScenario definition" +msgstr "" + +#: StructureDefinition.element.CoreDataElement.canonical/definition +msgid "Canonical URI/IRI pointing to the ValueSet, CodeSystem, ConceptMap, or Logical Model definition" +msgstr "" + +#: StructureDefinition.element.DAK.canonicalUrl/short +#: StructureDefinition.element.SushiConfigLogicalModel.canonical/short +msgid "Canonical URL" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.canonical/definition +msgid "Canonical URL for the IG, should match DAK publicationUrl (e.g., http://smart.who.int/trust)" +msgstr "" + +#: StructureDefinition.element.DAK.publicationUrl/definition +msgid "Canonical URL for the published DAK (e.g., https://smart.who.int/base for WHO repositories)" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.resources.exampleCanonical/definition +msgid "Canonical URL this resource is an example of" +msgstr "" + +#: StructureDefinition.element.FunctionalRequirement.capability[x]/short +msgid "Capability" +msgstr "" + +#: StructureDefinition.element.FunctionalRequirement.capability[x]/definition +msgid "Capability achieved by an actor fulfilling the requirement (I want)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Capture diagnostic results from digital devices" +msgstr "" + +#: CodeSystem.concept.display +msgid "Cartographers and Surveyors" +msgstr "" + +#: StructureDefinition.element.NonFunctionalRequirement.category/short +msgid "Category" +msgstr "" + +#: StructureDefinition.element.NonFunctionalRequirement.category/definition +msgid "Category of the non-functional requirement" +msgstr "" + +#: CodeSystem.concept.display +msgid "Census and population information systems" +msgstr "" + +#: CodeSystem.concept.display 
+msgid "Census, population information & data warehouse" +msgstr "" + +#: CodeSystem.concept.display +msgid "Certify birth event" +msgstr "" + +#: CodeSystem.concept.display +msgid "Certify death event" +msgstr "" + +#: ImplementationGuide.definition.page.title +msgid "Changes" +msgstr "" + +#: CodeSystem.concept.display +msgid "Chemical Engineering Technicians" +msgstr "" + +#: CodeSystem.concept.display +msgid "Chemical Engineers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Chemical and Photographic Products Plant and Machine Operators" +msgstr "" + +#: CodeSystem.concept.display +msgid "Chemical and Physical Science Technicians" +msgstr "" + +#: CodeSystem.concept.display +msgid "Chemists" +msgstr "" + +#: CodeSystem.concept.display +msgid "Chief Executives, Senior Officials and Legislators" +msgstr "" + +#: CodeSystem.concept.display +msgid "Citizen‐based reporting" +msgstr "" + +#: CodeSystem.concept.display +msgid "Civil Engineering Technicians" +msgstr "" + +#: CodeSystem.concept.display +msgid "Civil Engineers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Civil Registration and Vital Statistics" +msgstr "" + +#: CodeSystem.concept.display +msgid "Civil Registration and Vital Statistics (CRVS)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Civil registration and vital statistics (CRVS) systems" +msgstr "" + +#: StructureDefinition.element.FunctionalRequirement.classification/short +#: StructureDefinition.element.NonFunctionalRequirement.classification/short +msgid "Classification" +msgstr "" + +#: CodeSystem.title +#: ImplementationGuide.definition.resource.name +#: ValueSet.title +msgid "Classification of Digital Health Interventions v1" +msgstr "" + +#: CodeSystem.title +#: ImplementationGuide.definition.resource.name +#: ValueSet.title +msgid "Classification of Digital Health Interventions v2" +msgstr "" + +#: CodeSystem.title +#: ImplementationGuide.definition.resource.name +msgid "Classification of Digital Health Services and 
Application Types v2" +msgstr "" + +#: CodeSystem.title +#: ImplementationGuide.definition.resource.name +msgid "Classification of Digital Health System Categories v1" +msgstr "" + +#: StructureDefinition.element.FunctionalRequirement.classification/definition +msgid "Classification of the identifier" +msgstr "" + +#: StructureDefinition.element.NonFunctionalRequirement.classification/definition +msgid "Classification or category of the requirement" +msgstr "" + +#: CodeSystem.concept.display +msgid "Classify disease codes or cause of mortality" +msgstr "" + +#: CodeSystem.concept.display +msgid "Cleaners and Helpers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Cleaners and Helpers in Offices, Hotels and Other Establishments" +msgstr "" + +#: CodeSystem.concept.display +msgid "Clerical Support Workers" +msgstr "" + +#: ActorDefinition.title +#: ImplementationGuide.definition.resource.name +msgid "Client Registry / Master Patient Index" +msgstr "" + +#: CodeSystem.concept.display +msgid "Client applications" +msgstr "" + +#: CodeSystem.concept.display +msgid "Client communication system" +msgstr "" + +#: CodeSystem.concept.display +msgid "Client financial transactions" +msgstr "" + +#: CodeSystem.concept.display +msgid "Client health records" +msgstr "" + +#: CodeSystem.concept.display +msgid "Client identification and registration" +msgstr "" + +#: CodeSystem.concept.display +msgid "Client look‐up of health information" +msgstr "" + +#: CodeSystem.concept.display +msgid "Client to client communication" +msgstr "" + +#: ActorDefinition.name +msgid "ClientRegistry" +msgstr "" + +#: CodeSystem.concept.display +msgid "Clients" +msgstr "" + +#: CodeSystem.concept.display +msgid "Clinical terminology and classifications" +msgstr "" + +#: Questionnaire.item.text +msgid "Clinically well" +msgstr "" + +#: CodeSystem.concept.display +msgid "CodeSystem" +msgstr "" + +#: CodeSystem.description +#: ImplementationGuide.definition.resource.description +msgid "CodeSystem 
for Classification of Digital Health Interventions v1. Autogenerated from DAK artifacts" +msgstr "" + +#: CodeSystem.description +#: ImplementationGuide.definition.resource.description +msgid "CodeSystem for Classification of Digital Health System Categories v1. Autogenerated from DAK artifacts" +msgstr "" + +#: CodeSystem.description +#: ImplementationGuide.definition.resource.description +msgid "CodeSystem for Core Data Element types - defines the type of FHIR resource that a Core Data Element references." +msgstr "" + +#: CodeSystem.description +#: ImplementationGuide.definition.resource.description +msgid "CodeSystem for SMART Guidelines Persona Types" +msgstr "" + +#: CodeSystem.description +#: ImplementationGuide.definition.resource.description +msgid "CodeSystem for Smart Guidelines Documentation Actions for Decision Tables" +msgstr "" + +#: CodeSystem.description +#: ImplementationGuide.definition.resource.description +msgid "CodeSystem for Smart Guidelines Documentation Section to autogenerate documentation from artifacts" +msgstr "" + +#: CodeSystem.concept.display +msgid "Collect health insurance contributions" +msgstr "" + +#: CodeSystem.concept.display +msgid "Communication and performance feedback to health worker(s)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Communication and performance feedback to healthcare provider(s)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Communication from health worker(s) to supervisor" +msgstr "" + +#: CodeSystem.concept.display +msgid "Communication from healthcare provider to supervisor(s)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Communication systems" +msgstr "" + +#: ActorDefinition.title +#: ImplementationGuide.definition.resource.name +msgid "Community Health Worker" +msgstr "" + +#: CodeSystem.concept.display +msgid "Community-based Information System" +msgstr "" + +#: CodeSystem.concept.display +msgid "Community-based information systems" +msgstr "" + +#: ActorDefinition.name 
+msgid "CommunityHealthWorker" +msgstr "" + +#: CodeSystem.concept.definition +msgid "Computer based tools which combine medical information databases and algorithms with patient specific data, intended to provide healthcare professionals and/or users with recommendations for diagnosis, prognosis, monitoring and treatment." +msgstr "" + +#: CodeSystem.concept.display +msgid "ConceptMap" +msgstr "" + +#: CodeSystem.concept.display +msgid "Concrete Placers, Concrete Finishers and Related Workers" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.instanceOptions/definition +msgid "Configuration for instance processing" +msgstr "" + +#: ImplementationGuide.definition.grouping.name +msgid "Conformance" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Consolidated: v1 'Certify birth event' merged into v2 3.4.1 'Notify; register and certify birth event'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Consolidated: v1 'Certify death event' merged into v2 3.4.2 'Notify; register and certify death event'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Consolidated: v1 'Notify birth event' merged with v1 3.4.2 [register] and 3.4.3 [certify] into v2 3.4.1 'Notify; register and certify birth event'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Consolidated: v1 'Notify death event' merged with v1 3.4.5 [register] and 3.4.6 [certify] into v2 3.4.2 'Notify; register and certify death event'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Consolidated: v1 'Register birth event' merged into v2 3.4.1 'Notify; register and certify birth event'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Consolidated: v1 'Register death event' merged into v2 3.4.2 'Notify; register and certify death event'" +msgstr "" + +#: CodeSystem.concept.display +msgid "Consultations between remote client and health worker" +msgstr "" + +#: CodeSystem.concept.display +msgid 
"Consultations between remote person and healthcare provider" +msgstr "" + +#: CodeSystem.concept.display +msgid "Consultations for case management between health worker(s)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Consultations for case management between healthcare providers" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.contact/short +msgid "Contact Information" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.contact/definition +msgid "Contact details for this IG" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.publisher.email/definition +msgid "Contact email for the publisher" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.useContext/definition +msgid "Context where this IG is intended to be used" +msgstr "" + +#: StructureDefinition.element.DublinCore.contributor/short +msgid "Contributor" +msgstr "" + +#: CodeSystem.concept.display +msgid "Cooks" +msgstr "" + +#: CodeSystem.concept.display +msgid "Coordinate emergency response and transport" +msgstr "" + +#: StructureDefinition.element.DAK.copyrightYear/short +#: StructureDefinition.element.SushiConfigLogicalModel.copyrightYear/short +msgid "Copyright Year" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +#: StructureDefinition.element.CoreDataElement/short +msgid "Core Data Element (DAK)" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +#: StructureDefinition.element.CoreDataElementSource/short +msgid "Core Data Element Source" +msgstr "" + +#: CodeSystem.title +#: ImplementationGuide.definition.resource.name +msgid "Core Data Element Type" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: ValueSet.title +msgid "Core Data Element Type Value Set" +msgstr "" + +#: StructureDefinition.element.DAK.dataElements/short +msgid "Core Data Elements" +msgstr "" + +#: StructureDefinition.element.DAK.indicators/definition 
+msgid "Core set of indicators for decision-making, performance metrics and reporting" +msgstr "" + +#: StructureDefinition.name +msgid "CoreDataElement" +msgstr "" + +#: StructureDefinition.name +msgid "CoreDataElementSource" +msgstr "" + +#: CodeSystem.name +msgid "CoreDataElementType" +msgstr "" + +#: ValueSet.name +msgid "CoreDataElementTypeVS" +msgstr "" + +#: StructureDefinition.element.DublinCore.coverage/short +msgid "Coverage" +msgstr "" + +#: CodeSystem.concept.display +msgid "Craft and Related Trades Workers" +msgstr "" + +#: StructureDefinition.element.DublinCore.creator/short +msgid "Creator" +msgstr "" + +#: CodeSystem.concept.display +msgid "Crop Farm Labourers" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: ValueSet.description +msgid "Crosscutting Data Services DHIs. Group 4 of the Classification of Digital Health Interventions v1 (2018)." +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: ValueSet.description +msgid "Crosscutting Data Services DHIs. Group 4 of the Classification of Digital Interventions, Services and Applications in Health v2 (CDISAH, 2023)." 
+msgstr "" + +#: Questionnaire.item.text +msgid "Currently on ART" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.pages/definition +msgid "Custom pages included in the IG" +msgstr "" + +#: CodeSystem.concept.display +msgid "Customer Services Clerks" +msgstr "" + +#: StructureDefinition.name +msgid "DAK" +msgstr "" + +#: ImplementationGuide.definition.page.title +msgid "DAK API Documentation Hub" +msgstr "" + +#: StructureDefinition.element.DAK.description[x]/short +msgid "DAK Description" +msgstr "" + +#: StructureDefinition.element.DAK.id/short +msgid "DAK ID" +msgstr "" + +#: StructureDefinition.element.DAK.name/short +msgid "DAK Name" +msgstr "" + +#: StructureDefinition.element.DAK.status/short +msgid "DAK Status" +msgstr "" + +#: StructureDefinition.element.DAK.title/short +msgid "DAK Title" +msgstr "" + +#: StructureDefinition.element.DAK.version/short +msgid "DAK Version" +msgstr "" + +#: Questionnaire.name +msgid "DAK.DT.IMMZ.D2.DT.BCGQuestionnaire" +msgstr "" + +#: CodeSystem.concept.display +msgid "Data Entry Clerks" +msgstr "" + +#: CodeSystem.concept.display +msgid "Data Management" +msgstr "" + +#: CodeSystem.concept.display +msgid "Data Management services" +msgstr "" + +#: CodeSystem.concept.display +msgid "Data coding" +msgstr "" + +#: CodeSystem.concept.display +msgid "Data collection, management, and use" +msgstr "" + +#: CodeSystem.concept.display +msgid "Data consent and provenance" +msgstr "" + +#: StructureDefinition.element.DAK.dataElements/definition +msgid "Data elements required throughout the different points of a workflow" +msgstr "" + +#: CodeSystem.concept.display +msgid "Data exchange across systems" +msgstr "" + +#: CodeSystem.concept.display +msgid "Data exchange and interoperability" +msgstr "" + +#: CodeSystem.concept.display +msgid "Data governance compliance" +msgstr "" + +#: CodeSystem.concept.display +msgid "Data interchange and interoperability" +msgstr "" + +#: CodeSystem.concept.display +msgid "Data 
interchange interoperability and accessibility" +msgstr "" + +#: CodeSystem.concept.display +msgid "Data privacy protection" +msgstr "" + +#: CodeSystem.concept.display +msgid "Data services" +msgstr "" + +#: CodeSystem.concept.display +msgid "Data storage and aggregation" +msgstr "" + +#: CodeSystem.concept.display +msgid "Data synthesis and visualisation" +msgstr "" + +#: CodeSystem.concept.display +msgid "Data synthesis and visualizations" +msgstr "" + +#: CodeSystem.concept.display +msgid "Data warehouses" +msgstr "" + +#: ActorDefinition.name +msgid "DataManager" +msgstr "" + +#: StructureDefinition.element.DublinCore.date/short +msgid "Date" +msgstr "" + +#: StructureDefinition.element.DecisionSupportLogic.id/short +msgid "Decision Support Logic ID" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +#: StructureDefinition.element.DecisionSupportLogicSource/short +msgid "Decision Support Logic Source" +msgstr "" + +#: CodeSystem.concept.display +msgid "Decision support systems" +msgstr "" + +#: StructureDefinition.element.DAK.decisionLogic/short +msgid "Decision-Support Logic" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +#: StructureDefinition.element.DecisionSupportLogic/short +msgid "Decision-Support Logic (DAK)" +msgstr "" + +#: StructureDefinition.element.DAK.decisionLogic/definition +msgid "Decision-support logic and algorithms to support appropriate service delivery" +msgstr "" + +#: StructureDefinition.name +msgid "DecisionSupportLogic" +msgstr "" + +#: StructureDefinition.name +msgid "DecisionSupportLogicSource" +msgstr "" + +#: CodeSystem.name +#: ValueSet.name +msgid "DecisionTableActions" +msgstr "" + +#: ImplementationGuide.definition.resource.description +msgid "Defines the minimum expectations for CodeSystem resources used in SMART Guidelines" +msgstr "" + +#: ImplementationGuide.definition.resource.description +msgid "Defines the minimum expectations for 
ConceptMap resources used in SMART Guidelines" +msgstr "" + +#: ImplementationGuide.definition.resource.description +msgid "Defines the minimum expectations for ImplementationGuide resources used in SMART Guidelines" +msgstr "" + +#: ImplementationGuide.definition.resource.description +msgid "Defines the minimum expectations for Library resources used in SMART Guidelines" +msgstr "" + +#: ImplementationGuide.definition.resource.description +msgid "Defines the minimum expectations for Logical Models used in SMART Guidelines" +msgstr "" + +#: ImplementationGuide.definition.resource.description +msgid "Defines the minimum expectations for Measure resources used in SMART Guidelines" +msgstr "" + +#: ImplementationGuide.definition.resource.description +msgid "Defines the minimum expectations for PlanDefinition resources used in SMART Guidelines" +msgstr "" + +#: ImplementationGuide.definition.resource.description +msgid "Defines the minimum expectations for PlanDefinition resources used in SMART Guidelines which are derived from DAK Decision Tables" +msgstr "" + +#: ImplementationGuide.definition.resource.description +msgid "Defines the minimum expectations for Questionnaire resources used in SMART Guidelines" +msgstr "" + +#: ImplementationGuide.definition.resource.description +msgid "Defines the minimum expectations for StructureDefinition resources used in SMART Guidelines" +msgstr "" + +#: ImplementationGuide.definition.resource.description +msgid "Defines the minimum expectations for StructureMap resources used in SMART Guidelines" +msgstr "" + +#: ImplementationGuide.definition.resource.description +msgid "Defines the minimum expectations for ValueSet resources used in SMART Guidelines" +msgstr "" + +#: StructureDefinition.element.ProgramIndicator.definition/short +msgid "Definition" +msgstr "" + +#: CodeSystem.property.description +msgid "Definition of the ISCO-08 code" +msgstr "" + +#: StructureDefinition.element.ProgramIndicator.definition/definition +msgid 
"Definition of what the indicator measures" +msgstr "" + +#: StructureDefinition.element.ProgramIndicator.denominator/short +msgid "Denominator" +msgstr "" + +#: CodeSystem.concept.display +msgid "Dentists" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.dependencies/short +msgid "Dependencies" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.dependencies.id/short +msgid "Dependency Package ID" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.dependencies.reason/short +msgid "Dependency Reason" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.dependencies.version/short +msgid "Dependency Version" +msgstr "" + +#: StructureDefinition.element.DAK.personas/definition +msgid "Depiction of the human and system actors" +msgstr "" + +#: StructureDefinition.element.BusinessProcessWorkflow.description[x]/short +#: StructureDefinition.element.CoreDataElement.description[x]/short +#: StructureDefinition.element.DecisionSupportLogic.description[x]/short +#: StructureDefinition.element.DublinCore.description/short +#: StructureDefinition.element.GenericPersona.description[x]/short +#: StructureDefinition.element.HealthInterventions.description[x]/short +#: StructureDefinition.element.Persona.description/short +#: StructureDefinition.element.ProgramIndicator.description[x]/short +#: StructureDefinition.element.Requirements.description[x]/short +#: StructureDefinition.element.TestScenario.description[x]/short +#: StructureDefinition.element.UserScenario.description[x]/short +msgid "Description" +msgstr "" + +#: StructureDefinition.element.ProgramIndicator.disaggregation/definition +msgid "Description of how the indicator should be disaggregated" +msgstr "" + +#: StructureDefinition.element.DAK.description[x]/definition +msgid "Description of the DAK - either Markdown content or a URI to a Markdown file (absolute or relative to repository root)" +msgstr "" + +#: 
StructureDefinition.element.SushiConfigLogicalModel.description/definition +msgid "Description of the IG, should match DAK description" +msgstr "" + +#: StructureDefinition.element.FunctionalRequirement.activity/definition +msgid "Description of the activity being performed" +msgstr "" + +#: StructureDefinition.element.BusinessProcessWorkflow.description[x]/definition +msgid "Description of the business process - either Markdown content or a URI to a Markdown file (absolute or relative to repository root)" +msgstr "" + +#: StructureDefinition.element.CoreDataElement.description[x]/definition +msgid "Description of the core data element - either Markdown content or a URI to a Markdown file (absolute or relative to repository root)" +msgstr "" + +#: StructureDefinition.element.DecisionSupportLogic.description[x]/definition +msgid "Description of the decision support logic - either Markdown content or a URI to a Markdown file (absolute or relative to repository root)" +msgstr "" + +#: StructureDefinition.element.ProgramIndicator.denominator/definition +msgid "Description of the denominator calculation" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.groups.description/definition +msgid "Description of the group" +msgstr "" + +#: StructureDefinition.element.HealthInterventions.description[x]/definition +msgid "Description of the health intervention - either Markdown content or a URI to a Markdown file (absolute or relative to repository root)" +msgstr "" + +#: StructureDefinition.element.ProgramIndicator.numerator/definition +msgid "Description of the numerator calculation" +msgstr "" + +#: StructureDefinition.element.BusinessProcessWorkflow.objectives/definition +msgid "Description of the objectives of the workflow" +msgstr "" + +#: StructureDefinition.element.Persona.description/definition +msgid "Description of the persona" +msgstr "" + +#: StructureDefinition.element.GenericPersona.description[x]/definition +msgid "Description of the persona - 
either Markdown content or a URI to a Markdown file (absolute or relative to repository root)" +msgstr "" + +#: StructureDefinition.element.ProgramIndicator.description[x]/definition +msgid "Description of the program indicator - either Markdown content or a URI to a Markdown file (absolute or relative to repository root)" +msgstr "" + +#: StructureDefinition.element.NonFunctionalRequirement.requirement/definition +msgid "Description of the requirement" +msgstr "" + +#: StructureDefinition.element.Requirements.description[x]/definition +msgid "Description of the requirements - either Markdown content or a URI to a Markdown file (absolute or relative to repository root)" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.resources.description/definition +msgid "Description of the resource" +msgstr "" + +#: StructureDefinition.element.UserScenario.description[x]/definition +msgid "Description of the scenario - either Markdown content or a URI to a Markdown file (absolute or relative to repository root, like input/pagecontent/scenario-XYZ.md)" +msgstr "" + +#: StructureDefinition.element.TestScenario.description[x]/definition +msgid "Description of the test scenario - either Markdown content or a URI to a Markdown file (absolute or relative to repository root)" +msgstr "" + +#: StructureDefinition.element.BusinessProcessWorkflow.task.description/definition +msgid "Description of what the task involves" +msgstr "" + +#: CodeSystem.concept.display +msgid "Determine level of subsidies for health coverage schemes" +msgstr "" + +#: CodeSystem.concept.display +msgid "Diagnostics information systems" +msgstr "" + +#: CodeSystem.concept.display +msgid "Dietitians and Nutritionists" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +#: StructureDefinition.element.DAK/short +msgid "Digital Adaptation Kit (DAK)" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: ValueSet.title +msgid "Digital Health 
Interventions for Clients" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: ValueSet.title +msgid "Digital Health Interventions for Health Management and Support Personnel" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: ValueSet.title +msgid "Digital Health Interventions for Health System Managers" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: ValueSet.title +msgid "Digital Health Interventions for Health Workers" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: ValueSet.title +msgid "Digital Health Interventions for Healthcare Providers" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: ValueSet.title +msgid "Digital Health Interventions for Persons" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: ValueSet.description +msgid "Digital Health Interventions whose primary user group is Clients (persons using health services). Group 1 of the Classification of Digital Health Interventions v1 (2018)." +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: ValueSet.description +msgid "Digital Health Interventions whose primary user group is Health Management and Support Personnel. Group 3 of the Classification of Digital Interventions, Services and Applications in Health v2 (CDISAH, 2023)." +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: ValueSet.description +msgid "Digital Health Interventions whose primary user group is Health System Managers. Group 3 of the Classification of Digital Health Interventions v1 (2018)." +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: ValueSet.description +msgid "Digital Health Interventions whose primary user group is Health Workers. Group 2 of the Classification of Digital Health Interventions v1 (2018)." 
+msgstr "" + +#: ImplementationGuide.definition.resource.description +#: ValueSet.description +msgid "Digital Health Interventions whose primary user group is Healthcare Providers. Group 2 of the Classification of Digital Interventions, Services and Applications in Health v2 (CDISAH, 2023)." +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: ValueSet.description +msgid "Digital Health Interventions whose primary user group is Persons (health service users). Group 1 of the Classification of Digital Interventions, Services and Applications in Health v2 (CDISAH, 2023)." +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: ValueSet.title +msgid "Digital Health Interventions: Data Services" +msgstr "" + +#: CodeSystem.concept.definition +msgid "Digital Systems that interact in this DAK" +msgstr "" + +#: StructureDefinition.element.ProgramIndicator.disaggregation/short +msgid "Disaggregation" +msgstr "" + +#: ValueSet.name +msgid "DocumentationSection" +msgstr "" + +#: CodeSystem.name +msgid "DocumentationSections" +msgstr "" + +#: CodeSystem.concept.display +msgid "Domestic Cleaners and Helpers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Domestic, Hotel and Office Cleaners and Helpers" +msgstr "" + +#: ImplementationGuide.definition.page.title +msgid "Downloads" +msgstr "" + +#: CodeSystem.concept.display +msgid "Draughtspersons" +msgstr "" + +#: CodeSystem.concept.display +msgid "Drivers and Mobile Plant Operators" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +#: StructureDefinition.element.DublinCore/short +msgid "Dublin Core Metadata Element Set" +msgstr "" + +#: StructureDefinition.name +msgid "DublinCore" +msgstr "" + +#: CodeSystem.concept.display +msgid "Electrical Engineering Technicians" +msgstr "" + +#: CodeSystem.concept.display +msgid "Electrical Engineers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Electrical and Electronic Trades Workers" +msgstr "" + +#: 
CodeSystem.concept.display +msgid "Electronic Medical Record" +msgstr "" + +#: ActorDefinition.title +#: ImplementationGuide.definition.resource.name +msgid "Electronic Medical Record (EMR) System" +msgstr "" + +#: CodeSystem.concept.display +msgid "Electronic medical record systems" +msgstr "" + +#: ActorDefinition.name +msgid "ElectronicMedicalRecord" +msgstr "" + +#: CodeSystem.concept.display +msgid "Electronics Engineering Technicians" +msgstr "" + +#: CodeSystem.concept.display +msgid "Electronics Engineers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Electrotechnology Engineers" +msgstr "" + +#: StructureDefinition.element.CoreDataElement.id/short +msgid "Element ID" +msgstr "" + +#: CodeSystem.concept.display +msgid "Elementary Occupations" +msgstr "" + +#: CodeSystem.concept.display +msgid "Emergency preparedness and response systems" +msgstr "" + +#: CodeSystem.concept.display +msgid "Emergency response system" +msgstr "" + +#: CodeSystem.concept.display +msgid "Engineering Professionals (excluding Electrotechnology)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Engineering Professionals Not Elsewhere Classified" +msgstr "" + +#: CodeSystem.concept.display +msgid "Enrol client for health services/clinical care plan" +msgstr "" + +#: CodeSystem.concept.display +msgid "Enrol person(s) for health services/clinical care plan" +msgstr "" + +#: CodeSystem.concept.display +msgid "Environmental Engineers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Environmental Protection Professionals" +msgstr "" + +#: CodeSystem.concept.display +msgid "Environmental and Occupational Health and Hygiene Professionals" +msgstr "" + +#: CodeSystem.concept.display +msgid "Environmental monitoring systems" +msgstr "" + +#: CodeSystem.concept.display +msgid "Equipment and asset management" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.resources.exampleCanonical/short +msgid "Example Canonical" +msgstr "" + +#: 
ConceptMap.group.element.target.comment +msgid "Expanded: 'Map location of health facilities/structures' → '...and households'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Expanded: 'Monitor status' → 'Monitor status and maintenance'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Expanded: now explicitly lists examples [images; notes; videos]" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.experimental/short +msgid "Experimental" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.resources/definition +msgid "Explicit resource definitions for the IG" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: ValueSet.description +msgid "Extensible value set of ISCO-08 codes for persona classification" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.Extension/definition +msgid "Extension to reference SMART Guidelines task type" +msgstr "" + +#: StructureDefinition.element.FHIRSchemaBase.fhirParent/short +msgid "FHIR Parent" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +#: StructureDefinition.element.FHIRSchemaBase/short +msgid "FHIR Schema Base (SMART Guidelines)" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.fhirVersion/short +msgid "FHIR Version" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.global.type/definition +msgid "FHIR resource type" +msgstr "" + +#: StructureDefinition.name +msgid "FHIRSchemaBase" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.FSHOnly/short +msgid "FSH Only" +msgstr "" + +#: CodeSystem.concept.display +msgid "Facility Management Information System" +msgstr "" + +#: CodeSystem.concept.display +msgid "Facility management" +msgstr "" + +#: CodeSystem.concept.display +msgid "Facility management information systems" +msgstr "" + +#: CodeSystem.concept.display +msgid "Farming, Forestry 
and Fisheries Advisers" +msgstr "" + +#: StructureDefinition.element.TestScenario.feature/short +msgid "Feature File" +msgstr "" + +#: CodeSystem.concept.display +msgid "Field Crop and Vegetable Growers" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.pages.filename/definition +msgid "Filename of the page (e.g., index.md)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Finance Managers" +msgstr "" + +#: StructureDefinition.mapping.w5/name +msgid "FiveWs Pattern Mapping" +msgstr "" + +#: CodeSystem.concept.display +msgid "Food Preparation Assistants" +msgstr "" + +#: CodeSystem.concept.display +msgid "Food Processing, Wood Working, Garment and Other Craft and Related Trades Workers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Form creation for data acquisition" +msgstr "" + +#: StructureDefinition.element.DublinCore.format/short +msgid "Format" +msgstr "" + +#: StructureDefinition.element.DAK.title/definition +msgid "Full title of the DAK (e.g., SMART Base)" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.title/definition +msgid "Full title of the IG, should match DAK title (e.g., WHO SMART Trust)" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +#: StructureDefinition.element.FunctionalRequirement/short +msgid "Functional Requirement (DAK)" +msgstr "" + +#: StructureDefinition.element.Requirements.functional/short +msgid "Functional Requirements" +msgstr "" + +#: StructureDefinition.element.DAK.requirements/short +msgid "Functional and Non-Functional Requirements" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +#: StructureDefinition.element.Requirements/short +msgid "Functional and Non-Functional Requirements (DAK)" +msgstr "" + +#: StructureDefinition.element.Requirements.functional/definition +msgid "Functional requirements for the system - can be provided as canonical reference or inline instance data" +msgstr "" + +#: 
StructureDefinition.name +msgid "FunctionalRequirement" +msgstr "" + +#: CodeSystem.concept.display +msgid "Gardeners, Horticultural and Nursery Growers" +msgstr "" + +#: CodeSystem.concept.display +msgid "General Office Clerks" +msgstr "" + +#: CodeSystem.concept.display +msgid "General and Keyboard Clerks" +msgstr "" + +#: CodeSystem.concept.display +msgid "Generalist Medical Practitioners" +msgstr "" + +#: CodeSystem.concept.display +msgid "Generative AI for tailored content creation" +msgstr "" + +#: StructureDefinition.element.DAK.businessProcesses/short +msgid "Generic Business Processes and Workflows" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +#: StructureDefinition.element.GenericPersona/short +msgid "Generic Persona (DAK)" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +#: StructureDefinition.element.GenericPersonaSource/short +msgid "Generic Persona Source" +msgstr "" + +#: StructureDefinition.element.DAK.personas/short +msgid "Generic Personas" +msgstr "" + +#: StructureDefinition.name +msgid "GenericPersona" +msgstr "" + +#: StructureDefinition.name +msgid "GenericPersonaSource" +msgstr "" + +#: CodeSystem.concept.display +msgid "Geographic Information Systems" +msgstr "" + +#: CodeSystem.concept.display +msgid "Geographic information systems (GIS)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Geologists and Geophysicists" +msgstr "" + +#: CodeSystem.concept.display +msgid "Geospatial information management" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.global/short +msgid "Global Profiles" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.global/definition +msgid "Global profile assignments" +msgstr "" + +#: CodeSystem.concept.display +msgid "Graphic and Multimedia Designers" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.groups.description/short +msgid "Group Description" +msgstr "" + +#: 
StructureDefinition.element.SushiConfigLogicalModel.groups.id/short +msgid "Group ID" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.groups.name/short +msgid "Group Name" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.groups.resources/short +msgid "Group Resources" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.resources.groupingId/short +msgid "Grouping ID" +msgstr "" + +#: CodeSystem.concept.display +msgid "Guidance" +msgstr "" + +#: Questionnaire.item.text +msgid "HIV status" +msgstr "" + +#: StructureDefinition.mapping.v2/name +msgid "HL7 v2 Mapping" +msgstr "" + +#: CodeSystem.concept.display +msgid "Hairdressers, Beauticians and Related Workers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Handicraft and Printing Workers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Hardware Device" +msgstr "" + +#: CodeSystem.concept.definition +msgid "Hardware devices required to perform workflows in the DAK" +msgstr "" + +#: CodeSystem.concept.display +msgid "Health Associate Professionals" +msgstr "" + +#: ActorDefinition.title +#: ImplementationGuide.definition.resource.name +msgid "Health Data Manager and Analyst" +msgstr "" + +#: ActorDefinition.title +#: ImplementationGuide.definition.resource.name +msgid "Health Information Exchange / Interoperability Platform" +msgstr "" + +#: StructureDefinition.element.HealthInterventions.id/short +msgid "Health Intervention ID" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +#: StructureDefinition.element.HealthInterventionsSource/short +msgid "Health Interventions Source" +msgstr "" + +#: StructureDefinition.element.DAK.healthInterventions/short +msgid "Health Interventions and Recommendations" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +#: StructureDefinition.element.HealthInterventions/short +msgid "Health Interventions and Recommendations (DAK)" +msgstr 
"" + +#: CodeSystem.concept.display +msgid "Health Management Information System" +msgstr "" + +#: ActorDefinition.title +#: ImplementationGuide.definition.resource.name +msgid "Health Management Information System (HMIS)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Health Management Information systems (HMIS)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Health Professionals" +msgstr "" + +#: CodeSystem.concept.display +msgid "Health Professionals Not Elsewhere Classified" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: ValueSet.title +msgid "Health System Challenges" +msgstr "" + +#: ActorDefinition.title +#: ImplementationGuide.definition.resource.name +msgid "Health System Manager" +msgstr "" + +#: CodeSystem.concept.display +msgid "Health finance and insurance system" +msgstr "" + +#: CodeSystem.concept.display +msgid "Health finance-related information systems" +msgstr "" + +#: CodeSystem.concept.display +msgid "Health financing" +msgstr "" + +#: CodeSystem.concept.display +msgid "Health management and support personnel" +msgstr "" + +#: CodeSystem.concept.display +msgid "Health program monitoring systems" +msgstr "" + +#: CodeSystem.concept.display +msgid "Health system financial management" +msgstr "" + +#: CodeSystem.concept.display +msgid "Health system managers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Health system/Provider administration" +msgstr "" + +#: CodeSystem.concept.display +msgid "Health worker activity planning and scheduling" +msgstr "" + +#: CodeSystem.concept.display +msgid "Health worker communication" +msgstr "" + +#: CodeSystem.concept.display +msgid "Health worker decision support" +msgstr "" + +#: CodeSystem.concept.display +msgid "Health worker registry" +msgstr "" + +#: CodeSystem.concept.display +msgid "Health worker training" +msgstr "" + +#: CodeSystem.concept.display +msgid "Health workers" +msgstr "" + +#: StructureDefinition.name +msgid "HealthInterventions" +msgstr "" + +#: 
StructureDefinition.name +msgid "HealthInterventionsSource" +msgstr "" + +#: ActorDefinition.name +msgid "HealthManagementInformationSystem" +msgstr "" + +#: ActorDefinition.name +msgid "HealthSystemManager" +msgstr "" + +#: ActorDefinition.title +#: ImplementationGuide.definition.resource.name +msgid "Healthcare Provider" +msgstr "" + +#: CodeSystem.concept.display +msgid "Healthcare provider communication" +msgstr "" + +#: CodeSystem.concept.display +msgid "Healthcare provider decision support" +msgstr "" + +#: CodeSystem.concept.display +msgid "Healthcare provider financial transactions" +msgstr "" + +#: CodeSystem.concept.display +msgid "Healthcare provider training" +msgstr "" + +#: CodeSystem.concept.display +msgid "Healthcare providers" +msgstr "" + +#: ActorDefinition.name +msgid "HealthcareProvider" +msgstr "" + +#: CodeSystem.concept.display +msgid "Helpers in Food Preparation" +msgstr "" + +#: ConceptMap.title +#: ImplementationGuide.definition.resource.name +msgid "Hierarchy of the Classification of Digital Health Interventions v1" +msgstr "" + +#: ConceptMap.title +#: ImplementationGuide.definition.resource.name +msgid "Hierarchy of the Classification of Digital Health Interventions v2" +msgstr "" + +#: StructureDefinition.element.DAK.requirements/definition +msgid "High-level list of core functions and capabilities that the system must have" +msgstr "" + +#: ImplementationGuide.definition.page.title +msgid "Home" +msgstr "" + +#: CodeSystem.concept.display +msgid "Hospitality, Retail and Other Services Managers" +msgstr "" + +#: CodeSystem.concept.display +msgid "House Builders" +msgstr "" + +#: CodeSystem.concept.display +msgid "Human Resource Information System" +msgstr "" + +#: CodeSystem.concept.display +msgid "Human Resource Managers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Human resource information systems" +msgstr "" + +#: CodeSystem.concept.display +msgid "Human resource management" +msgstr "" + +#: 
StructureDefinition.element.SushiConfigLogicalModel.groups.name/definition +msgid "Human-readable name for the group" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.resources.name/definition +msgid "Human-readable name for the resource" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.resources.groupingId/definition +msgid "ID of the group this resource belongs to" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.description/short +msgid "IG Description" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.id/short +msgid "IG Identifier" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.meta/short +msgid "IG Metadata" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.name/short +msgid "IG Name" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.status/short +msgid "IG Status" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.title/short +msgid "IG Title" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.version/short +msgid "IG Version" +msgstr "" + +#: Questionnaire.title +msgid "IMMZ.D2 Determine required vaccination(s) if any" +msgstr "" + +#: StructureDefinition.element.GenericPersona.iscoCode/short +#: StructureDefinition.element.Persona.ISCO/short +#: StructureDefinition.element.Persona.ISCO/definition +msgid "ISCO Code" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: ValueSet.title +msgid "ISCO-08 Value Set" +msgstr "" + +#: StructureDefinition.element.GenericPersona.iscoCode/definition +msgid "ISCO-08 codes for occupation classification" +msgstr "" + +#: CodeSystem.description +#: ImplementationGuide.definition.resource.description +msgid "ISCO-08 codes from the International Labour Organization official classification" +msgstr "" + +#: CodeSystem.name +msgid "ISCO08" +msgstr "" + +#: ValueSet.name +msgid "ISCO08ValueSet" +msgstr "" + +#: 
StructureDefinition.mapping.iso11179/name +msgid "ISO 11179" +msgstr "" + +#: CodeSystem.concept.display +msgid "Identification and registration of persons" +msgstr "" + +#: CodeSystem.concept.display +msgid "Identification registries and directories" +msgstr "" + +#: StructureDefinition.element.DublinCore.identifier/short +msgid "Identifier" +msgstr "" + +#: StructureDefinition.element.DAK.id/definition +msgid "Identifier for the DAK (e.g., smart.who.int.base)" +msgstr "" + +#: StructureDefinition.element.CoreDataElement.id/definition +msgid "Identifier for the core data element" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.groups.id/definition +msgid "Identifier for the group" +msgstr "" + +#: StructureDefinition.element.GenericPersona.id/definition +msgid "Identifier for the persona" +msgstr "" + +#: StructureDefinition.element.ProgramIndicator.id/definition +msgid "Identifier for the program indicator" +msgstr "" + +#: StructureDefinition.element.BusinessProcessWorkflow.task.identifier/definition +msgid "Identifier for the task" +msgstr "" + +#: CodeSystem.concept.display +msgid "Identify client(s) in need of services" +msgstr "" + +#: CodeSystem.concept.display +msgid "Identify persons in need of services" +msgstr "" + +#: CodeSystem.concept.display +msgid "Immunization information systems" +msgstr "" + +#: Questionnaire.item.text +msgid "Immunologically stable" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.Extension/definition +msgid "Indicates that if the conditions for this requirement are satisfied, then it should be viewed as satisfying the referenced requirement." 
+msgstr "" + +#: StructureDefinition.element.ProgramIndicator.id/short +msgid "Indicator ID" +msgstr "" + +#: CodeSystem.concept.display +msgid "Industrial and Production Engineers" +msgstr "" + +#: StructureDefinition.element.DublinCore.rights/definition +msgid "Information about rights held in and over the resource" +msgstr "" + +#: CodeSystem.concept.display +msgid "Information and Communications Technicians" +msgstr "" + +#: CodeSystem.concept.display +msgid "Information and Communications Technology Professionals" +msgstr "" + +#: StructureDefinition.element.BusinessProcessWorkflowSource.instance/definition +msgid "Inline BusinessProcessWorkflow instance data" +msgstr "" + +#: StructureDefinition.element.CoreDataElementSource.instance/definition +msgid "Inline CoreDataElement instance data" +msgstr "" + +#: StructureDefinition.element.DecisionSupportLogicSource.instance/definition +msgid "Inline DecisionSupportLogic instance data" +msgstr "" + +#: StructureDefinition.element.Requirements.functional.instance/definition +msgid "Inline FunctionalRequirement instance data" +msgstr "" + +#: StructureDefinition.element.GenericPersonaSource.instance/definition +msgid "Inline GenericPersona instance data" +msgstr "" + +#: StructureDefinition.element.HealthInterventionsSource.instance/definition +msgid "Inline HealthInterventions instance data" +msgstr "" + +#: StructureDefinition.element.Requirements.nonfunctional.instance/definition +msgid "Inline NonFunctionalRequirement instance data" +msgstr "" + +#: StructureDefinition.element.ProgramIndicatorSource.instance/definition +msgid "Inline ProgramIndicator instance data" +msgstr "" + +#: StructureDefinition.element.RequirementsSource.instance/definition +msgid "Inline Requirements instance data" +msgstr "" + +#: StructureDefinition.element.TestScenarioSource.instance/definition +msgid "Inline TestScenario instance data" +msgstr "" + +#: StructureDefinition.element.UserScenarioSource.instance/definition +msgid "Inline 
UserScenario instance data" +msgstr "" + +#: StructureDefinition.element.BusinessProcessWorkflowSource.instance/short +#: StructureDefinition.element.CoreDataElementSource.instance/short +#: StructureDefinition.element.DecisionSupportLogicSource.instance/short +#: StructureDefinition.element.GenericPersonaSource.instance/short +#: StructureDefinition.element.HealthInterventionsSource.instance/short +#: StructureDefinition.element.ProgramIndicatorSource.instance/short +#: StructureDefinition.element.Requirements.functional.instance/short +#: StructureDefinition.element.Requirements.nonfunctional.instance/short +#: StructureDefinition.element.RequirementsSource.instance/short +#: StructureDefinition.element.TestScenarioSource.instance/short +#: StructureDefinition.element.UserScenarioSource.instance/short +msgid "Instance" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.instanceOptions/short +msgid "Instance Options" +msgstr "" + +#: CodeSystem.title +#: ImplementationGuide.definition.resource.name +msgid "International Standard Classification of Occupations 2008" +msgstr "" + +#: ActorDefinition.name +msgid "InteroperabilityPlatform" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.resources.exampleBoolean/short +msgid "Is Example" +msgstr "" + +#: StructureDefinition.element.FHIRSchemaBase.jsonldContextTemplate/short +msgid "JSON-LD Context Template" +msgstr "" + +#: StructureDefinition.element.FHIRSchemaBase.jsonldValuesets/short +msgid "JSON-LD Value Sets" +msgstr "" + +#: StructureDefinition.element.FHIRSchemaBase.jsonldContextTemplate/definition +msgid "JSON-LD context template for this logical model (serialised as 'jsonld:contextTemplate' in JSON)" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.jurisdiction/short +msgid "Jurisdiction" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.jurisdiction/definition +msgid "Jurisdictions where this IG applies" +msgstr "" + +#: 
CodeSystem.concept.display +msgid "Key Persona" +msgstr "" + +#: CodeSystem.concept.definition +msgid "Key Persona for workflows in this DAK" +msgstr "" + +#: CodeSystem.concept.display +msgid "Keyboard Operators" +msgstr "" + +#: CodeSystem.concept.display +msgid "Knowledge Management" +msgstr "" + +#: CodeSystem.concept.display +msgid "Knowledge management systems" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.releaseLabel/definition +msgid "Label for this release (e.g., ci-build, draft, ballot)" +msgstr "" + +#: ActorDefinition.title +#: ImplementationGuide.definition.resource.name +msgid "Laboratory Information System (LIS)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Laboratory and Diagnostic System" +msgstr "" + +#: CodeSystem.concept.display +msgid "Laboratory and Diagnostics Imaging Management" +msgstr "" + +#: CodeSystem.concept.display +msgid "Laboratory and diagnostics imaging management" +msgstr "" + +#: CodeSystem.concept.display +msgid "Laboratory information systems" +msgstr "" + +#: ActorDefinition.name +msgid "LaboratoryInformationSystem" +msgstr "" + +#: CodeSystem.concept.display +msgid "Labourers in Mining, Construction, Manufacturing and Transport" +msgstr "" + +#: CodeSystem.concept.display +msgid "Landscape Architects" +msgstr "" + +#: StructureDefinition.element.DublinCore.language/short +msgid "Language" +msgstr "" + +#: CodeSystem.concept.display +msgid "Learning and Training System" +msgstr "" + +#: CodeSystem.concept.display +msgid "Learning and training systems" +msgstr "" + +#: CodeSystem.concept.display +msgid "Legal, Social and Cultural Professionals" +msgstr "" + +#: CodeSystem.concept.display +msgid "Legal, Social, Cultural and Related Associate Professionals" +msgstr "" + +#: CodeSystem.concept.display +msgid "Legislators" +msgstr "" + +#: CodeSystem.concept.display +msgid "Legislators and Senior Officials" +msgstr "" + +#: ImplementationGuide.definition.page.title +#: 
StructureDefinition.element.DAK.license/short +#: StructureDefinition.element.SushiConfigLogicalModel.license/short +msgid "License" +msgstr "" + +#: StructureDefinition.element.DAK.license/definition +msgid "License under which the DAK is published" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.license/definition +msgid "License under which the IG is published, should match DAK license" +msgstr "" + +#: CodeSystem.concept.display +msgid "Life Science Professionals" +msgstr "" + +#: CodeSystem.concept.display +msgid "Life Science Technicians and Related Associate Professionals" +msgstr "" + +#: StructureDefinition.element.BusinessProcessWorkflow.source/definition +msgid "Link to a BPMN file containing the workflow definition. Source URI could be absolute or relative to the root of the DAK" +msgstr "" + +#: StructureDefinition.element.DecisionSupportLogic.source/definition +msgid "Link to a DMN file containing the decision logic. Source URI could be absolute or relative to the root of the DAK" +msgstr "" + +#: StructureDefinition.element.TestScenario.feature/definition +msgid "Link to a feature file containing the test scenarios" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.name +msgid "LinkIdExt" +msgstr "" + +#: CodeSystem.concept.display +msgid "List health facilities and related information" +msgstr "" + +#: CodeSystem.concept.display +msgid "List health workforce cadres and related identification information" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.groups.resources/definition +msgid "List of resources in this group" +msgstr "" + +#: CodeSystem.concept.display +msgid "Livestock Farm Labourers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Location mapping" +msgstr "" + +#: CodeSystem.concept.display +msgid "Logical Model" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.CoreDataElement/definition +msgid "Logical 
Model for representing Core Data Elements from a DAK. A core data element can be one of: a ValueSet, a CodeSystem, a ConceptMap, or a Logical Model adherent to SGLogicalModel. This is the ONE EXCEPTION to allowing FHIR R4 models into the DAK LMs." +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.DecisionSupportLogic/definition +msgid "Logical Model for representing Decision-Support Logic from a DAK. Decision-support logic and algorithms to support appropriate service delivery in accordance with WHO clinical, public health and data use guidelines." +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.Requirements/definition +msgid "Logical Model for representing Functional and Non-Functional Requirements from a DAK. A high-level list of core functions and capabilities that the system must have to meet the end users' needs." +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.BusinessProcessWorkflow/definition +msgid "Logical Model for representing Generic Business Processes and Workflows from a DAK. A business process is a set of related activities or tasks performed together to achieve the objectives of the health programme area." +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.GenericPersona/definition +msgid "Logical Model for representing Generic Personas from a DAK. Depiction of the human and system actors. Human actors are end users, supervisors and related stakeholders who would be interacting with the digital system or involved in the clinical care, public health or health system pathway." +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.HealthInterventions/definition +msgid "Logical Model for representing Health Interventions and Recommendations from a DAK. 
Overview of the health interventions and WHO, regional or national recommendations included within the DAK." +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.Persona/definition +msgid "Logical Model for representing Personas from a DAK" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.ProgramIndicator/definition +msgid "Logical Model for representing Program Indicators from a DAK. Core set of indicators that need to be aggregated for decision-making, performance metrics and subnational and national reporting." +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.TestScenario/definition +msgid "Logical Model for representing Test Scenarios from a DAK. A set of test scenarios to validate an implementation of the DAK." +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.UserScenario/definition +msgid "Logical Model for representing User Scenarios from a DAK. Narratives that describe how the different personas may interact with each other." 
+msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.DAK/definition +msgid "Logical Model for representing a complete Digital Adaptation Kit (DAK) with metadata and all 9 DAK components" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.FunctionalRequirement/definition +msgid "Logical Model for representing functional requirement from a DAK" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.NonFunctionalRequirement/definition +msgid "Logical Model for representing non-functional requirement from a DAK" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.DublinCore/definition +msgid "Logical Model representing Dublin Core metadata elements as defined at https://www.dublincore.org/specifications/dublin-core/dcmi-terms/" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.groups/definition +msgid "Logical groupings of resources in the IG" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.SushiConfigLogicalModel/definition +msgid "Logical model defining the structure of sushi-config.yaml files used for FHIR Implementation Guide configuration. This model captures the essential metadata and configuration parameters needed for IG publishing." 
+msgstr "" + +#: CodeSystem.concept.display +msgid "Logistics Management Information System" +msgstr "" + +#: ActorDefinition.title +#: ImplementationGuide.definition.resource.name +msgid "Logistics Management Information System (LMIS)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Logistics management information systems (LMIS)" +msgstr "" + +#: ActorDefinition.name +msgid "LogisticsManagementInformationSystem" +msgstr "" + +#: CodeSystem.concept.display +msgid "Longitudinal tracking of clients’ health status and services" +msgstr "" + +#: CodeSystem.concept.display +msgid "Longitudinal tracking of person's health status and services" +msgstr "" + +#: CodeSystem.concept.display +msgid "Look-up of information on health and health services by individuals" +msgstr "" + +#: CodeSystem.concept.display +msgid "Manage and plan budget allocations, revenue and expenditures" +msgstr "" + +#: CodeSystem.concept.display +msgid "Manage budget and expenditures" +msgstr "" + +#: CodeSystem.concept.display +msgid "Manage certification/registration of health worker(s)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Manage client’s structured clinical records" +msgstr "" + +#: CodeSystem.concept.display +msgid "Manage client’s unstructured clinical records" +msgstr "" + +#: CodeSystem.concept.display +msgid "Manage health workforce activities" +msgstr "" + +#: CodeSystem.concept.display +msgid "Manage inventory and distribution of health commodities" +msgstr "" + +#: CodeSystem.concept.display +msgid "Manage person-centred structured clinical records" +msgstr "" + +#: CodeSystem.concept.display +msgid "Manage person-centred unstructured clinical records (e.g. 
notes, images, documents)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Manage procurement of commodities" +msgstr "" + +#: CodeSystem.concept.display +msgid "Manage provision and withdrawal of consent by individuals" +msgstr "" + +#: CodeSystem.concept.display +msgid "Manage referrals between health and other sectors" +msgstr "" + +#: CodeSystem.concept.display +msgid "Manage referrals between health and other sectors (social services, police, justice, economic support schemes)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Manage referrals between points of service within health sector" +msgstr "" + +#: CodeSystem.concept.display +msgid "Manage registration/certification of healthcare provider(s)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Managers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Managing Directors and Chief Executives" +msgstr "" + +#: CodeSystem.concept.display +msgid "Manual Task" +msgstr "" + +#: CodeSystem.concept.display +msgid "Map health and health indicator data to geographic data" +msgstr "" + +#: CodeSystem.concept.display +msgid "Map location of clients and households" +msgstr "" + +#: CodeSystem.concept.display +msgid "Map location of health event" +msgstr "" + +#: CodeSystem.concept.display +msgid "Map location of health events" +msgstr "" + +#: CodeSystem.concept.display +msgid "Map location of health facilities/structures" +msgstr "" + +#: CodeSystem.concept.display +msgid "Map location of health facilities/structures and households" +msgstr "" + +#: CodeSystem.concept.display +msgid "Map location of health worker" +msgstr "" + +#: CodeSystem.concept.display +msgid "Map location of healthcare provider(s)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Map location of persons and settlements" +msgstr "" + +#: ConceptMap.title +#: ImplementationGuide.definition.resource.name +msgid "Mapping from CDHI v1 to CDISAH v2" +msgstr "" + +#: ConceptMap.title +#: ImplementationGuide.definition.resource.name 
+msgid "Mapping from CDSC v1 to Services and Application Types v2" +msgstr "" + +#: ConceptMap.description +#: ImplementationGuide.definition.resource.description +msgid "Mapping to represent hierarchy within the Classification of Digital Health Interventions v1." +msgstr "" + +#: ConceptMap.description +#: ImplementationGuide.definition.resource.description +msgid "Mapping to represent hierarchy within the Classification of Digital Interventions, Services and Applications in Health (CDISAH) v2." +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.name +msgid "Markdown" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.Extension/definition +msgid "Markdown extension" +msgstr "" + +#: CodeSystem.concept.display +msgid "Market-oriented Skilled Agricultural Workers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Market-oriented Skilled Forestry, Fishery and Hunting Workers" +msgstr "" + +#: CodeSystem.concept.definition +msgid "Master lists of health facilities with unique identifiers, locations, services offered, and operational information." 
+msgstr "" + +#: CodeSystem.concept.display +msgid "Master patient index" +msgstr "" + +#: CodeSystem.concept.display +msgid "Mathematicians, Actuaries and Statisticians" +msgstr "" + +#: CodeSystem.concept.display +msgid "Mechanical Engineering Technicians" +msgstr "" + +#: CodeSystem.concept.display +msgid "Mechanical Engineers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Medical Doctors" +msgstr "" + +#: CodeSystem.concept.display +msgid "Medical and Pharmaceutical Technicians" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.menu/short +msgid "Menu Structure" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.menu.title/short +msgid "Menu Title" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.menu.url/short +msgid "Menu URL" +msgstr "" + +#: CodeSystem.concept.display +msgid "Merge, de-duplicate and curate coded datasets or terminologies" +msgstr "" + +#: CodeSystem.concept.display +msgid "Merge, de‐duplicate, and curate coded datasets or terminologies" +msgstr "" + +#: CodeSystem.concept.display +msgid "Message routing" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.meta.profile/short +msgid "Meta Profiles" +msgstr "" + +#: CodeSystem.concept.display +msgid "Metal Processing and Finishing Plant Operators" +msgstr "" + +#: CodeSystem.concept.display +msgid "Metal, Machinery and Related Trades Workers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Meteorologists" +msgstr "" + +#: CodeSystem.concept.display +msgid "Midwifery Professionals" +msgstr "" + +#: CodeSystem.concept.display +msgid "Mineral and Stone Processing Plant Operators" +msgstr "" + +#: CodeSystem.concept.display +msgid "Miners and Quarriers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Mining Engineers, Metallurgists and Related Professionals" +msgstr "" + +#: CodeSystem.concept.display +msgid "Mining and Metallurgical Technicians" +msgstr "" + +#: CodeSystem.concept.display +msgid "Mining and 
Mineral Processing Plant Operators" +msgstr "" + +#: CodeSystem.concept.display +msgid "Mining, Manufacturing and Construction Supervisors" +msgstr "" + +#: CodeSystem.concept.display +msgid "Mixed Crop Growers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Mixed Crop and Livestock Farm Labourers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Monitor cold-chain sensitive commodities" +msgstr "" + +#: CodeSystem.concept.display +msgid "Monitor cold‐chain sensitive commodities" +msgstr "" + +#: CodeSystem.concept.display +msgid "Monitor performance of health worker(s)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Monitor performance of healthcare provider(s)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Monitor status and maintenance of health equipment" +msgstr "" + +#: CodeSystem.concept.display +msgid "Monitor status of health equipment" +msgstr "" + +#: StructureDefinition.element.Persona.name/short +#: StructureDefinition.element.Persona.name/definition +#: StructureDefinition.element.ProgramIndicator.name/short +msgid "Name" +msgstr "" + +#: StructureDefinition.element.ProgramIndicator.name/definition +msgid "Name of the indicator" +msgstr "" + +#: StructureDefinition.element.DAK.publisher.name/definition +msgid "Name of the publishing organization" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.publisher.name/definition +msgid "Name of the publishing organization, should match DAK publisher name" +msgstr "" + +#: StructureDefinition.element.BusinessProcessWorkflow.task.name/definition +msgid "Name of the task" +msgstr "" + +#: StructureDefinition.element.DAK.userScenarios/definition +msgid "Narratives that describe how the different personas may interact with each other" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.menu/definition +msgid "Navigation menu structure for the IG" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +#: 
StructureDefinition.element.NonFunctionalRequirement/short +msgid "Non-Functional Requirement (DAK)" +msgstr "" + +#: StructureDefinition.element.Requirements.nonfunctional/short +msgid "Non-Functional Requirements" +msgstr "" + +#: StructureDefinition.element.Requirements.nonfunctional/definition +msgid "Non-functional requirements for the system - can be provided as canonical reference or inline instance data" +msgstr "" + +#: StructureDefinition.name +msgid "NonFunctionalRequirement" +msgstr "" + +#: CodeSystem.concept.display +msgid "Non‐routine data collection and management" +msgstr "" + +#: CodeSystem.concept.display +msgid "Notification of public health events from point of diagnosis" +msgstr "" + +#: CodeSystem.concept.display +msgid "Notify birth event" +msgstr "" + +#: CodeSystem.concept.display +msgid "Notify death event" +msgstr "" + +#: CodeSystem.concept.display +msgid "Notify stock levels of health commodities" +msgstr "" + +#: CodeSystem.concept.display +msgid "Notify, register and certify birth event" +msgstr "" + +#: CodeSystem.concept.display +msgid "Notify, register and certify death event" +msgstr "" + +#: Questionnaire.item.text +msgid "Number of BCG primary series doses administered" +msgstr "" + +#: StructureDefinition.element.ProgramIndicator.numerator/short +msgid "Numerator" +msgstr "" + +#: CodeSystem.concept.display +msgid "Numerical and Material Recording Clerks" +msgstr "" + +#: CodeSystem.concept.display +msgid "Nursing Professionals" +msgstr "" + +#: CodeSystem.concept.display +msgid "Nursing and Midwifery Associate Professionals" +msgstr "" + +#: CodeSystem.concept.display +msgid "Nursing and Midwifery Professionals" +msgstr "" + +#: StructureDefinition.mapping.objimpl/name +msgid "Object Implementation Information" +msgstr "" + +#: StructureDefinition.element.BusinessProcessWorkflow.objectives/short +msgid "Objectives" +msgstr "" + +#: CodeSystem.concept.display +msgid "On demand communication with persons" +msgstr "" + +#: 
CodeSystem.concept.display +msgid "On‐demand information services to clients" +msgstr "" + +#: CodeSystem.concept.display +msgid "Optometrists and Ophthalmic Opticians" +msgstr "" + +#: StructureDefinition.element.DAK.publisher/definition +msgid "Organization responsible for publishing the DAK" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.publisher/definition +msgid "Organization responsible for publishing the IG" +msgstr "" + +#: CodeSystem.concept.display +msgid "Other Clerical Support Workers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Other Health Associate Professionals" +msgstr "" + +#: CodeSystem.concept.display +msgid "Other Health Professionals" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.dependencies/definition +msgid "Other IGs or packages this IG depends on" +msgstr "" + +#: StructureDefinition.element.GenericPersona.otherNames/short +msgid "Other Names/Examples" +msgstr "" + +#: CodeSystem.concept.display +msgid "Other Personal Services Workers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Other Teaching Professionals" +msgstr "" + +#: StructureDefinition.element.GenericPersona.otherNames/definition +msgid "Other names or examples for the persona" +msgstr "" + +#: CodeSystem.concept.display +msgid "Output" +msgstr "" + +#: StructureDefinition.element.DAK.healthInterventions/definition +msgid "Overview of the health interventions and WHO, regional or national recommendations included within the DAK" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.dependencies.id/definition +msgid "Package ID of the dependency" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.pages.filename/short +msgid "Page Filename" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.pages.title/short +msgid "Page Title" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.pages/short +msgid "Pages" +msgstr "" + +#: CodeSystem.concept.display 
+msgid "Painters, Building Structure Cleaners and Related Trades Workers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Paramedical Practitioners" +msgstr "" + +#: CodeSystem.concept.display +msgid "Parse unstructured data into structured data" +msgstr "" + +#: CodeSystem.concept.display +msgid "Patient Administration systems" +msgstr "" + +#: CodeSystem.concept.display +msgid "Peer group for clients" +msgstr "" + +#: CodeSystem.concept.display +msgid "Peer group for health workers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Peer group for healthcare providers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Peer group for individuals" +msgstr "" + +#: ActorDefinition.name +msgid "Person" +msgstr "" + +#: ActorDefinition.title +#: ImplementationGuide.definition.resource.name +msgid "Person (Health Service User)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Person based reporting" +msgstr "" + +#: CodeSystem.concept.display +msgid "Person to Person communication" +msgstr "" + +#: CodeSystem.concept.display +msgid "Person-centred consent management" +msgstr "" + +#: CodeSystem.concept.display +msgid "Person-centred financial transactions" +msgstr "" + +#: CodeSystem.concept.display +msgid "Person-centred health certificate management" +msgstr "" + +#: CodeSystem.concept.display +msgid "Person-centred health records" +msgstr "" + +#: StructureDefinition.name +msgid "Persona" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +#: StructureDefinition.element.Persona/short +msgid "Persona (DAK)" +msgstr "" + +#: StructureDefinition.element.GenericPersona.id/short +#: StructureDefinition.element.Persona.id/short +msgid "Persona ID" +msgstr "" + +#: StructureDefinition.element.Persona.type/definition +msgid "Persona Types: Key/Related/System/Hardware Device" +msgstr "" + +#: CodeSystem.concept.display +msgid "Personal Care Workers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Personal Service 
Workers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Personal health records" +msgstr "" + +#: CodeSystem.concept.display +msgid "Personal health tracking" +msgstr "" + +#: StructureDefinition.element.UserScenario.personas/short +msgid "Personas" +msgstr "" + +#: CodeSystem.concept.display +msgid "Persons" +msgstr "" + +#: CodeSystem.concept.display +msgid "Pharmacists" +msgstr "" + +#: CodeSystem.concept.display +msgid "Pharmacy Information System" +msgstr "" + +#: CodeSystem.concept.display +msgid "Pharmacy information systems" +msgstr "" + +#: CodeSystem.concept.display +msgid "Physical and Earth Science Professionals" +msgstr "" + +#: CodeSystem.concept.display +msgid "Physical and Engineering Science Technicians" +msgstr "" + +#: CodeSystem.concept.display +msgid "Physical and Engineering Science Technicians Not Elsewhere Classified" +msgstr "" + +#: CodeSystem.concept.display +msgid "Physicists and Astronomers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Physiotherapists" +msgstr "" + +#: CodeSystem.concept.display +msgid "Plant and Machine Operators and Assemblers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Point of service" +msgstr "" + +#: CodeSystem.concept.display +msgid "Point-to-point data integration" +msgstr "" + +#: CodeSystem.concept.display +msgid "Policy and Planning Managers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Prescription and medication management" +msgstr "" + +#: StructureDefinition.element.DAK.previewUrl/short +msgid "Preview URL" +msgstr "" + +#: StructureDefinition.element.DAK.previewUrl/definition +msgid "Preview URL for the current CI build (e.g., https://worldhealthorganization.github.io/smart-base)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Primary School and Early Childhood Teachers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Process Control Technicians" +msgstr "" + +#: CodeSystem.concept.display +msgid "Product and Garment Designers" +msgstr "" + +#: 
CodeSystem.concept.display +msgid "Product catalogues" +msgstr "" + +#: CodeSystem.concept.display +msgid "Production and Specialised Services Managers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Professionals" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.global.profile/short +msgid "Profile URL" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.meta.profile/definition +msgid "Profiles this IG conforms to" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +#: StructureDefinition.element.ProgramIndicator/short +msgid "Program Indicator (DAK)" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +#: StructureDefinition.element.ProgramIndicatorSource/short +msgid "Program Indicator Source" +msgstr "" + +#: StructureDefinition.element.DAK.indicators/short +msgid "Program Indicators" +msgstr "" + +#: StructureDefinition.name +msgid "ProgramIndicator" +msgstr "" + +#: StructureDefinition.name +msgid "ProgramIndicatorSource" +msgstr "" + +#: CodeSystem.concept.display +msgid "Protective Services Workers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Provide checklist according to protocol" +msgstr "" + +#: ImplementationGuide.definition.resource.description +msgid "Provide communication" +msgstr "" + +#: CodeSystem.concept.display +msgid "Provide prompts and alerts based according to protocol" +msgstr "" + +#: CodeSystem.concept.display +msgid "Provide training content to health worker(s)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Provide training content to healthcare provider(s)" +msgstr "" + +#: ActorDefinition.title +#: ImplementationGuide.definition.resource.name +msgid "Public Health and Disease Surveillance System" +msgstr "" + +#: CodeSystem.concept.display +msgid "Public Key directories" +msgstr "" + +#: CodeSystem.concept.display +msgid "Public health and disease surveillance" +msgstr "" + +#: 
CodeSystem.concept.display +msgid "Public health and disease surveillance systems" +msgstr "" + +#: CodeSystem.concept.display +msgid "Public health event notification" +msgstr "" + +#: ActorDefinition.name +msgid "PublicHealthSurveillanceSystem" +msgstr "" + +#: StructureDefinition.element.DAK.publicationUrl/short +msgid "Publication URL" +msgstr "" + +#: StructureDefinition.element.DAK.status/definition +msgid "Publication status of the DAK" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.status/definition +msgid "Publication status of the IG" +msgstr "" + +#: StructureDefinition.element.DAK.publisher/short +#: StructureDefinition.element.DublinCore.publisher/short +#: StructureDefinition.element.SushiConfigLogicalModel.publisher/short +msgid "Publisher" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.publisher.email/short +msgid "Publisher Email" +msgstr "" + +#: StructureDefinition.element.DAK.publisher.name/short +#: StructureDefinition.element.SushiConfigLogicalModel.publisher.name/short +msgid "Publisher Name" +msgstr "" + +#: StructureDefinition.element.DAK.publisher.url/short +#: StructureDefinition.element.SushiConfigLogicalModel.publisher.url/short +msgid "Publisher URL" +msgstr "" + +#: ImplementationGuide.definition.resource.name +msgid "Questionnaire for IMMZ.D2 Determine required vaccination(s) if any" +msgstr "" + +#: StructureDefinition.mapping.rim/name +msgid "RIM Mapping" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.dependencies.reason/definition +msgid "Reason for this dependency" +msgstr "" + +#: CodeSystem.concept.display +msgid "Receive Task" +msgstr "" + +#: CodeSystem.concept.display +msgid "Receive payments from individuals" +msgstr "" + +#: CodeSystem.concept.display +msgid "Record training credentials of health worker(s)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Record training credentials of healthcare provider(s)" +msgstr "" + +#: 
StructureDefinition.element.HealthInterventions.reference/short +msgid "Reference" +msgstr "" + +#: StructureDefinition.element.HealthInterventions.reference/definition +msgid "Reference data element using Dublin Core metadata" +msgstr "" + +#: CodeSystem.concept.definition +msgid "Reference to a FHIR CodeSystem resource" +msgstr "" + +#: CodeSystem.concept.definition +msgid "Reference to a FHIR ConceptMap resource" +msgstr "" + +#: CodeSystem.concept.definition +msgid "Reference to a FHIR ValueSet resource" +msgstr "" + +#: CodeSystem.concept.definition +msgid "Reference to a Logical Model adherent to SGLogicalModel" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.resources.reference/definition +msgid "Reference to the resource (used as YAML key)" +msgstr "" + +#: StructureDefinition.element.ProgramIndicator.references/short +msgid "References" +msgstr "" + +#: StructureDefinition.element.ProgramIndicator.references/definition +msgid "References to Health Intervention IDs providing additional context" +msgstr "" + +#: StructureDefinition.element.UserScenario.personas/definition +msgid "References to persona IDs that participate in this scenario" +msgstr "" + +#: StructureDefinition.element.BusinessProcessWorkflow.scenario/definition +msgid "References to user scenario IDs related to this workflow" +msgstr "" + +#: CodeSystem.concept.display +msgid "Referral coordination" +msgstr "" + +#: CodeSystem.concept.display +msgid "Referral co‐ordination" +msgstr "" + +#: CodeSystem.concept.display +msgid "Refuse Workers and Other Elementary Workers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Register and store current health certificate information" +msgstr "" + +#: CodeSystem.concept.display +msgid "Register and verify client insurance membership" +msgstr "" + +#: CodeSystem.concept.display +msgid "Register and verify health coverage scheme membership of persons" +msgstr "" + +#: CodeSystem.concept.display +msgid "Register birth event" 
+msgstr "" + +#: CodeSystem.concept.display +msgid "Register death event" +msgstr "" + +#: CodeSystem.concept.display +msgid "Register licensed drugs and health commodities" +msgstr "" + +#: CodeSystem.concept.display +msgid "Registries and Directories" +msgstr "" + +#: CodeSystem.concept.display +msgid "Related Persona" +msgstr "" + +#: CodeSystem.concept.definition +msgid "Related Personas that don't directly enact in any workflow in this DAK" +msgstr "" + +#: StructureDefinition.element.DublinCore.relation/short +msgid "Relation" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.releaseLabel/short +msgid "Release Label" +msgstr "" + +#: CodeSystem.concept.display +msgid "Remote monitoring of client health or diagnostic data by provider" +msgstr "" + +#: CodeSystem.concept.display +msgid "Remote monitoring of person's health or diagnostic data by provider" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Access by client' → 'Access by the individual'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Active data capture/documentation by client' → '...by an individual'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Citizen-based reporting' → 'Person based reporting'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Civil Registration and Vital Statistics' [same concept]" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Client financial transactions' → 'Person-centred financial transactions'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Client health records' → 'Person-centred health records'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Client identification and registration' → 'Identification and registration of persons'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Client look-up of health information' → 'Look-up of 
information on health and health services by individuals'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Client to client communication' → 'Person to Person communication'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Clients' → 'Persons'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Data collection; management; and use' → 'Data Management'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Data exchange and interoperability'; split from 1 leaf code into 3 in v2" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Health financing' → 'Health system financial management'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Health system managers' → 'Health management and support personnel'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Health worker activity planning' → 'Scheduling and activity planning for healthcare providers'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Health worker communication' → 'Healthcare provider communication'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Health worker decision support' → 'Healthcare provider decision support'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Health worker training' → 'Healthcare provider training'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Health workers' → 'Healthcare providers'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Location mapping' → 'Geo spatial information management'; expanded from 4 to 5 sub-codes" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Map location of health events' → 'Map location of health event'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'On-demand information services to clients' → 
'On demand communication with persons'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Peer group for clients' → 'Peer group for individuals'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Peer group for health workers' → 'Peer group for healthcare providers'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Personal health tracking' [unchanged]" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Self monitoring of health... by client' → '...by the individual'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Targeted client communication' → 'Targeted communication to Persons'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Transmit diagnostic result to health worker' → 'Transmit person's diagnostic result to healthcare provider'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Untargeted client communication' → 'Untargeted communication to Persons'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'Verify client unique identity' → 'Verify a person's unique identity'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'adverse drug events' → 'adverse drug effects'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'by client[s]' → 'by individuals'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'by clients' → 'by persons'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'client' → 'person'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'client' → 'person's'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'client' → 'person[s]'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'client's' → 'individual's'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 
'client's' → 'person-centred'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'client's' → 'person-centred'; now explicitly includes examples [notes; images; documents]" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'client[s]' → 'person[s]'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'client[s]' → 'persons'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'clients and households' → 'persons and settlements'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'clients' → 'person's'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'clients' → 'persons'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'health worker' → 'healthcare provider'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'health worker' → 'healthcare provider[s]'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'health worker's' → 'healthcare provider's'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'health worker[s]' → 'healthcare provider'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'health worker[s]' → 'healthcare provider[s]'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'health worker[s]' → 'healthcare providers'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'to client[s]' → 'to individuals'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renamed: 'visualisation' → 'visualizations'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renumbered from v1 3.5.4; health worker → healthcare provider" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renumbered from v1 3.5.5; health worker → healthcare provider" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Renumbered from v1 3.5.6; 
reworded to include revenue and expenditures" +msgstr "" + +#: CodeSystem.concept.display +msgid "Report adverse drug effects" +msgstr "" + +#: CodeSystem.concept.display +msgid "Report adverse drug events" +msgstr "" + +#: CodeSystem.concept.display +msgid "Report counterfeit or substandard drugs by clients" +msgstr "" + +#: CodeSystem.concept.display +msgid "Report counterfeit or substandard drugs by persons" +msgstr "" + +#: CodeSystem.concept.display +msgid "Reporting of health system feedback by clients" +msgstr "" + +#: CodeSystem.concept.display +msgid "Reporting of health system feedback by persons" +msgstr "" + +#: CodeSystem.concept.display +msgid "Reporting of public health events by clients" +msgstr "" + +#: CodeSystem.concept.display +msgid "Reporting of public health events by persons" +msgstr "" + +#: CodeSystem.concept.display +msgid "Request Expected Actions" +msgstr "" + +#: CodeSystem.concept.display +msgid "Request Message Semantics" +msgstr "" + +#: CodeSystem.concept.display +msgid "Request Trigger Events" +msgstr "" + +#: StructureDefinition.element.NonFunctionalRequirement.requirement/short +msgid "Requirement" +msgstr "" + +#: StructureDefinition.element.FunctionalRequirement.id/short +#: StructureDefinition.element.NonFunctionalRequirement.id/short +msgid "Requirement ID" +msgstr "" + +#: StructureDefinition.name +msgid "Requirements" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +#: StructureDefinition.element.RequirementsSource/short +msgid "Requirements Source" +msgstr "" + +#: StructureDefinition.name +msgid "RequirementsSource" +msgstr "" + +#: CodeSystem.concept.display +msgid "Research and Development Managers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Research information system" +msgstr "" + +#: CodeSystem.concept.display +msgid "Research information systems" +msgstr "" + +#: StructureDefinition.element.FHIRSchemaBase.resourceDefinition/short +msgid "Resource Definition" 
+msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.resources/short +msgid "Resource Definitions" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.resources.description/short +msgid "Resource Description" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.groups/short +msgid "Resource Groups" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.resources.name/short +msgid "Resource Name" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.resources.reference/short +msgid "Resource Reference" +msgstr "" + +#: StructureDefinition.element.FHIRSchemaBase.resourceType/short +#: StructureDefinition.element.SushiConfigLogicalModel.global.type/short +msgid "Resource Type" +msgstr "" + +#: CodeSystem.concept.display +msgid "Response Expected Actions" +msgstr "" + +#: CodeSystem.concept.display +msgid "Response Message Semantics" +msgstr "" + +#: CodeSystem.concept.display +msgid "Response Trigger Events" +msgstr "" + +#: CodeSystem.concept.display +msgid "Retrieve and validate current health certificate information" +msgstr "" + +#: CodeSystem.concept.display +msgid "Revoke and update health certificate" +msgstr "" + +#: StructureDefinition.element.DublinCore.rights/short +msgid "Rights" +msgstr "" + +#: CodeSystem.concept.display +msgid "Routine health indicator data collection and management" +msgstr "" + +#: StructureDefinition.name +msgid "SGActivityDefinition" +msgstr "" + +#: StructureDefinition.name +msgid "SGActor" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.name +msgid "SGActorExt" +msgstr "" + +#: StructureDefinition.name +msgid "SGBusinessProcess" +msgstr "" + +#: StructureDefinition.name +msgid "SGCodeSystem" +msgstr "" + +#: StructureDefinition.name +msgid "SGCommunicationRequest" +msgstr "" + +#: StructureDefinition.name +msgid "SGConceptMap" +msgstr "" + +#: StructureDefinition.name +msgid "SGDecisionTable" +msgstr "" + +#: 
ActivityDefinition.name +#: ImplementationGuide.definition.resource.name +msgid "SGDecisionTableGuidance" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.name +msgid "SGDocumentation" +msgstr "" + +#: StructureDefinition.name +msgid "SGGraphDefinition" +msgstr "" + +#: StructureDefinition.name +msgid "SGGroupDefinition" +msgstr "" + +#: StructureDefinition.name +msgid "SGImplementationGuide" +msgstr "" + +#: StructureDefinition.name +msgid "SGLibrary" +msgstr "" + +#: StructureDefinition.name +msgid "SGLogicalModel" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.name +msgid "SGMarkdown" +msgstr "" + +#: StructureDefinition.name +msgid "SGMeasure" +msgstr "" + +#: CodeSystem.name +msgid "SGPersonaTypes" +msgstr "" + +#: ValueSet.name +msgid "SGPersonaTypesVS" +msgstr "" + +#: StructureDefinition.name +msgid "SGPlanDefinition" +msgstr "" + +#: StructureDefinition.name +msgid "SGQuestionnaire" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.name +msgid "SGRequirementExt" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.name +msgid "SGString" +msgstr "" + +#: StructureDefinition.name +msgid "SGStructureDefinition" +msgstr "" + +#: StructureDefinition.name +msgid "SGStructureMap" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.name +msgid "SGTask" +msgstr "" + +#: CodeSystem.name +msgid "SGTasks" +msgstr "" + +#: StructureDefinition.name +msgid "SGTransaction" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.name +msgid "SGUserStory" +msgstr "" + +#: StructureDefinition.name +msgid "SGValueSet" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.name +msgid "SGcode" +msgstr "" + +#: CodeSystem.concept.display +msgid "SHR and health information repositories" +msgstr "" + +#: ImplementationGuide.title +msgid "SMART 
Base" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +msgid "SMART Guidelines ActivityDefinition" +msgstr "" + +#: ImplementationGuide.definition.resource.name +msgid "SMART Guidelines Actor" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +msgid "SMART Guidelines Business Process" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +msgid "SMART Guidelines CodeSystem" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +msgid "SMART Guidelines Communication Request" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +msgid "SMART Guidelines ConceptMap" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +msgid "SMART Guidelines Decision Table" +msgstr "" + +#: ImplementationGuide.definition.resource.name +msgid "SMART Guidelines GraphDefinition" +msgstr "" + +#: ImplementationGuide.definition.resource.name +msgid "SMART Guidelines Group Definition" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +msgid "SMART Guidelines ImplementationGuide" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +msgid "SMART Guidelines Library" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +msgid "SMART Guidelines Logical Model" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +msgid "SMART Guidelines Measure" +msgstr "" + +#: CodeSystem.title +#: ImplementationGuide.definition.resource.name +msgid "SMART Guidelines Persona Types" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +msgid "SMART Guidelines PlanDefinition" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +msgid "SMART 
Guidelines Questionnaire" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.name +msgid "SMART Guidelines Requirements" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +msgid "SMART Guidelines StructureDefinition" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +msgid "SMART Guidelines StructureMap" +msgstr "" + +#: CodeSystem.title +#: ImplementationGuide.definition.resource.name +msgid "SMART Guidelines Tasks" +msgstr "" + +#: ImplementationGuide.definition.resource.name +msgid "SMART Guidelines Transaction" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +msgid "SMART Guidelines ValueSet" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +#: StructureDefinition.element.SushiConfigLogicalModel/short +msgid "SUSHI Configuration Logical Model" +msgstr "" + +#: CodeSystem.concept.display +msgid "Sales Workers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Sales and Marketing Managers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Sales, Marketing and Development Managers" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.name +msgid "Satisfies" +msgstr "" + +#: StructureDefinition.element.BusinessProcessWorkflow.scenario/short +msgid "Scenario" +msgstr "" + +#: StructureDefinition.element.UserScenario.id/short +msgid "Scenario ID" +msgstr "" + +#: CodeSystem.concept.display +msgid "Schedule health worker's activities" +msgstr "" + +#: CodeSystem.concept.display +msgid "Schedule healthcare provider's activities" +msgstr "" + +#: CodeSystem.concept.display +msgid "Scheduling and activity planning for healthcare providers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Science and Engineering Associate Professionals" +msgstr "" + +#: CodeSystem.concept.display +msgid "Science and Engineering Professionals" 
+msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Scope expanded: v2 explicitly includes social services; police; justice; economic support schemes" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Scope extended to explicitly include AI and machine learning" +msgstr "" + +#: CodeSystem.concept.display +msgid "Screen clients by risk or other health status" +msgstr "" + +#: CodeSystem.concept.display +msgid "Screen persons by risk or other health status" +msgstr "" + +#: CodeSystem.concept.display +msgid "Script Task" +msgstr "" + +#: CodeSystem.concept.display +msgid "Secondary Education Teachers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Secretaries (general)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Security Considerations" +msgstr "" + +#: CodeSystem.concept.display +msgid "Self monitoring of health or diagnostic data by client" +msgstr "" + +#: CodeSystem.concept.display +msgid "Self monitoring of health or diagnostic data by the individual" +msgstr "" + +#: CodeSystem.concept.display +msgid "Send Task" +msgstr "" + +#: CodeSystem.concept.display +msgid "Senior Government Officials" +msgstr "" + +#: CodeSystem.concept.display +msgid "Senior Officials of Special-Interest Organizations" +msgstr "" + +#: CodeSystem.concept.display +msgid "Service Task" +msgstr "" + +#: CodeSystem.concept.display +msgid "Service and Sales Workers" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: ValueSet.title +msgid "Services and Application Types" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: ValueSet.title +msgid "Services and Application Types: Data Management Services" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: ValueSet.title +msgid "Services and Application Types: Health System/Provider Administration" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: ValueSet.title +msgid "Services and Application Types: Point of Service" +msgstr "" + +#: 
ImplementationGuide.definition.resource.name +#: ValueSet.title +msgid "Services and Application Types: Registries and Directories" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: ValueSet.title +msgid "Services and Application Types: Surveillance and Response" +msgstr "" + +#: CodeSystem.concept.definition +msgid "Services and systems that support the collection, aggregation, storage, analysis, and exchange of health data." +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: ValueSet.description +msgid "Services and systems that support the collection, aggregation, storage, analysis, and exchange of health data. Group D of the Classification of Digital Health Services and Application Types v2 (CDISAH, 2023)." +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.instanceOptions.setId/short +msgid "Set ID" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.instanceOptions.setMetaProfile/short +msgid "Set Meta Profile" +msgstr "" + +#: StructureDefinition.element.DAK.testScenarios/definition +msgid "Set of test scenarios to validate an implementation of the DAK" +msgstr "" + +#: CodeSystem.concept.display +msgid "Shared Health Record and Health Information Repository" +msgstr "" + +#: CodeSystem.concept.display +msgid "Ship and Aircraft Controllers and Technicians" +msgstr "" + +#: StructureDefinition.element.DAK.name/definition +msgid "Short name for the DAK (e.g., Base)" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.name/definition +msgid "Short name for the IG, should derive from DAK name (e.g., Trust)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Simulated human-like conversations with individual(s)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Skilled Agricultural, Forestry and Fishery Workers" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.Extension/definition +msgid "Smart Guidelines (required) 
string extension for use in a complex extension" +msgstr "" + +#: CodeSystem.title +#: ImplementationGuide.definition.resource.name +msgid "Smart Guidelines Actions (columns) for Decision Tables" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.Extension/definition +msgid "Smart Guidelines Actor Reference extension" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: ValueSet.title +msgid "Smart Guidelines Decision Table Actions" +msgstr "" + +#: CodeSystem.title +#: ImplementationGuide.definition.resource.name +#: ValueSet.title +msgid "Smart Guidelines Documentation Section" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.Extension/definition +msgid "Smart Guidelines Documentation extension" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: ValueSet.title +msgid "Smart Guidelines Persona Types Value Set" +msgstr "" + +#: ImplementationGuide.definition.resource.description +msgid "Smart Guidelines Requirements" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.Extension/definition +msgid "Smart Guidelines Requirements extension" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.Extension/definition +msgid "Smart Guidelines code extension" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.Extension/definition +msgid "Smart Guidelines extension to support structured User Stories (As a `Actor` I want to `capability` so that `benefit`) extension" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.Extension/definition +msgid "Smart Guidelines link identifier extension" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: StructureDefinition.element.Extension/definition +msgid "Smart Guidelines markdown extension" 
+msgstr "" + +#: StructureDefinition.element.BusinessProcessWorkflow.source/short +#: StructureDefinition.element.DecisionSupportLogic.source/short +#: StructureDefinition.element.DublinCore.source/short +msgid "Source" +msgstr "" + +#: CodeSystem.concept.display +msgid "Specialist Medical Practitioners" +msgstr "" + +#: CodeSystem.concept.display +msgid "Standards-compliant interoperability" +msgstr "" + +#: CodeSystem.concept.display +msgid "Stationary Plant and Machine Operators" +msgstr "" + +#: CodeSystem.concept.display +msgid "Stonemasons, Stone Cutters, Splitters and Carvers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Street and Related Sales and Service Workers" +msgstr "" + +#: ImplementationGuide.definition.resource.description +msgid "Structure and constraints for ActorDefinition resources used in SMART Guidelines" +msgstr "" + +#: ImplementationGuide.definition.resource.description +msgid "Structure and constraints for Business Processes represented in SMART Guidelines" +msgstr "" + +#: ImplementationGuide.definition.resource.description +msgid "Structure and constraints for Group Definitions represented in SMART Guidelines" +msgstr "" + +#: ImplementationGuide.definition.resource.description +msgid "Structure and constraints for TransactionDefinition resources used in SMART Guidelines" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.menu.subItems/short +msgid "Sub Menu Items" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.menu.subItems.title/short +msgid "Sub-item Title" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.menu.subItems.url/short +msgid "Sub-item URL" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.menu.subItems/definition +msgid "Sub-menu items" +msgstr "" + +#: StructureDefinition.element.DublinCore.subject/short +msgid "Subject" +msgstr "" + +#: CodeSystem.concept.display +msgid "Subsistence Farmers, Fishers, Hunters and Gatherers" +msgstr "" + 
+#: CodeSystem.concept.display +msgid "Supply chain management" +msgstr "" + +#: CodeSystem.concept.display +msgid "Surveillance and Response" +msgstr "" + +#: StructureDefinition.name +msgid "SushiConfigLogicalModel" +msgstr "" + +#: CodeSystem.concept.display +msgid "System" +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems and standards that enable health data exchange between disparate systems, including APIs, messaging standards, and interoperability platforms." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems for aggregate health data reporting, target-setting, and programme monitoring, typically at district and national levels." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems for curating, organising and disseminating clinical guidelines, protocols, and health knowledge resources." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems for delivering, tracking, and assessing training and professional development of health workers." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems for detecting, monitoring, investigating and responding to disease outbreaks and public health threats." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems for diagnostic applications found in devices (software in a medical device), wearables that collect health data, radiology information systems, medical imaging systems, and picture archiving and communication systems (PACS)." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems for managing and applying standardised clinical terminologies, ontologies, and health classifications." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems for managing health facility information, infrastructure, equipment, capacity and assessments." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems for managing health workforce data including cadres, deployment, credentials, performance and payroll." 
+msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems for managing the collection, testing, processing, storage, and distribution of blood and blood products." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems for managing the health supply chain from forecasting to distribution and stock monitoring." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems for recording, tracking, and reporting individual and population-level immunization history and coverage." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems maintaining authoritative directories of health workforce cadres, credentials, and deployment information." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems that aggregate and share longitudinal health records across facilities and providers." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems that are used to transfer electronic information. Messages can be exchanged between healthcare providers or between healthcare providers and persons." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems that capture, analyse and visualise geospatial health data for mapping health events, facilities, and populations." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems that collect, store, and manage population-level demographic data including census information." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems that consolidate and store large volumes of historical health data from multiple sources for analysis and reporting." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems that coordinate emergency medical services, disaster preparedness, and mass casualty management." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems that create, maintain, and provide authoritative master records for persons, providers, facilities, products and health events." 
+msgstr "" + +#: ImplementationGuide.definition.resource.description +#: ValueSet.description +msgid "Systems that create, maintain, and provide authoritative master records for persons, providers, facilities, products and health events. Group C of the Classification of Digital Health Services and Application Types v2 (CDISAH, 2023)." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems that create, maintain, and verify unique identities for persons, healthcare providers, and other health actors." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems that enable remote clinical consultation, monitoring and diagnosis between persons and healthcare providers at a distance." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems that facilitate the provision and delivery of healthcare services to persons at the point of care." +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: ValueSet.description +msgid "Systems that facilitate the provision and delivery of healthcare services to persons at the point of care. Group A of the Classification of Digital Health Services and Application Types v2 (CDISAH, 2023)." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems that maintain authoritative lists of approved health commodities, medicines, devices and equipment." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems that manage and distribute public cryptographic keys to support digital signature and identity verification." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems that manage health financing, insurance enrolment, claims, billing, reimbursement, and budgeting." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems that manage patient administrative information including admissions, discharges, transfers, appointments and billing." 
+msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems that manage prescriptions, dispensing, stock and billing for medicines and health products." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems that monitor environmental determinants of health such as air quality, water safety and vector habitats." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems that process and analyse health data to produce insights, dashboards, reports and predictive models." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems that register and certify vital events such as births, deaths, marriages and their underlying causes." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems that support the administrative and managerial functions of health systems and healthcare organisations." +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: ValueSet.description +msgid "Systems that support the administrative and managerial functions of health systems and healthcare organisations. Group B of the Classification of Digital Health Services and Application Types v2 (CDISAH, 2023)." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems that support the design, conduct, data management and reporting of health research and clinical trials." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems that support the detection, monitoring, and response to disease outbreaks and public health threats." +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: ValueSet.description +msgid "Systems that support the detection, monitoring, and response to disease outbreaks and public health threats. Group E of the Classification of Digital Health Services and Application Types v2 (CDISAH, 2023)." 
+msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems that support the process from patient sample to patient result, including lab requests/test ordering, sample tracking and processing, and results reporting." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems that uniquely identify and link patient records across multiple health information systems and facilities." +msgstr "" + +#: CodeSystem.concept.definition +msgid "Systems used to collect and analyse aggregate health data for programme planning, monitoring and evaluation." +msgstr "" + +#: Questionnaire.item.text +msgid "TB infection test result" +msgstr "" + +#: ImplementationGuide.definition.page.title +msgid "Table of Contents" +msgstr "" + +#: CodeSystem.concept.display +msgid "Targeted client communication" +msgstr "" + +#: CodeSystem.concept.display +msgid "Targeted communication to Persons" +msgstr "" + +#: CodeSystem.concept.display +#: StructureDefinition.element.BusinessProcessWorkflow.task/short +msgid "Task" +msgstr "" + +#: StructureDefinition.element.BusinessProcessWorkflow.task.description/short +msgid "Task Description" +msgstr "" + +#: StructureDefinition.element.BusinessProcessWorkflow.task.identifier/short +msgid "Task ID" +msgstr "" + +#: StructureDefinition.element.BusinessProcessWorkflow.task.name/short +msgid "Task Name" +msgstr "" + +#: StructureDefinition.element.BusinessProcessWorkflow.task/definition +msgid "Tasks within the business process workflow" +msgstr "" + +#: CodeSystem.concept.display +msgid "Teaching Professionals" +msgstr "" + +#: CodeSystem.concept.display +msgid "Technicians and Associate Professionals" +msgstr "" + +#: CodeSystem.concept.display +msgid "Telecommunications Engineers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Telehealth systems" +msgstr "" + +#: CodeSystem.concept.display +msgid "Telemedicine" +msgstr "" + +#: CodeSystem.concept.display +msgid "Terminology and classification systems" +msgstr "" + +#: 
ImplementationGuide.definition.resource.name +#: StructureDefinition.title +#: StructureDefinition.element.TestScenario/short +msgid "Test Scenario (DAK)" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +#: StructureDefinition.element.TestScenarioSource/short +msgid "Test Scenario Source" +msgstr "" + +#: StructureDefinition.element.DAK.testScenarios/short +msgid "Test Scenarios" +msgstr "" + +#: StructureDefinition.name +msgid "TestScenario" +msgstr "" + +#: StructureDefinition.name +msgid "TestScenarioSource" +msgstr "" + +#: StructureDefinition.element.FHIRSchemaBase.fhirParent/definition +msgid "The FHIR parent base type from which this logical model is derived (serialised as 'fhir:parent' in JSON)" +msgstr "" + +#: StructureDefinition.element.FHIRSchemaBase.resourceType/definition +msgid "The FHIR resource type identifying this logical model resource" +msgstr "" + +#: StructureDefinition.element.FunctionalRequirement.actor/definition +msgid "The actor(s) that should fulfill the requirement" +msgstr "" + +#: StructureDefinition.element.DAK.canonicalUrl/definition +msgid "The canonical URL to use for this DAK instance - equals publicationUrl for release branches, previewUrl for development branches" +msgstr "" + +#: StructureDefinition.element.DublinCore.format/definition +msgid "The file format, physical medium, or dimensions of the resource" +msgstr "" + +#: ImplementationGuide.definition.resource.description +msgid "The minimum expectations for ActivityDefinition resources used in SMART Guidelines" +msgstr "" + +#: ImplementationGuide.definition.resource.description +msgid "The minimum expectations for GraphDefinition resources used in SMART Guidelines" +msgstr "" + +#: StructureDefinition.element.DublinCore.type/definition +msgid "The nature or genre of the resource" +msgstr "" + +#: StructureDefinition.element.DublinCore.coverage/definition +msgid "The spatial or temporal topic of the resource" +msgstr "" + +#: 
StructureDefinition.element.DublinCore.subject/definition +msgid "The topic of the resource" +msgstr "" + +#: Questionnaire.description +msgid "This questionnaire supports the decision logic for: IMMZ.D2 Determine required vaccination(s) if any" +msgstr "" + +#: Questionnaire.item.text +msgid "Time passed since a live vaccine was administered" +msgstr "" + +#: StructureDefinition.element.DublinCore.title/short +#: StructureDefinition.element.GenericPersona.title/short +#: StructureDefinition.element.UserScenario.title/short +msgid "Title" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.menu.title/definition +msgid "Title of the menu item" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.pages.title/definition +msgid "Title of the page" +msgstr "" + +#: StructureDefinition.element.GenericPersona.title/definition +msgid "Title of the persona" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.menu.subItems.title/definition +msgid "Title of the sub-menu item" +msgstr "" + +#: StructureDefinition.element.UserScenario.title/definition +msgid "Title of the user scenario" +msgstr "" + +#: CodeSystem.concept.display +msgid "Town and Traffic Planners" +msgstr "" + +#: CodeSystem.concept.display +msgid "Track and manage insurance billing and claims processes" +msgstr "" + +#: CodeSystem.concept.display +msgid "Track and manage insurance reimbursement" +msgstr "" + +#: CodeSystem.concept.display +msgid "Track biological specimens" +msgstr "" + +#: CodeSystem.concept.display +msgid "Track client's medication consumption" +msgstr "" + +#: CodeSystem.concept.display +msgid "Track individual's medication consumption" +msgstr "" + +#: CodeSystem.concept.display +msgid "Track insurance billing and claims submission" +msgstr "" + +#: CodeSystem.concept.display +msgid "Track regulation and licensing of medical equipment" +msgstr "" + +#: CodeSystem.concept.display +msgid "Traditional Chiefs and Heads of Villages" +msgstr "" + 
+#: CodeSystem.concept.display +msgid "Traditional and Complementary Medicine Associate Professionals" +msgstr "" + +#: CodeSystem.concept.display +msgid "Traditional and Complementary Medicine Professionals" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmission of medical data (e.g. images, notes, and videos) to healthcare provider" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmission of medical data to health worker" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit and manage payments to health facilities" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit and manage routine payroll payment to healthcare provider(s)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit and track diagnostic orders" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit diagnostic result to health worker" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit diagnostics result, or availability of result, to client(s)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit diagnostics result, or availability of result, to person(s)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit health event alerts to specific population group(s)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit non-routine health event alerts to healthcare provider(s)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit non‐routine health event alerts to health worker(s)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit or manage financial incentives to healthcare provider(s)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit or manage incentives to client(s) for health services" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit or manage incentives to health worker(s)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit or manage incentives to individuals for health services" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit or manage out-of-pocket 
payments by individuals" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit or manage out‐of‐pocket payments by client(s)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit or manage vouchers to client(s) for health services" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit or manage vouchers to individuals for health services" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit or track prescription orders" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit person's diagnostic result to healthcare provider" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit routine news and workflow notifications to health worker(s)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit routine news and workflow notifications to healthcare provider(s)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit routine payroll payment to health worker(s)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit targeted alerts and reminders to client(s)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit targeted alerts and reminders to person(s)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit targeted health information to client(s) based on health status or demographics" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit targeted health information to person(s) based on health status or demographics" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit untargeted" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit untargeted health event alerts to undefined group" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transmit untargeted health information to an undefined population" +msgstr "" + +#: CodeSystem.concept.display +msgid "Transport Conductors" +msgstr "" + +#: CodeSystem.concept.display +msgid "Travel Attendants and Travel Stewards" +msgstr "" + +#: CodeSystem.concept.display +msgid "Travel Attendants, Conductors and Guides" +msgstr "" + +#: 
CodeSystem.concept.display +msgid "Travel Guides" +msgstr "" + +#: CodeSystem.concept.display +msgid "Tree and Shrub Crop Growers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Trust architecture" +msgstr "" + +#: StructureDefinition.element.CoreDataElement.type/short +#: StructureDefinition.element.DublinCore.type/short +msgid "Type" +msgstr "" + +#: StructureDefinition.element.Persona.type/short +msgid "Type of Persona" +msgstr "" + +#: StructureDefinition.element.CoreDataElement.type/definition +msgid "Type of core data element: valueset, codesystem, conceptmap, or logicalmodel" +msgstr "" + +#: CodeSystem.concept.display +msgid "Typists and Word Processing Operators" +msgstr "" + +#: StructureDefinition.element.BusinessProcessWorkflowSource.url/short +#: StructureDefinition.element.CoreDataElementSource.url/short +#: StructureDefinition.element.DecisionSupportLogicSource.url/short +#: StructureDefinition.element.GenericPersonaSource.url/short +#: StructureDefinition.element.HealthInterventionsSource.url/short +#: StructureDefinition.element.ProgramIndicatorSource.url/short +#: StructureDefinition.element.RequirementsSource.url/short +#: StructureDefinition.element.TestScenarioSource.url/short +#: StructureDefinition.element.UserScenarioSource.url/short +msgid "URL" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.menu.url/definition +msgid "URL of the menu item" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.global.profile/definition +msgid "URL of the profile to apply globally" +msgstr "" + +#: StructureDefinition.element.DAK.publisher.url/definition +msgid "URL of the publishing organization" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.publisher.url/definition +msgid "URL of the publishing organization, should match DAK publisher URL" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.menu.subItems.url/definition +msgid "URL of the sub-menu item" +msgstr "" + +#: 
StructureDefinition.element.BusinessProcessWorkflowSource.url/definition +msgid "URL to retrieve BusinessProcessWorkflow definition from input/ or external source" +msgstr "" + +#: StructureDefinition.element.CoreDataElementSource.url/definition +msgid "URL to retrieve CoreDataElement definition from input/ or external source" +msgstr "" + +#: StructureDefinition.element.DecisionSupportLogicSource.url/definition +msgid "URL to retrieve DecisionSupportLogic definition from input/ or external source" +msgstr "" + +#: StructureDefinition.element.GenericPersonaSource.url/definition +msgid "URL to retrieve GenericPersona definition from input/ or external source" +msgstr "" + +#: StructureDefinition.element.HealthInterventionsSource.url/definition +msgid "URL to retrieve HealthInterventions definition from input/ or external source" +msgstr "" + +#: StructureDefinition.element.ProgramIndicatorSource.url/definition +msgid "URL to retrieve ProgramIndicator definition from input/ or external source" +msgstr "" + +#: StructureDefinition.element.RequirementsSource.url/definition +msgid "URL to retrieve Requirements definition from input/ or external source" +msgstr "" + +#: StructureDefinition.element.TestScenarioSource.url/definition +msgid "URL to retrieve TestScenario definition from input/ or external source" +msgstr "" + +#: StructureDefinition.element.UserScenarioSource.url/definition +msgid "URL to retrieve UserScenario definition from input/ or external source" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.id/definition +msgid "Unique identifier for the IG, should match DAK id (e.g., smart.who.int.trust)" +msgstr "" + +#: CodeSystem.concept.display +msgid "University and Higher Education Teachers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Untargeted client communication" +msgstr "" + +#: CodeSystem.concept.display +msgid "Untargeted communication to Persons" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Updated 
wording: 'Track insurance billing and claims submission' → 'Track and manage insurance billing and claims processes'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "Updated wording: now covers all coverage schemes; not only insurance" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.useContext/short +msgid "Use Context" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +#: StructureDefinition.element.UserScenario/short +msgid "User Scenario (DAK)" +msgstr "" + +#: ImplementationGuide.definition.resource.name +#: StructureDefinition.title +#: StructureDefinition.element.UserScenarioSource/short +msgid "User Scenario Source" +msgstr "" + +#: StructureDefinition.element.DAK.userScenarios/short +msgid "User Scenarios" +msgstr "" + +#: CodeSystem.concept.display +msgid "User Task" +msgstr "" + +#: StructureDefinition.name +msgid "UserScenario" +msgstr "" + +#: StructureDefinition.name +msgid "UserScenarioSource" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: ValueSet.description +msgid "Value Set for Classification of Digital Health Interventions v1. Autogenerated from DAK artifacts" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: ValueSet.description +msgid "Value Set for Smart Guidelines Documentation Decision Table Actions" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: ValueSet.description +msgid "Value Set for Smart Guidelines Documentation Section to autogenerate documentation from artifacts" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: ValueSet.description +msgid "Value Set for Smart Guidelines Persona Section to autogenerate documentation from artifacts" +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: ValueSet.description +msgid "Value Set for the Classification of Digital Interventions, Services and Applications in Health (CDISAH), second edition (2023)." 
+msgstr "" + +#: ImplementationGuide.definition.resource.description +#: ValueSet.description +msgid "Value set for Health System Challenges (Classification of Digital Health System Categories v1, 2018). Includes all 25 system category codes (A–Y)." +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: ValueSet.description +msgid "Value set for Services and Application Types (Classification of Digital Health Services and Application Types v2, CDISAH 2023). Includes all codes across the five architecture groups (A–E)." +msgstr "" + +#: ImplementationGuide.definition.resource.description +#: ValueSet.description +msgid "Value set of core data element types" +msgstr "" + +#: CodeSystem.concept.display +msgid "ValueSet" +msgstr "" + +#: StructureDefinition.element.FHIRSchemaBase.jsonldValuesets/definition +msgid "ValueSet identifiers used in this logical model for JSON-LD context generation (serialised as 'jsonld:valuesets' in JSON)" +msgstr "" + +#: CodeSystem.concept.display +msgid "Vehicle, Window, Laundry and Other Hand Cleaning Workers" +msgstr "" + +#: CodeSystem.concept.display +msgid "Verify a person's unique identity" +msgstr "" + +#: CodeSystem.concept.display +msgid "Verify client unique identity" +msgstr "" + +#: CodeSystem.concept.display +msgid "Verify individual's health coverage and financing scheme membership" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.fhirVersion/definition +msgid "Version of FHIR this IG is built on" +msgstr "" + +#: StructureDefinition.element.DAK.version/definition +msgid "Version of the DAK" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.version/definition +msgid "Version of the IG, should match DAK version" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.dependencies.version/definition +msgid "Version of the dependency" +msgstr "" + +#: CodeSystem.concept.display +msgid "Veterinarians" +msgstr "" + +#: CodeSystem.concept.display +msgid 
"Veterinary Technicians and Assistants" +msgstr "" + +#: CodeSystem.concept.display +msgid "Vocational Education Teachers" +msgstr "" + +#: ActivityDefinition.publisher +#: ContactDetail.name +#: ActorDefinition.publisher +#: CodeSystem.publisher +#: ConceptMap.publisher +#: ImplementationGuide.publisher +#: Questionnaire.publisher +#: StructureDefinition.publisher +#: StructureDefinition.contact.name +#: ValueSet.publisher +msgid "WHO" +msgstr "" + +#: StructureDefinition.title +msgid "WHO SMART Guidelines Group Definition" +msgstr "" + +#: CodeSystem.copyright +msgid "WHO © 2023. Some rights reserved. CC BY-NC-SA 3.0 IGO." +msgstr "" + +#: CodeSystem.concept.display +msgid "Waiters and Bartenders" +msgstr "" + +#: CodeSystem.concept.display +msgid "Well Drillers and Borers and Related Workers" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.instanceOptions.setId/definition +msgid "When to automatically set id" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.instanceOptions.setMetaProfile/definition +msgid "When to automatically set meta.profile" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.experimental/definition +msgid "Whether this IG is experimental, should match DAK" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.resources.exampleBoolean/definition +msgid "Whether this resource is an example" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.applyExtensionMetadataToRoot/definition +msgid "Whether to apply extension metadata to root elements" +msgstr "" + +#: StructureDefinition.element.SushiConfigLogicalModel.FSHOnly/definition +msgid "Whether to export only FSH resources without IG content" +msgstr "" + +#: StructureDefinition.mapping.workflow/name +msgid "Workflow Pattern" +msgstr "" + +#: StructureDefinition.element.DAK.copyrightYear/definition +msgid "Year or year range for copyright" +msgstr "" + +#: 
StructureDefinition.element.SushiConfigLogicalModel.copyrightYear/definition +msgid "Year or year range for copyright, should match DAK" +msgstr "" + +#: ImplementationGuide.definition.grouping.description +msgid "constraints and profile structures for SMART Guidelines resources" +msgstr "" + +#: ActorDefinition.documentation +msgid "https://www.who.int/publications/i/item/9789240081949" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "v1 'Census; population information & data warehouse' → v2 C1 'Census and population information systems'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "v1 'Civil Registration and Vital Statistics' → v2 C2 [same concept; refined name]" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "v1 'Client applications' [broad] most closely maps to v2 A7 'Personal health records'; point-of-service apps generally fall under A. group" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "v1 'Client communication system' → v2 A1 'Communication systems'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "v1 'Clinical terminology and classifications' → v2 C11 'Terminology and classification systems'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "v1 'Community-based Information System' → v2 A2 'Community-based information systems'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "v1 'Data exchange across systems' is broader; v2 4.4.1 covers only point-to-point integration. See also v2 4.4.2 [standards-compliant] and 4.4.3 [message routing]." 
+msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "v1 'Data interchange interoperability and accessibility' → v2 D2 'Data interchange and interoperability'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "v1 'Electronic Medical Record' → v2 A5 'Electronic medical record systems'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "v1 'Emergency response system' → v2 E1 'Emergency preparedness and response systems'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "v1 'Environmental monitoring systems' → v2 D4 'Environmental monitoring systems'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "v1 'Facility Management Information System' → v2 C3 'Facility management information systems'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "v1 'Geographic Information Systems' → v2 D5 'Geographic information systems [GIS]'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "v1 'Health Management Information System' → v2 D6 'Health Management Information systems [HMIS]'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "v1 'Health finance and insurance system' → v2 B2 'Health finance-related information systems'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "v1 'Human Resource Information System' → v2 B4 'Human resource information systems'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "v1 'Identification registries and directories' → v2 C6 [same concept]" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "v1 'Knowledge Management' → v2 D7 'Knowledge management systems'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "v1 'Laboratory and Diagnostic System' → v2 A6 'Laboratory information systems'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "v1 'Learning and Training System' → v2 B5 'Learning and training systems'" +msgstr "" + +#: 
ConceptMap.group.element.target.comment +msgid "v1 'Logistics Management Information System' → v2 B6 'Logistics management information systems [LMIS]'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "v1 'Pharmacy Information System' → v2 A8 'Pharmacy information systems'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "v1 'Public health and disease surveillance' → v2 E2 'Public health and disease surveillance systems'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "v1 'Research information system' → v2 B8 'Research information systems'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "v1 'SHR and health information repositories' → v2 D8 'Shared Health Record and Health Information Repository'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "v1 'Telemedicine' → v2 A9 'Telehealth systems'" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "v1: 'Non-routine data collection and management'; v2: 'Form creation for data acquisition' — overlapping but distinct scope" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "v2 4.4.2 'Standards-compliant interoperability' partially covers the broader v1 4.4.1 concept" +msgstr "" + +#: ConceptMap.group.element.target.comment +msgid "v2 4.4.3 'Message routing' partially covers the broader v1 4.4.1 concept" +msgstr "" + +#: CodeSystem.copyright +msgid "© International Labour Organization 2008" +msgstr "" + diff --git a/sushi-config.yaml b/sushi-config.yaml index 272dd4cf68..77df7b4c40 100644 --- a/sushi-config.yaml +++ b/sushi-config.yaml @@ -67,6 +67,63 @@ menu: +parameters: + # Enable FHIR resource translation extraction. The IG Publisher will + # produce .po/.xliff/.json translation files in the translations/ + # directory when these parameters are present. The collect step in + # ghbuild.yml and collect_publisher_pot_files() in run_ig_publisher.py + # copy them to input/translations/ for Weblate integration. 
+ # + # i18n-lang MUST list target languages that differ from i18n-default-lang; + # the IG Publisher skips extraction when the target equals the default. + i18n-default-lang: en + i18n-lang: + - fr + - ar + - zh + - ru + - es + +# ── Translation configuration ───────────────────────────────────────────── +# Single source of truth for target languages, plural forms, and translation +# service integrations. Read by input/scripts/translation_config.py and +# propagated to weblate.yaml, .po files, and CI workflows. +# +# NOTE: This block is a WHO SMART custom extension. SUSHI ignores unknown +# top-level keys, allowing all IG configuration to live in one file without +# affecting the build. +translations: + sourceLanguage: en + languages: + - code: ar + name: Arabic + direction: rtl + plural: "nplurals=6; plural=(n == 0 ? 0 : n == 1 ? 1 : n == 2 ? 2 : n%100 >= 3 && n%100 <= 10 ? 3 : n%100 >= 11 && n%100 <= 99 ? 4 : 5);" + - code: zh + name: Chinese (Simplified) + direction: ltr + plural: "nplurals=1; plural=0;" + - code: fr + name: French + direction: ltr + plural: "nplurals=2; plural=(n > 1);" + - code: ru + name: Russian + direction: ltr + plural: "nplurals=3; plural=(n%10 == 1 && n%100 != 11 ? 0 : n%10 >= 2 && n%10 <= 4 && (n%100 < 10 || n%100 >= 20) ? 
1 : 2);" + - code: es + name: Spanish + direction: ltr + plural: "nplurals=2; plural=(n != 1);" + services: + weblate: + enabled: true + url: https://hosted.weblate.org + launchpad: + enabled: false + crowdin: + enabled: false + meta: profile: - http://smart.who.int/base/StructureDefinition/SGImplementationGuide From ac83148a11a71e70698a77174ca7c7d3d30fd573 Mon Sep 17 00:00:00 2001 From: Carl Leitner Date: Fri, 6 Mar 2026 14:45:14 -0500 Subject: [PATCH 11/14] Update .github/skills/common/ig_errors.py Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- .github/skills/common/ig_errors.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/skills/common/ig_errors.py b/.github/skills/common/ig_errors.py index d386752ac5..6da8fa658b 100644 --- a/.github/skills/common/ig_errors.py +++ b/.github/skills/common/ig_errors.py @@ -5,7 +5,7 @@ matching the IG Publisher output format. Usage: - from common.ig_errors import error, warning, info, fatal, format_issue + from common.ig_errors import error, warning, info, fatal, format_issues issues = [] issues.append(error("BPMN-001", "Zeebe namespace detected", file="test.bpmn")) From e91966d46c035201379e8b149fbd5df3aa7efccb Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Fri, 6 Mar 2026 19:46:39 +0000 Subject: [PATCH 12/14] fix: use --tx for Python runner, -tx for Java JAR in ig_publisher_iface.py Co-authored-by: litlfred <662242+litlfred@users.noreply.github.com> --- .github/skills/common/ig_publisher_iface.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/skills/common/ig_publisher_iface.py b/.github/skills/common/ig_publisher_iface.py index 67b48125cb..aeede8744d 100644 --- a/.github/skills/common/ig_publisher_iface.py +++ b/.github/skills/common/ig_publisher_iface.py @@ -37,7 +37,8 @@ def run_ig_publisher( # Prefer the repo's own runner script if present runner_script = ig_root_path / "input" / 
"scripts" / "run_ig_publisher.py" - if runner_script.is_file(): + use_python_runner = runner_script.is_file() + if use_python_runner: cmd = [sys.executable, str(runner_script)] else: jar = os.environ.get( @@ -47,7 +48,9 @@ def run_ig_publisher( cmd = ["java", "-jar", jar, "-ig", str(ig_root_path)] if tx_server: - cmd.extend(["-tx", tx_server]) + # run_ig_publisher.py uses argparse (--tx); the Java JAR uses -tx + tx_flag = "--tx" if use_python_runner else "-tx" + cmd.extend([tx_flag, tx_server]) if extra_args: cmd.extend(extra_args) From c87fd25a848efdd092b3887dfea80a936b14f89f Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Fri, 6 Mar 2026 19:49:18 +0000 Subject: [PATCH 13/14] =?UTF-8?q?fix:=20address=20automated=20review=20fee?= =?UTF-8?q?dback=20=E2=80=94=20missing=20=5F=5Finit=5F=5F.py,=20prompt=20l?= =?UTF-8?q?oader=20rename,=20workflow=20deps,=20security,=20keyword=20and?= =?UTF-8?q?=20docstring=20fixes?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: litlfred <662242+litlfred@users.noreply.github.com> --- .github/skills/README.md | 2 +- .github/skills/bpmn_author/actions/__init__.py | 0 .../skills/bpmn_author/actions/bpmn_author_action.py | 11 ++++++++++- .github/skills/bpmn_import/actions/__init__.py | 0 .github/skills/common/ig_errors.py | 2 +- .../skills/common/{prompts.py => prompt_loader.py} | 2 +- .github/skills/dak_authoring/actions/__init__.py | 0 .../dak_authoring/actions/classify_issue_action.py | 8 +++----- .../dak_authoring/actions/dak_authoring_action.py | 2 +- .github/skills/ig_publisher/actions/__init__.py | 0 .../ig_publisher/actions/interpret_errors_action.py | 2 +- .github/skills/l1_review/actions/__init__.py | 0 .github/skills/l3_review/actions/__init__.py | 0 .github/skills/translation/actions/__init__.py | 0 .github/workflows/classify-issue.yml | 4 ++++ .github/workflows/pr-validate-slash.yml | 5 +++-- 
.github/workflows/skill-l1-review.yml | 3 +++ .github/workflows/skill-l2-dak.yml | 3 +++ .github/workflows/skill-l3-review.yml | 3 +++ 19 files changed, 34 insertions(+), 13 deletions(-) create mode 100644 .github/skills/bpmn_author/actions/__init__.py create mode 100644 .github/skills/bpmn_import/actions/__init__.py rename .github/skills/common/{prompts.py => prompt_loader.py} (97%) create mode 100644 .github/skills/dak_authoring/actions/__init__.py create mode 100644 .github/skills/ig_publisher/actions/__init__.py create mode 100644 .github/skills/l1_review/actions/__init__.py create mode 100644 .github/skills/l3_review/actions/__init__.py create mode 100644 .github/skills/translation/actions/__init__.py diff --git a/.github/skills/README.md b/.github/skills/README.md index 92f7a53a1a..3ae56b4ad5 100644 --- a/.github/skills/README.md +++ b/.github/skills/README.md @@ -92,7 +92,7 @@ Skills run automatically via GitHub Actions workflows: │ └── dak_skill.py # CLI entry point ├── common/ │ ├── llm_utils.py # LLM helpers — thin wrappers around LiteLLM -│ ├── prompts.py # load_prompt() — .md templates with {variable} +│ ├── prompt_loader.py # load_prompt() — .md templates with {variable} │ ├── ig_errors.py # FATAL/ERROR/WARNING/INFORMATION format │ ├── fsh_utils.py # FSH file utilities │ ├── ig_publisher_iface.py diff --git a/.github/skills/bpmn_author/actions/__init__.py b/.github/skills/bpmn_author/actions/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/.github/skills/bpmn_author/actions/bpmn_author_action.py b/.github/skills/bpmn_author/actions/bpmn_author_action.py index cb2036a279..3850bf519b 100644 --- a/.github/skills/bpmn_author/actions/bpmn_author_action.py +++ b/.github/skills/bpmn_author/actions/bpmn_author_action.py @@ -32,16 +32,25 @@ def main() -> None: sys.exit(0) from common.llm_utils import dak_completion - from common.prompts import load_prompt + from common.prompt_loader import load_prompt issue_title = 
os.environ.get("ISSUE_TITLE", "") issue_body = os.environ.get("ISSUE_BODY", "") model = os.environ.get("DAK_LLM_MODEL", "gpt-4o") + # Load additional prompt components required by the create_or_edit_bpmn template + _prompts_dir = _SKILLS_ROOT / "common" / "prompts" + dak_bpmn_constraints = (_prompts_dir / "dak_bpmn_constraints.md").read_text(encoding="utf-8") + bpmn_xml_schema = (_prompts_dir / "bpmn_xml_schema.md").read_text(encoding="utf-8") + actor_context = (_prompts_dir / "actor_context.md").read_text(encoding="utf-8") + prompt = load_prompt( "bpmn_author", "create_or_edit_bpmn", user_request=f"{issue_title}\n\n{issue_body}", current_bpmn="(none — creating new BPMN)", + dak_bpmn_constraints=dak_bpmn_constraints, + bpmn_xml_schema=bpmn_xml_schema, + actor_context=actor_context, ) print(f"🤖 Requesting BPMN from {model}...") diff --git a/.github/skills/bpmn_import/actions/__init__.py b/.github/skills/bpmn_import/actions/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/.github/skills/common/ig_errors.py b/.github/skills/common/ig_errors.py index 6da8fa658b..80dc2a5ea0 100644 --- a/.github/skills/common/ig_errors.py +++ b/.github/skills/common/ig_errors.py @@ -5,7 +5,7 @@ matching the IG Publisher output format. Usage: - from common.ig_errors import error, warning, info, fatal, format_issues + from common.ig_errors import error, warning, info, fatal, format_issuess issues = [] issues.append(error("BPMN-001", "Zeebe namespace detected", file="test.bpmn")) diff --git a/.github/skills/common/prompts.py b/.github/skills/common/prompt_loader.py similarity index 97% rename from .github/skills/common/prompts.py rename to .github/skills/common/prompt_loader.py index 969022a5a2..8da3b23111 100644 --- a/.github/skills/common/prompts.py +++ b/.github/skills/common/prompt_loader.py @@ -6,7 +6,7 @@ ``str.format_map``. 
Usage: - from common.prompts import load_prompt + from common.prompt_loader import load_prompt prompt = load_prompt("bpmn_author", "create_or_edit_bpmn", bpmn_xml="", diff --git a/.github/skills/dak_authoring/actions/__init__.py b/.github/skills/dak_authoring/actions/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/.github/skills/dak_authoring/actions/classify_issue_action.py b/.github/skills/dak_authoring/actions/classify_issue_action.py index 201b398f63..2191a88fe4 100644 --- a/.github/skills/dak_authoring/actions/classify_issue_action.py +++ b/.github/skills/dak_authoring/actions/classify_issue_action.py @@ -5,6 +5,7 @@ """ import os +import re import sys from pathlib import Path @@ -94,13 +95,10 @@ # Translation issues "mistranslation", "mistranslated", "wrong translation", "translation error", "translation review", "translation update", - "string", "untranslated", "missing translation", + "translatable string", "untranslated", "missing translation", ] -import re - - def _keyword_in_text(keyword: str, text: str) -> bool: """Check if ``keyword`` appears in ``text``. 
@@ -150,7 +148,7 @@ def apply_labels(issue_number: int, labels: list) -> None: def main() -> None: - from common.prompts import load_prompt + from common.prompt_loader import load_prompt from common.llm_utils import dak_completion issue_number = int(os.environ["ISSUE_NUMBER"]) diff --git a/.github/skills/dak_authoring/actions/dak_authoring_action.py b/.github/skills/dak_authoring/actions/dak_authoring_action.py index 42f359bc06..8105a27e47 100644 --- a/.github/skills/dak_authoring/actions/dak_authoring_action.py +++ b/.github/skills/dak_authoring/actions/dak_authoring_action.py @@ -26,7 +26,7 @@ def main() -> None: sys.exit(0) from common.llm_utils import dak_completion - from common.prompts import load_prompt + from common.prompt_loader import load_prompt issue_title = os.environ.get("ISSUE_TITLE", "") issue_body = os.environ.get("ISSUE_BODY", "") diff --git a/.github/skills/ig_publisher/actions/__init__.py b/.github/skills/ig_publisher/actions/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/.github/skills/ig_publisher/actions/interpret_errors_action.py b/.github/skills/ig_publisher/actions/interpret_errors_action.py index 9413254459..348f09d88c 100644 --- a/.github/skills/ig_publisher/actions/interpret_errors_action.py +++ b/.github/skills/ig_publisher/actions/interpret_errors_action.py @@ -24,7 +24,7 @@ def main() -> None: sys.exit(0) from common.llm_utils import dak_completion - from common.prompts import load_prompt + from common.prompt_loader import load_prompt model = os.environ.get("DAK_LLM_MODEL", "gpt-4o-mini") diff --git a/.github/skills/l1_review/actions/__init__.py b/.github/skills/l1_review/actions/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/.github/skills/l3_review/actions/__init__.py b/.github/skills/l3_review/actions/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/.github/skills/translation/actions/__init__.py b/.github/skills/translation/actions/__init__.py new 
file mode 100644 index 0000000000..e69de29bb2 diff --git a/.github/workflows/classify-issue.yml b/.github/workflows/classify-issue.yml index 7d3f13efd7..78aafb5f14 100644 --- a/.github/workflows/classify-issue.yml +++ b/.github/workflows/classify-issue.yml @@ -22,6 +22,10 @@ jobs: && echo "enabled=true" >> $GITHUB_OUTPUT \ || echo "enabled=false" >> $GITHUB_OUTPUT + - name: Install Python dependencies + if: steps.dak.outputs.enabled == 'true' + run: pip install litellm requests + - name: Classify and label if: steps.dak.outputs.enabled == 'true' env: diff --git a/.github/workflows/pr-validate-slash.yml b/.github/workflows/pr-validate-slash.yml index 7521990a9d..149b9ebdf8 100644 --- a/.github/workflows/pr-validate-slash.yml +++ b/.github/workflows/pr-validate-slash.yml @@ -12,10 +12,11 @@ on: jobs: dispatch: - # Only run on pull-request comments that start with /validate + # Only run on pull-request comments that start with /validate from collaborators if: > github.event.issue.pull_request != null && - startsWith(github.event.comment.body, '/validate') + startsWith(github.event.comment.body, '/validate') && + contains(fromJson('["OWNER","MEMBER","COLLABORATOR"]'), github.event.comment.author_association) runs-on: ubuntu-latest permissions: issues: write diff --git a/.github/workflows/skill-l1-review.yml b/.github/workflows/skill-l1-review.yml index e12f1a7b4f..3b7e1f3270 100644 --- a/.github/workflows/skill-l1-review.yml +++ b/.github/workflows/skill-l1-review.yml @@ -15,6 +15,9 @@ jobs: steps: - uses: actions/checkout@v4 + - name: Install Python dependencies + run: pip install litellm requests + - name: Run L1 review skill env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/skill-l2-dak.yml b/.github/workflows/skill-l2-dak.yml index 2960aff681..a783b95ed4 100644 --- a/.github/workflows/skill-l2-dak.yml +++ b/.github/workflows/skill-l2-dak.yml @@ -16,6 +16,9 @@ jobs: steps: - uses: actions/checkout@v4 + - name: Install Python dependencies 
+ run: pip install litellm requests + - name: Run L2 DAK authoring skill env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/skill-l3-review.yml b/.github/workflows/skill-l3-review.yml index 9f9c6cdaf4..ea57cab3a5 100644 --- a/.github/workflows/skill-l3-review.yml +++ b/.github/workflows/skill-l3-review.yml @@ -15,6 +15,9 @@ jobs: steps: - uses: actions/checkout@v4 + - name: Install Python dependencies + run: pip install litellm requests + - name: Run L3 review skill env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From 057c33a0dd0745f1934c70b63bd3777b5826fe45 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Fri, 6 Mar 2026 19:50:08 +0000 Subject: [PATCH 14/14] fix: correct format_issues typo in ig_errors.py docstring Co-authored-by: litlfred <662242+litlfred@users.noreply.github.com> --- .github/skills/common/ig_errors.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/skills/common/ig_errors.py b/.github/skills/common/ig_errors.py index 80dc2a5ea0..6da8fa658b 100644 --- a/.github/skills/common/ig_errors.py +++ b/.github/skills/common/ig_errors.py @@ -5,7 +5,7 @@ matching the IG Publisher output format. Usage: - from common.ig_errors import error, warning, info, fatal, format_issuess + from common.ig_errors import error, warning, info, fatal, format_issues issues = [] issues.append(error("BPMN-001", "Zeebe namespace detected", file="test.bpmn"))