diff --git a/.github/workflows/release-enhanced.yml b/.github/workflows/release-enhanced.yml new file mode 100644 index 0000000000..b23febfe07 --- /dev/null +++ b/.github/workflows/release-enhanced.yml @@ -0,0 +1,332 @@ +name: IG Release to gh-pages/sitepreview with DAK Processing + +on: + workflow_call: + inputs: + pubreq_package_id: + type: string + required: false + pubreq_version: + type: string + required: false + pubreq_canonical: + type: string + required: false + pubreq_path: + type: string + required: false + sitepreview_dir: + type: string + required: false + default: sitepreview + do_dak: + description: 'Enable DAK preprocessing and postprocessing' + required: false + type: boolean + default: true + +permissions: + contents: write # push to gh-pages in the caller repo + +jobs: + build-and-publish: + runs-on: ubuntu-latest + + steps: + # 1) Checkout the CALLER repo at the triggering commit/branch + - name: Checkout caller repo + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + # 2) If caller keeps override in .github/, copy it to root so the script can see it + - name: Use .github/release-config.yaml if present + run: | + if [ -f ".github/release-config.yaml" ] && [ ! 
-f "release-config.yaml" ]; then + cp .github/release-config.yaml release-config.yaml + echo "Using caller .github/release-config.yaml" + fi + + # 3) Toolchains + - name: Setup Java (publisher.jar) + uses: actions/setup-java@v4 + with: + distribution: temurin + java-version: "17" + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.11" + + # 4) Download the script + global config from smart-html/scripts at the exact ref used in "uses:" + - name: Download scripts from smart-html@ref + run: | + set -euxo pipefail + # Example: github.workflow_ref = WorldHealthOrganization/smart-html/.github/workflows/release.yml@main + REF="${{ github.workflow_ref }}" + REF="${REF##*@}" # => "main" (or a tag/SHA) + mkdir -p .ci-tools + curl -fsSL "https://raw.githubusercontent.com/WorldHealthOrganization/smart-html/enhanced-release-wf/scripts/ig_publisher.py" \ + -o .ci-tools/ig_publisher.py + curl -fsSL "https://raw.githubusercontent.com/WorldHealthOrganization/smart-html/enhanced-release-wf/scripts/requirements.txt" \ + -o .ci-tools/requirements.txt + curl -fsSL "https://raw.githubusercontent.com/WorldHealthOrganization/smart-html/enhanced-release-wf/scripts/release-config.yaml" \ + -o .ci-tools/release-config.global.yaml + curl -fsSL "https://raw.githubusercontent.com/WorldHealthOrganization/smart-html/enhanced-release-wf/scripts/dak_processor.py" \ + -o .ci-tools/dak_processor.py + chmod +x .ci-tools/ig_publisher.py + chmod +x .ci-tools/dak_processor.py + + # 5) Install Python deps + - name: Install Python deps + run: | + python -m pip install --upgrade pip + pip install -r .ci-tools/requirements.txt + # Install additional dependencies for DAK processing + pip install pyyaml lxml + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '18' + + - name: Install SUSHI + run: | + npm install -g fsh-sushi + sushi --version + + - name: Install Graphviz + run: | + sudo apt-get update + sudo apt-get install -y graphviz + dot -V + + 
- name: Install Ruby + run: | + sudo apt-get update + sudo apt-get install -y ruby-full build-essential zlib1g-dev + + - name: Install Jekyll (user gems) + run: | + gem install --no-document jekyll bundler --user-install + echo "$(ruby -e 'print Gem.user_dir')/bin" >> $GITHUB_PATH + + - name: Check Jekyll + run: | + jekyll -v + bundle -v + + - name: Pre-clone webroot (sparse) for verification + run: | + set -euxo pipefail + mkdir -p .ci-tools + git clone --depth=1 --filter=blob:none --sparse \ + https://github.com/WorldHealthOrganization/smart-html .ci-tools/webroot + # allow single-file patterns + git -C .ci-tools/webroot sparse-checkout init --no-cone || git -C .ci-tools/webroot sparse-checkout init + git -C .ci-tools/webroot sparse-checkout set --no-cone templates publish-setup.json package-registry.json + # optional: show what we got + git -C .ci-tools/webroot sparse-checkout list || true + ls -la .ci-tools/webroot | sed -n '1,200p' + test -f .ci-tools/webroot/publish-setup.json + + - name: Verify sparse contents + run: | + echo "Sparse list:" + git -C .ci-tools/webroot sparse-checkout list || true + echo "Root tree:" + ls -la .ci-tools/webroot | sed -n '1,200p' + test -f .ci-tools/webroot/publish-setup.json || (echo "publish-setup.json MISSING" && exit 1) + + # NEW: DAK Preprocessing Step + - name: Run DAK preprocessing scripts + if: ${{ inputs.do_dak }} + run: | + echo "🔬 Starting DAK preprocessing..." + + # Load config and check if preprocessing is enabled + if !
python3 -c "import yaml; config = yaml.safe_load(open('release-config.yaml')); exit(0 if config.get('scripts', {}).get('preprocessing') else 1)"; then + echo "No preprocessing scripts configured, skipping" + exit 0 + fi + + # Get script configuration from release-config.yaml + SOURCE_REPO=$(python3 -c "import yaml; config = yaml.safe_load(open('release-config.yaml')); print(config.get('scripts', {}).get('source_repo', 'https://github.com/WorldHealthOrganization/smart-base'))") + SOURCE_BRANCH=$(python3 -c "import yaml; config = yaml.safe_load(open('release-config.yaml')); print(config.get('scripts', {}).get('source_branch', 'main'))") + SOURCE_PATH=$(python3 -c "import yaml; config = yaml.safe_load(open('release-config.yaml')); print(config.get('scripts', {}).get('source_path', 'input/scripts'))") + + echo "📥 Downloading preprocessing scripts from $SOURCE_REPO" + echo "Branch:" $SOURCE_BRANCH + echo "Path:" $SOURCE_PATH + + # Create scripts directory + mkdir -p .ci-tools/preprocessing-scripts + + # Get list of preprocessing scripts from config + PREPROCESS_SCRIPTS=$(python3 -c "import yaml; config = yaml.safe_load(open('release-config.yaml')); print(' '.join(config.get('scripts', {}).get('preprocessing', [])))") + + if [ -z "$PREPROCESS_SCRIPTS" ]; then + echo "No preprocessing scripts defined" + exit 0 + fi + + echo "Scripts to download:" $PREPROCESS_SCRIPTS + + # Download each preprocessing script + for script in $PREPROCESS_SCRIPTS; do + echo "📥 Downloading $script..." + SCRIPT_URL="https://raw.githubusercontent.com/${SOURCE_REPO#https://github.com/}/${SOURCE_BRANCH}/${SOURCE_PATH}/${script}" + echo "URL: $SCRIPT_URL" + + if curl -fsSL "$SCRIPT_URL" -o ".ci-tools/preprocessing-scripts/${script}"; then + chmod +x ".ci-tools/preprocessing-scripts/${script}" + echo "✅ Downloaded $script" + else + echo "⚠️ Failed to download $script from $SCRIPT_URL" + fi + done + + # Download includes directory if needed (for DMN processing) + echo "📥 Downloading includes..." 
+ mkdir -p input/includes + + INCLUDES_URL="https://raw.githubusercontent.com/${SOURCE_REPO#https://github.com/}/${SOURCE_BRANCH}/input/includes" + + for include_file in dmn2html.xslt dmn.css; do + if curl -fsSL "${INCLUDES_URL}/${include_file}" -o "input/includes/${include_file}"; then + echo "✅ Downloaded ${include_file}" + else + echo "⚠️ Failed to download ${include_file}" + fi + done + + # Run each preprocessing script + echo "" + echo "🔧 Running preprocessing scripts..." + + for script in $PREPROCESS_SCRIPTS; do + if [ -f ".ci-tools/preprocessing-scripts/${script}" ]; then + echo "" + echo "▶️ Running ${script}..." + + # Run the script in the current directory (repo root) + if python3 ".ci-tools/preprocessing-scripts/${script}"; then + echo "✅ ${script} completed successfully" + else + echo "⚠️ ${script} failed (exit code: $?)" + # Continue with other scripts even if one fails + fi + else + echo "⚠️ Script not found: ${script}" + fi + done + + echo "" + echo "✅ DAK preprocessing completed" + + # Show what files were created + echo "" + echo "📋 Files created by preprocessing:" + ls -la input/pagecontent/dak-*.md 2>/dev/null || echo "No dak-*.md files created" + ls -la input/images/openapi/*.json 2>/dev/null || echo "No OpenAPI files created" + + # Standard IG Build & Publish + - name: Build & publish IG with standard workflow + run: | + set -euxo pipefail + python .ci-tools/ig_publisher.py \ + --global-config ".ci-tools/release-config.global.yaml" \ + --local-config "release-config.yaml" \ + --source "${{ github.workspace }}" \ + --source-repo "https://github.com/${{ github.repository }}" \ + --webroot-repo "https://github.com/WorldHealthOrganization/smart-html" \ + --registry-repo "https://github.com/ritikarawlani/ig-registry" \ + --ensure-pubreq \ + --pubreq-package-id "${{ inputs.pubreq_package_id }}" \ + --pubreq-version "${{ inputs.pubreq_version }}" \ + --pubreq-canonical "${{ inputs.pubreq_canonical }}" \ + --pubreq-path "${{ inputs.pubreq_path }}" \ + 
--publish-gh-pages \ + --sitepreview-dir "${{ inputs.sitepreview_dir }}" \ + --enable-pr \ + --github-token "${{ secrets.GITHUB_TOKEN }}" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_REPOSITORY: ${{ github.repository }} + + # DAK Post-Processing + - name: Run DAK post-processing + if: ${{ inputs.do_dak }} + run: | + echo "🔬 Starting DAK post-processing..." + + # Check if output directory exists + if [ ! -d "output" ]; then + echo "⚠️ Output directory not found, skipping DAK processing" + exit 0 + fi + + # Run DAK processor + python .ci-tools/dak_processor.py \ + --output-dir "output" \ + --source-dir "." \ + --verbose + + echo "✅ DAK post-processing completed" + + # Copy DAK artifacts to webroot for final deployment + - name: Copy DAK artifacts to webroot + if: ${{ inputs.do_dak }} + run: | + echo "📋 Copying DAK artifacts to webroot for deployment..." + + # Check if webroot directory exists + WEBROOT_DIR="" + if [ -d "webroot" ]; then + WEBROOT_DIR="webroot" + elif [ -d ".ci-tools/webroot" ]; then + WEBROOT_DIR=".ci-tools/webroot" + else + echo "⚠️ Webroot directory not found, skipping artifact copy" + exit 0 + fi + + # Copy DAK artifacts from output to webroot + if [ -d "output" ]; then + echo "Copying JSON schemas..." + cp output/*.schema.json "$WEBROOT_DIR/" 2>/dev/null || echo "No schema files found" + + echo "Copying JSON-LD vocabularies..." + cp output/*.jsonld "$WEBROOT_DIR/" 2>/dev/null || echo "No JSON-LD files found" + + echo "Copying OpenAPI specs..." + cp output/*.openapi.json "$WEBROOT_DIR/" 2>/dev/null || echo "No OpenAPI files found" + + echo "Copying DAK API hub..." + cp output/dak-api.html "$WEBROOT_DIR/" 2>/dev/null || echo "No DAK API hub found" + + echo "Copying QA reports..."
+ cp output/qa.json "$WEBROOT_DIR/" 2>/dev/null || echo "No QA report found" + + echo "✅ DAK artifacts copied to webroot" + else + echo "⚠️ Output directory not found" + fi + + # Optional: Upload build artifacts for debugging + - name: Upload build artifacts + if: always() + uses: actions/upload-artifact@v4 + with: + name: dak-processing-artifacts + path: | + output/*.schema.json + output/*.jsonld + output/*.openapi.json + output/dak-api.html + output/qa.json + input/temp/qa_*.json + input/pagecontent/dak-*.md + if-no-files-found: ignore + retention-days: 7 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 2f7b4b3723..66eb67d296 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,144 +1,187 @@ -name: ReleaseBuild +name: IG Release to gh-pages/sitepreview on: - workflow_call: # Reusable by other workflows - workflow_dispatch: # Manual trigger by user - + workflow_call: + inputs: + pubreq_package_id: + type: string + required: false + pubreq_version: + type: string + required: false + pubreq_canonical: + type: string + required: false + pubreq_path: + type: string + required: false + sitepreview_dir: + type: string + required: false + default: sitepreview + permissions: - contents: write - pull-requests: write + contents: write # push to gh-pages in the caller repo jobs: - build: + build-and-publish: runs-on: ubuntu-latest steps: - - name: Checkout current repo to ./source + # 1) Checkout the CALLER repo at the triggering commit/branch + - name: Checkout caller repo uses: actions/checkout@v4 with: - path: source - fetch-depth: 0 # Fetch all history for all branches and tags. + fetch-depth: 0 - - name: Checkout HL7/fhir-ig-history-template to ./history-template - uses: actions/checkout@v4 + # 2) If caller keeps override in .github/, copy it to root so the script can see it + - name: Use .github/release-config.yaml if present + run: | + if [ -f ".github/release-config.yaml" ] && [ ! 
-f "release-config.yaml" ]; then + cp .github/release-config.yaml release-config.yaml + echo "Using caller .github/release-config.yaml" + fi + + # 3) Toolchains + - name: Setup Java (publisher.jar) + uses: actions/setup-java@v4 with: - repository: HL7/fhir-ig-history-template - path: history-template + distribution: temurin + java-version: "17" - - name: Checkout WorldHealthOrganization/smart-html to ./webroot - uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v5 with: - repository: WorldHealthOrganization/smart-html - path: webroot - fetch-depth: 0 # Fetch all history for all branches and tags. + python-version: "3.11" - - name: Checkout FHIR/ig-registry to ./ig-registry - uses: actions/checkout@v4 - with: - repository: FHIR/ig-registry - path: ig-registry - - - name: Setup publisher and install dependencies + # 4) Download the script + global config from smart-html/scripts at the exact ref used in "uses:" + - name: Download scripts from smart-html@ref run: | - docker run --rm -v $(pwd):/workspace -w /workspace hl7fhir/ig-publisher-base:latest /bin/sh -c " - npm install -g fsh-sushi && - curl -L https://github.com/HL7/fhir-ig-publisher/releases/latest/download/publisher.jar -o ./publisher.jar --create-dirs - " - - - name: Create package cache folder + set -euxo pipefail + # Example: github.workflow_ref = WorldHealthOrganization/smart-html/.github/workflows/release.yml@main + REF="${{ github.workflow_ref }}" + REF="${REF##*@}" # => "main" (or a tag/SHA) + mkdir -p .ci-tools + curl -fsSL "https://raw.githubusercontent.com/costateixeira/smart-html/main/scripts/ig_publisher.py" \ + -o .ci-tools/ig_publisher.py + curl -fsSL "https://raw.githubusercontent.com/costateixeira/smart-html/main/scripts/requirements.txt" \ + -o .ci-tools/requirements.txt + curl -fsSL "https://raw.githubusercontent.com/costateixeira/smart-html/main/scripts/release-config.yaml" \ + -o .ci-tools/release-config.global.yaml + chmod +x .ci-tools/ig_publisher.py + + - 
name: Install Python deps run: | - docker run --rm -v $(pwd):/workspace -w /workspace hl7fhir/ig-publisher-base:latest /bin/sh -c " - mkdir -p ./fhir-package-cache && chmod 777 ./fhir-package-cache - " + python -m pip install --upgrade pip + pip install -r .ci-tools/requirements.txt - - name: Ensure write permissions for webroot and source directories + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '18' + + - name: Install SUSHI run: | - chmod -R 777 webroot - chmod -R 777 source - chmod -R 777 fhir-package-cache + npm install -g fsh-sushi + sushi --version - - name: Run the IG publisher + - name: Install Graphviz run: | - docker run --rm -v $(pwd):/workspace -w /workspace hl7fhir/ig-publisher-base:latest java -Xmx4g -jar ./publisher.jar publisher -ig source -package-cache-folder fhir-package-cache + sudo apt-get update + sudo apt-get install -y graphviz + dot -V + - - name: Run publisher command for publishing release + - name: Install Ruby run: | - docker run --rm -v $(pwd):/workspace -w /workspace hl7fhir/ig-publisher-base:latest /bin/sh -c " - ls -la /workspace/webroot && - java -Xmx4g -Dfile.encoding=UTF-8 -jar /workspace/publisher.jar -go-publish -package-cache-folder /workspace/fhir-package-cache -source /workspace/source -web /workspace/webroot -temp /workspace/temp -registry /workspace/ig-registry/fhir-ig-list.json -history /workspace/history-template -templates /workspace/webroot/templates - " + sudo apt-get update + sudo apt-get install -y ruby-full build-essential zlib1g-dev - - name: Exclude files > 100MB from gh-pages and move to release-assets + - name: Install Jekyll (user gems) run: | - mkdir -p ./release-assets - # Move all files larger than 100 MB to release-assets - find ./webroot/ -type f -size +100M -exec mv {} ./release-assets/ \; + gem install --no-document jekyll bundler --user-install + echo "$(ruby -e 'print Gem.user_dir')/bin" >> $GITHUB_PATH + + + - name: Check Jekyll + run: | + jekyll -v + bundle -v + + # 
- name: Prepare local webroot git remote + # run: | + # set -euxo pipefail + # mkdir -p .ci-tools + # # Create a bare remote with main branch seeded + # git init --bare --initial-branch=main .ci-tools/webroot-remote.git + # # Seed an initial commit so cloning --branch main works + # git clone .ci-tools/webroot-remote.git .ci-tools/webroot-seed + # git -C .ci-tools/webroot-seed config user.name "github-actions[bot]" + # git -C .ci-tools/webroot-seed config user.email "github-actions[bot]@users.noreply.github.com" + # touch .ci-tools/webroot-seed/.nojekyll + # git -C .ci-tools/webroot-seed add .nojekyll + # git -C .ci-tools/webroot-seed commit -m "Initialize webroot main" + # git -C .ci-tools/webroot-seed push origin main + # rm -rf .ci-tools/webroot-seed + + - name: Pre-clone webroot (sparse) for verification + run: | + set -euxo pipefail + mkdir -p .ci-tools + git clone --depth=1 --filter=blob:none --sparse \ + https://github.com/WorldHealthOrganization/smart-html .ci-tools/webroot + # allow single-file patterns + git -C .ci-tools/webroot sparse-checkout init --no-cone || git -C .ci-tools/webroot sparse-checkout init + git -C .ci-tools/webroot sparse-checkout set --no-cone templates publish-setup.json package-registry.json + # optional: show what we got + git -C .ci-tools/webroot sparse-checkout list || true + ls -la .ci-tools/webroot | sed -n '1,200p' + test -f .ci-tools/webroot/publish-setup.json - # Prepare the deploy folder, only with files ≤ 100MB - mkdir -p ./deploy - rsync -av --exclude-from=<(find ./webroot/ -type f -size +100M -printf "%P\n") ./webroot/ ./deploy/ + - name: Verify sparse contents + run: | + echo "Sparse list:" + git -C .ci-tools/webroot sparse-checkout list || true + echo "Root tree:" + ls -la .ci-tools/webroot | sed -n '1,200p' + test -f .ci-tools/webroot/publish-setup.json || (echo "publish-setup.json MISSING" && exit 1) - - name: Deploy to gh-pages - uses: peaceiris/actions-gh-pages@v3 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} 
- publish_dir: ./deploy - destination_dir: sitepreview - - name: Upload release assets ( package.tgz) - run: | - mkdir -p ./release-assets - mv ./source/output/package.tgz ./release-assets/ +# # 5) Build IG into a local webroot (no push inside the script) +# - name: Build IG to local webroot (no push) +# run: | +# set -euxo pipefail +# python .ci-tools/ig_publisher.py \ +# --global-config ".ci-tools/release-config.global.yaml" \ +# --local-config "release-config.yaml" \ +# --source "${{ github.workspace }}" +# # --webroot-branch "main" \ +# # --enable-sparse +# # --webroot-repo "https://github.com/costateixeira/smart-html" \ - - name: Upload updated fhir-ig-list.json as artifact - uses: actions/upload-artifact@v4 - with: - name: fhir-ig-list.json - path: ig-registry/fhir-ig-list.json + - name: Build & publish (Python drives gh-pages) + run: | + set -euxo pipefail + python .ci-tools/ig_publisher.py \ + --global-config ".ci-tools/release-config.global.yaml" \ + --local-config "release-config.yaml" \ + --source "${{ github.workspace }}" \ + --source-repo "https://github.com/${{ github.repository }}" \ + --webroot-repo "https://github.com/costateixeira/smart-html" \ + --registry-repo "https://github.com/costateixeira/ig-registry" \ + --ensure-pubreq \ + --pubreq-package-id "${{ inputs.pubreq_package_id }}" \ + --pubreq-version "${{ inputs.pubreq_version }}" \ + --pubreq-canonical "${{ inputs.pubreq_canonical }}" \ + --pubreq-path "${{ inputs.pubreq_path }}" \ + --publish-gh-pages \ + --sitepreview-dir "${{ inputs.sitepreview_dir }}" \ + --enable-pr \ + --github-token "${{ secrets.GITHUB_TOKEN }}" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_REPOSITORY: ${{ github.repository }} - - name: Upload updated fhir-ig-list.json as artifact - uses: actions/upload-artifact@v4 - with: - name: package-feeds.json - path: ig-registry/package-feeds.json - - -# - name: Upload package.tgz to release -# uses: actions/upload-release-asset@v1 -# with: -# upload_url: ${{ 
github.event.release.upload_url }} -# asset_path: ./release-assets/package.tgz -# asset_name: package.tgz -# asset_content_type: application/gzip - -# # Configure Git before committing changes -# - name: Configure Git -# run: | -# git config --global user.email "github-actions[bot]@users.noreply.github.com" -# git config --global user.name "GitHub Actions Bot" - -# # Use SSH or HTTPS with the user's credentials if available -# - name: Create new branch and update content in WorldHealthOrganization/smart-html -# run: | -# cd webroot -# git checkout main -# git checkout -b "${{ github.repository }}-${{ github.run_id }}" # Create a branch using the calling repo name -# rsync -av --exclude='.git/' --delete ../deploy/ ./ # Sync the deploy directory to the webroot -# git add . -# git commit -m "Update site content from repo ${{ github.repository }}" -# git push --set-upstream origin "${{ github.repository }}-${{ github.run_id }}" -# -# # Create a pull request to merge changes into the main branch -# - name: Create Pull Request -# uses: peter-evans/create-pull-request@v3 -# with: -# token: ${{ secrets.GITHUB_TOKEN }} # Reuse default GitHub token -# commit-message: "Update site content from repo ${{ github.repository }}" -# title: "Update site content from repo ${{ github.repository }}" -# body: "This is an automated pull request to update the site content from ${{ github.repository }}" -# head: "${{ github.repository }}-${{ github.run_id }}" -# base: main -# delete-branch: true diff --git a/MIGRATION-GUIDE.md b/MIGRATION-GUIDE.md new file mode 100644 index 0000000000..8f40c76c12 --- /dev/null +++ b/MIGRATION-GUIDE.md @@ -0,0 +1,325 @@ +# Migration Guide: Enhanced Release Workflow with DAK Processing + +This guide provides step-by-step instructions for migrating from the standard release workflow to the enhanced version with complete DAK processing capabilities. 
+ +## Quick Migration Checklist + +- [ ] Backup existing workflow files +- [ ] Add enhanced workflow to smart-html repository +- [ ] Update repository workflow files +- [ ] Add DAK configuration +- [ ] Test the enhanced workflow +- [ ] Update documentation and links + +## Step 1: Backup Existing Files + +Before making any changes, backup your current workflow configuration: + +```bash +# Backup existing workflow (if it exists) +cp .github/workflows/release.yml .github/workflows/release-backup.yml + +# Backup any existing release configuration +cp release-config.yaml release-config-backup.yaml 2>/dev/null || echo "No existing config" +``` + +## Step 2: Add Enhanced Workflow to smart-html + +If you're maintaining the smart-html repository, add these files: + +```bash +# Copy the enhanced workflow +cp release-enhanced.yml .github/workflows/ + +# Copy the DAK processor script +cp dak_processor.py scripts/ + +# Copy the enhanced release configuration +cp release-config-dak.yaml scripts/ + +# Copy documentation +cp README-DAK-RELEASE.md . +``` + +## Step 3: Update Repository Workflows + +For each repository that should use DAK processing: + +### A. 
Create/Update `.github/workflows/release.yml` + +```yaml +name: Release IG with DAK Processing + +on: + release: + types: [published] + workflow_dispatch: + inputs: + sitepreview_dir: + description: 'Site preview directory name' + required: false + default: 'sitepreview' + do_dak: + description: 'Enable DAK processing' + required: false + type: boolean + default: true + +jobs: + release: + uses: costateixeira/smart-html/.github/workflows/release-enhanced.yml@main + with: + pubreq_package_id: "your.package.id" # Replace with your package ID + pubreq_version: ${{ github.event.release.tag_name || 'dev' }} + pubreq_canonical: "http://your.domain/your-ig" # Replace with your canonical URL + pubreq_path: "/your-ig" # Replace with your path + sitepreview_dir: ${{ github.event.inputs.sitepreview_dir || 'sitepreview' }} + do_dak: ${{ github.event.inputs.do_dak != 'false' }} + secrets: inherit +``` + +### B. Add `release-config.yaml` to Repository Root + +```yaml +# Basic DAK-enabled configuration +dak: + enabled: true + +# Standard IG configuration +source_dir: "." 
+webroot_repo: "https://github.com/costateixeira/smart-html" +registry_repo: "https://github.com/costateixeira/ig-registry" + +# Repository-specific settings +publication: + canonical: "http://your.domain/your-ig" + package_id: "your.package.id" + +deployment: + sitepreview_dir: "sitepreview" + exclude: + - "ig-build-zips/" + - "temp/" +``` + +## Step 4: Enable DAK Processing + +Choose one of these methods to enable DAK processing: + +### Option A: Add dak.json (Recommended for DAK repositories) + +Create `dak.json` in repository root: + +```json +{ + "resourceType": "DAK", + "id": "your.ig.id", + "name": "YourIGName", + "title": "Your Implementation Guide Title", + "description": "Description of your implementation guide", + "version": "1.0.0", + "status": "draft", + "publicationUrl": "http://your.domain/your-ig", + "publisher": { + "name": "Your Organization", + "url": "http://your.organization" + } +} +``` + +### Option B: Add smart-base Dependency (For SMART Guidelines) + +Update `sushi-config.yaml`: + +```yaml +# Existing configuration... + +dependencies: + smart.who.int.base: current # Add this line + +# Rest of your configuration... +``` + +## Step 5: Test the Enhanced Workflow + +### A. Test with Workflow Dispatch + +1. Go to your repository on GitHub +2. Click **Actions** tab +3. Select **Release IG with DAK Processing** workflow +4. Click **Run workflow** +5. Choose branch and verify settings +6. Click **Run workflow** + +### B. Monitor the Build + +Check these sections in the workflow logs: + +- ✅ **Setup** - Environment and dependencies +- ✅ **Standard IG Build** - FHIR IG Publisher execution +- ✅ **DAK Post-Processing** - Schema and API generation +- ✅ **Artifact Copy** - Moving DAK files to deployment +- ✅ **Deployment** - Publishing to GitHub Pages + +### C. 
Verify Generated Artifacts + +After successful completion, check your GitHub Pages site: + +``` +https://yourusername.github.io/your-repo/sitepreview/ +``` + +Look for these new files: +- `dak-api.html` - DAK API documentation hub +- `ValueSet-*.schema.json` - JSON schemas for ValueSets +- `ValueSet-*.jsonld` - JSON-LD vocabularies +- `StructureDefinition-*.schema.json` - Logical model schemas +- `qa.json` - Enhanced QA report + +## Step 6: Update Documentation + +### A. Update Repository README + +Add a section about the enhanced features: + +```markdown +## API Documentation + +This implementation guide includes comprehensive API documentation: + +- **DAK API Hub**: [dak-api.html](https://yourusername.github.io/your-repo/sitepreview/dak-api.html) +- **JSON Schemas**: Validation schemas for all ValueSets and Logical Models +- **JSON-LD Vocabularies**: Semantic web integration for ValueSets +- **OpenAPI Specifications**: Complete API documentation for all endpoints + +## Release Process + +This repository uses the enhanced release workflow with DAK processing. +To create a release: + +1. Create a new release on GitHub +2. The workflow automatically builds and deploys with DAK processing +3. Access the enhanced IG at the GitHub Pages URL +``` + +### B. Update Links and References + +Update any existing documentation to reference the new artifact locations: + +- Link to `dak-api.html` instead of basic artifact lists +- Reference JSON schema files for validation examples +- Include JSON-LD vocabularies in semantic web documentation + +## Common Migration Issues + +### Issue: DAK Processing Not Running + +**Symptoms**: Workflow completes but no DAK artifacts generated + +**Solutions**: +1. Check `do_dak` parameter is `true` in workflow +2. Verify either `dak.json` exists or smart-base dependency is present +3. 
Check workflow logs for "DAK processing not enabled" messages + +### Issue: Script Download Failures + +**Symptoms**: Errors downloading DAK scripts from smart-base + +**Solutions**: +1. Verify smart-base repository is accessible +2. Check network connectivity in GitHub Actions +3. Verify script names exist in smart-base repository + +### Issue: Missing Artifacts in Deployment + +**Symptoms**: DAK processing runs but artifacts don't appear in final site + +**Solutions**: +1. Check "Copy DAK artifacts to webroot" step in logs +2. Verify output directory exists and contains artifacts +3. Check file permissions and copy commands + +### Issue: Performance/Timeout Problems + +**Symptoms**: Workflow times out during DAK processing + +**Solutions**: +1. Increase memory allocation for Java in workflow +2. Add timeout configuration to release-config.yaml +3. Consider excluding large files from processing + +## Rollback Procedure + +If you need to rollback to the standard workflow: + +### A. Restore Standard Workflow + +```bash +# Restore original workflow +cp .github/workflows/release-backup.yml .github/workflows/release.yml + +# Or use standard workflow reference +``` + +Update workflow to use standard release: + +```yaml +jobs: + release: + uses: costateixeira/smart-html/.github/workflows/release.yml@main # Remove -enhanced + with: + # Remove do_dak parameter + pubreq_package_id: "your.package.id" + # ... other standard parameters +``` + +### B. 
Clean Up Configuration + +```bash +# Remove DAK-specific configuration (optional) +rm dak.json # If you added this +rm release-config.yaml # If you only added it for DAK + +# Or update release-config.yaml to disable DAK +``` + +```yaml +# Disable DAK in configuration +dak: + enabled: false +``` + +## Testing Checklist + +Before fully migrating, test these scenarios: + +- [ ] **Standard Release**: Create a test release and verify basic IG generation +- [ ] **DAK Artifacts**: Verify all DAK artifacts are generated and accessible +- [ ] **API Documentation**: Check that dak-api.html loads and contains expected content +- [ ] **Schema Validation**: Test that generated JSON schemas are valid +- [ ] **Performance**: Ensure build time is acceptable for your repository size +- [ ] **Links and Navigation**: Verify all internal links work in the enhanced IG + +## Support + +If you encounter issues during migration: + +1. **Check Logs**: Review complete GitHub Actions logs for specific errors +2. **Validate Configuration**: Use YAML validators for configuration files +3. **Test Locally**: Run individual DAK scripts locally to isolate issues +4. **Report Issues**: Open issues in smart-html or smart-base repositories +5. **Rollback**: Use rollback procedure if critical issues occur + +## Next Steps + +After successful migration: + +1. **Monitor Performance**: Track build times and resource usage +2. **Update Team Documentation**: Train team on new features and artifacts +3. **Integrate API Documentation**: Link to DAK API hub from main documentation +4. **Customize Configuration**: Tune settings based on your repository's needs +5. **Contribute Improvements**: Share feedback and contribute enhancements + +--- + +For additional help, see the complete [README-DAK-RELEASE.md](README-DAK-RELEASE.md) documentation. 
diff --git a/README-DAK-RELEASE.md b/README-DAK-RELEASE.md new file mode 100644 index 0000000000..2040697f17 --- /dev/null +++ b/README-DAK-RELEASE.md @@ -0,0 +1,297 @@ +# Enhanced Release Workflow with DAK Processing + +This enhanced release workflow provides complete DAK (Digital Adaptation Kit) processing capabilities, ensuring that SMART Guidelines implementation guides include all the modern API documentation, JSON schemas, and semantic web features. + +## Overview + +The enhanced release workflow bridges the gap between the basic FHIR IG Publisher and the comprehensive DAK processing pipeline used in smart-base. It ensures that released implementation guides include: + +- ✅ **JSON Schemas** for ValueSets and Logical Models +- ✅ **JSON-LD Vocabularies** for semantic web integration +- ✅ **OpenAPI Documentation** with comprehensive API hub +- ✅ **DMN Questionnaire Generation** from decision tables +- ✅ **Comprehensive QA Reporting** for all processing steps + +## Quick Start + +### 1. Add the Enhanced Workflow + +Copy the enhanced `release.yml` to your repository's `.github/workflows/` directory: + +```yaml +# .github/workflows/release.yml +name: Release IG with DAK Processing + +on: + release: + types: [published] + workflow_dispatch: + inputs: + sitepreview_dir: + description: 'Site preview directory name' + required: false + default: 'sitepreview' + +jobs: + release: + uses: costateixeira/smart-html/.github/workflows/release-enhanced.yml@main + with: + pubreq_package_id: "your.package.id" + pubreq_version: ${{ github.event.release.tag_name || 'dev' }} + pubreq_canonical: "http://your.domain/your-ig" + pubreq_path: "/your-ig" + sitepreview_dir: ${{ github.event.inputs.sitepreview_dir || 'sitepreview' }} + do_dak: true # Enable DAK processing + secrets: inherit +``` + +### 2. 
Add Release Configuration + +Create a `release-config.yaml` file in your repository root: + +```yaml +# Enable DAK processing for this repository +dak: + enabled: true + +# Configure your IG publication details +publication: + canonical: "http://your.domain/your-ig" + package_id: "your.package.id" +``` + +### 3. Enable DAK Processing (One of the following) + +**Option A:** Add a `dak.json` file to your repository root: +```json +{ + "resourceType": "DAK", + "id": "your.ig.id", + "name": "YourIGName", + "title": "Your IG Title", + "version": "1.0.0" +} +``` + +**Option B:** Add smart.who.int.base as a dependency in `sushi-config.yaml`: +```yaml +dependencies: + smart.who.int.base: current +``` + +## What Gets Generated + +### During Standard IG Publishing +- **Standard FHIR IG**: Complete implementation guide with all FHIR resources +- **FHIR Artifacts**: ValueSets, StructureDefinitions, etc. +- **Standard Documentation**: HTML pages for all resources + +### During DAK Post-Processing +- **JSON Schemas**: `ValueSet-*.schema.json`, `StructureDefinition-*.schema.json` +- **JSON-LD Vocabularies**: `ValueSet-*.jsonld` with semantic web definitions +- **OpenAPI Documentation**: `.openapi.json` files for all schemas +- **DAK API Hub**: Comprehensive `dak-api.html` page with all API documentation +- **QA Reports**: Detailed processing reports in `qa.json` + +### Deployment Structure +``` +sitepreview/ +├── index.html # Main IG page +├── artifacts.html # FHIR artifacts +├── dak-api.html # 🆕 DAK API documentation hub +├── ValueSet-*.html # Standard FHIR pages +├── ValueSet-*.schema.json # 🆕 JSON schemas +├── ValueSet-*.jsonld # 🆕 JSON-LD vocabularies +├── ValueSet-*.openapi.json # 🆕 OpenAPI specs +├── StructureDefinition-*.html # Standard FHIR pages +├── StructureDefinition-*.schema.json # 🆕 Logical model schemas +└── qa.json # 🆕 Comprehensive QA report +``` + +## Advanced Configuration + +### Custom Script Configuration + +```yaml +# release-config.yaml +dak: + enabled: 
true + + scripts: + # Override default script source + source_repo: "https://github.com/YourOrg/custom-dak-scripts" + source_branch: "main" + + # Customize which scripts run + preprocessing: + - "dmn_questionnaire_generator.py" + - "custom_preprocessing.py" # Your custom script + + postprocessing: + - "generate_valueset_schemas.py" + - "generate_jsonld_vocabularies.py" + - "custom_postprocessing.py" # Your custom script + + # Custom output configuration + output: + preserve_patterns: + - "*.schema.json" + - "*.jsonld" + - "custom-*.json" +``` + +### Repository-Specific Overrides + +```yaml +# .github/release-config.yaml (takes precedence over root file) +dak: + enabled: true + +github: + enable_pr_creation: false # Disable PRs for this repo + +build: + java_memory: "8g" # More memory for large IGs + +deployment: + sitepreview_dir: "custom-preview" + exclude: + - "large-files/" + - "debug-output/" +``` + +## Troubleshooting + +### DAK Processing Not Running + +Check these common issues: + +1. **Missing DAK enablement**: Ensure you have either `dak.json` or `smart.who.int.base` dependency +2. **Wrong input parameter**: Set `do_dak: true` in workflow call +3. **Missing configuration**: Add `release-config.yaml` with `dak.enabled: true` + +### Missing Artifacts in Output + +1. **Check workflow logs**: Look for "DAK post-processing" section in GitHub Actions +2. **Verify IG Publisher success**: DAK processing only runs if IG Publisher succeeds +3. **Check exclusion patterns**: Ensure files aren't excluded by `deployment.exclude` + +### Script Download Failures + +1. **Check network connectivity**: Scripts download from GitHub during workflow +2. **Verify repository access**: Ensure smart-base repository is accessible +3. **Check script availability**: Some scripts may not exist in older smart-base versions + +### Performance Issues + +1. **Increase timeouts**: Modify `build.timeouts` in configuration +2. 
**Optimize memory**: Increase `build.java_memory` for large IGs +3. **Use sparse checkout**: Enable `sparse_checkout.enabled: true` + +## Comparison with Standard Workflow + +| Feature | Standard release.yml | Enhanced release.yml | +|---------|---------------------|---------------------| +| Basic IG Publishing | ✅ | ✅ | +| JSON Schemas | ❌ | ✅ | +| JSON-LD Vocabularies | ❌ | ✅ | +| OpenAPI Documentation | ❌ | ✅ | +| DAK API Hub | ❌ | ✅ | +| DMN Processing | ❌ | ✅ | +| Comprehensive QA | ❌ | ✅ | +| Processing Time | ~5-10 min | ~15-25 min | +| Output Size | Standard | +20-50% (schemas/APIs) | + +## Migration Guide + +### From Standard release.yml + +1. **Backup existing workflow**: Save your current `.github/workflows/release.yml` +2. **Replace with enhanced version**: Use the enhanced workflow file +3. **Add configuration**: Create `release-config.yaml` in repository root +4. **Enable DAK**: Add `dak.json` or smart-base dependency +5. **Test workflow**: Run a test release to verify all artifacts generate + +### From ghbuild.yml + +1. **Keep ghbuild for development**: Continue using `ghbuild.yml` for branch builds +2. **Add release workflow**: Use enhanced `release.yml` for releases +3. **Sync configurations**: Ensure both workflows use similar settings +4. 
**Update documentation**: Point release links to new sitepreview location + +## Files in this Package + +### Workflow Files +- `.github/workflows/release-enhanced.yml` - Enhanced release workflow with DAK processing +- Original `release.yml` - Backup for reference + +### Scripts +- `scripts/dak_processor.py` - Standalone DAK post-processing script +- `scripts/release-config-dak.yaml` - Example configuration with DAK support + +### Documentation +- `README-DAK-RELEASE.md` - This comprehensive guide +- `MIGRATION-GUIDE.md` - Step-by-step migration instructions + +## Architecture + +### Workflow Stages + +```mermaid +graph TD + A[Checkout Repository] --> B[Setup Environment] + B --> C[Download Scripts] + C --> D[Standard IG Build] + D --> E{DAK Enabled?} + E -->|Yes| F[DAK Post-Processing] + E -->|No| I[Deploy to GitHub Pages] + F --> G[Copy DAK Artifacts] + G --> H[Upload Build Artifacts] + H --> I[Deploy to GitHub Pages] +``` + +### DAK Processing Flow + +```mermaid +graph LR + A[IG Publisher Output] --> B[Check DAK Config] + B --> C[Download DAK Scripts] + C --> D[Generate ValueSet Schemas] + D --> E[Generate Logical Model Schemas] + E --> F[Generate JSON-LD Vocabularies] + F --> G[Generate DAK API Hub] + G --> H[Copy to Webroot] + H --> I[Deploy with Enhanced Artifacts] +``` + +## Support + +For issues with the enhanced release workflow: + +1. **Check logs**: Review GitHub Actions workflow logs for specific errors +2. **Validate configuration**: Ensure `release-config.yaml` syntax is correct +3. **Test components**: Try running individual DAK scripts locally +4. **Report issues**: Open issues in the smart-html or smart-base repositories + +## Contributing + +To improve the enhanced release workflow: + +1. **Submit PRs**: Contribute to smart-html repository for workflow improvements +2. **Add scripts**: Contribute new DAK processing scripts to smart-base +3. **Update documentation**: Help improve this README and troubleshooting guides +4. 
**Share configurations**: Share working configurations for different repository types + +## License + +This enhanced release workflow is provided under the same license as the smart-html repository. See the main repository LICENSE file for details. + +## Version History + +- **v1.0.0** - Initial enhanced release workflow with DAK processing +- **v1.1.0** - Added comprehensive error handling and artifact management +- **v1.2.0** - Improved configuration system and documentation + +--- + +For the latest version and updates, see the [smart-html repository](https://github.com/WorldHealthOrganization/smart-html). diff --git a/scripts/GUI_quickref.md b/scripts/GUI_quickref.md new file mode 100644 index 0000000000..3327473dc5 --- /dev/null +++ b/scripts/GUI_quickref.md @@ -0,0 +1,142 @@ +# 🎯 GUI Quick Command Reference + +## Most Common Commands + +### Basic Launch +```bash +# Just open the GUI +python ig_publisher_gui.py + +# GUI with specific IG folder +python ig_publisher_gui.py --ig-folder hiv + +# GUI from main script +python ig_publisher.py --gui +``` + +### Pre-filled GUI +```bash +# Fill in all the fields +python ig_publisher_gui.py \ + --ig-folder hiv \ + --push \ + --github-token $GITHUB_TOKEN +``` + +### Auto-Build +```bash +# Open GUI and start building immediately +python ig_publisher_gui.py --ig-folder hiv --auto-build +``` + +### No GUI (Headless) +```bash +# Run without showing GUI window +python ig_publisher_gui.py --ig-folder hiv --no-gui +``` + +## Essential Arguments Only + +| What You Want | Command | +|--------------|---------| +| Open GUI | `python ig_publisher_gui.py` | +| Set IG folder | `--ig-folder hiv` | +| Auto-start build | `--auto-build` | +| Enable GitHub push | `--push` | +| Run without GUI | `--no-gui` | +| Debug mode | `--debug` | +| Dark theme | `--theme dark` | +| Save log | `--log-file build.log` | + +## Real-World Examples + +### 1. Developer Testing +```bash +python ig_publisher_gui.py --ig-folder hiv --debug +``` + +### 2. 
CI/CD Pipeline +```bash +python ig_publisher_gui.py \ + --ig-folder hiv \ + --no-gui \ + --push \ + --log-file build.log +``` + +### 3. Quick Build with GUI +```bash +python ig_publisher_gui.py --ig-folder hiv --auto-build +``` + +### 4. Full Automation +```bash +export GITHUB_TOKEN=ghp_xxxxx +python ig_publisher_gui.py \ + --ig-folder hiv \ + --auto-build \ + --push \ + --minimized +``` + +## Config File Shortcut + +Save your settings once: +```bash +# Save config +python ig_publisher_gui.py \ + --ig-folder hiv \ + --webroot-repo WHO/smart-html \ + --save-config my-settings.json + +# Use saved config +python ig_publisher_gui.py --config my-settings.json +``` + +## Windows Users + +Create `ig-gui.bat`: +```batch +@echo off +python ig_publisher_gui.py %* +``` + +Then use: +``` +ig-gui --ig-folder hiv +``` + +## Mac/Linux Users + +Add to `~/.bashrc` or `~/.zshrc`: +```bash +alias ig-gui='python3 ~/scripts/ig_publisher_gui.py' +``` + +Then use: +```bash +ig-gui --ig-folder hiv +``` + +## Environment Variables + +Instead of command line args: +```bash +export IG_FOLDER=hiv +export GITHUB_TOKEN=ghp_xxxxx +python ig_publisher_gui.py # Will use env vars +``` + +## That's It! 🚀 + +90% of the time you'll just use: +```bash +# Open GUI +python ig_publisher_gui.py + +# Or with folder +python ig_publisher_gui.py --ig-folder hiv + +# Or auto-build +python ig_publisher_gui.py --ig-folder hiv --auto-build +``` \ No newline at end of file diff --git a/scripts/README.md b/scripts/README.md new file mode 100644 index 0000000000..1f235bad2e --- /dev/null +++ b/scripts/README.md @@ -0,0 +1,675 @@ +# FHIR IG Publisher + +A Python-based FHIR Implementation Guide publisher that automates building, deployment, and PR creation with **zero Python dependencies** - uses only standard library! 
+ +[![Python](https://img.shields.io/badge/python-3.8%2B-blue)](https://www.python.org) +[![Java](https://img.shields.io/badge/java-17%2B-orange)](https://adoptium.net/) +[![License](https://img.shields.io/badge/license-MIT-green)](LICENSE) + +## 🚀 Quick Start + +### GitHub Actions (Most Common) + +```yaml +name: Build IG + +on: + push: + branches: [main] + workflow_dispatch: + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-python@v4 + with: + python-version: '3.11' + + - uses: actions/setup-java@v3 + with: + java-version: '17' + distribution: 'temurin' + + # No pip install needed! Just run: + - name: Build IG + run: python scripts/ig_publisher.py --ig-folder hiv + + - name: Deploy to GitHub Pages + uses: peaceiris/actions-gh-pages@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./deploy +``` + +### Local Development + +```bash +# Clone and run - no installation needed! +git clone https://github.com/your-org/your-ig-repo.git +cd your-ig-repo +python ig_publisher.py --ig-folder hiv +``` + +## 📋 Table of Contents + +- [Features](#features) +- [GitHub Actions Usage](#github-actions-usage) +- [Installation](#installation) +- [Usage Options](#usage-options) +- [Command Line Reference](#command-line-reference) +- [Configuration](#configuration) +- [Examples](#examples) +- [Troubleshooting](#troubleshooting) + +## ✨ Features + +- **Zero Python Dependencies** - Uses only standard library, no pip install needed +- **Sparse Checkout** - Clones only needed folders, saves 90% disk space +- **Auto-detection** - Reads `publication-request.json` automatically +- **PR Automation** - Creates PRs to webroot and registry repos +- **GUI Interface** - Optional graphical interface for local development +- **GitHub Actions Ready** - Designed for CI/CD pipelines +- **No Token Mode** - Build without GitHub authentication + +## 🔧 GitHub Actions Usage + +### Basic Build Pipeline + +```yaml +name: Build and 
Deploy IG + +on: + push: + branches: [main] + pull_request: + branches: [main] + workflow_dispatch: + inputs: + ig_folder: + description: 'IG folder (leave empty to auto-detect)' + required: false + type: string + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - name: Checkout Code + uses: actions/checkout@v4 + with: + fetch-depth: 1 + + - name: Setup Build Environment + uses: actions/setup-python@v4 + with: + python-version: '3.11' + + - name: Setup Java + uses: actions/setup-java@v3 + with: + java-version: '17' + distribution: 'temurin' + + - name: Build IG + run: | + python scripts/ig_publisher.py \ + --ig-folder ${{ inputs.ig_folder }} \ + --work-dir . \ + --debug + + - name: Upload Build Artifacts + uses: actions/upload-artifact@v4 + with: + name: ig-build + path: | + deploy/ + release-assets/ + retention-days: 30 + + - name: Deploy to GitHub Pages + if: github.ref == 'refs/heads/main' + uses: peaceiris/actions-gh-pages@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./deploy + destination_dir: ${{ inputs.ig_folder || 'site' }} +``` + +### Advanced Pipeline with PR Creation + +```yaml +name: Build and Create PRs + +on: + release: + types: [published] + workflow_dispatch: + +permissions: + contents: write + pull-requests: write + pages: write + +jobs: + build-and-publish: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-python@v4 + with: + python-version: '3.11' + + - uses: actions/setup-java@v3 + with: + java-version: '17' + distribution: 'temurin' + + # Build and create PRs (requires PAT token) + - name: Build IG and Create PRs + env: + GITHUB_TOKEN: ${{ secrets.PAT_TOKEN }} # Personal Access Token with repo access + run: | + python scripts/ig_publisher.py \ + --push \ + --debug + + # Deploy to Pages + - name: Deploy to GitHub Pages + uses: peaceiris/actions-gh-pages@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./deploy + + # Upload package to release + - name: 
Upload Release Package + if: github.event_name == 'release' + uses: actions/upload-release-asset@v1 + with: + upload_url: ${{ github.event.release.upload_url }} + asset_path: ./release-assets/package.tgz + asset_name: package.tgz + asset_content_type: application/gzip +``` + +### Multi-IG Build Matrix + +```yaml +name: Build Multiple IGs + +on: + workflow_dispatch: + inputs: + ig_selection: + type: choice + description: 'Which IGs to build' + options: + - hiv + - tuberculosis + - malaria + - all + +jobs: + build: + runs-on: ubuntu-latest + strategy: + matrix: + ig: ${{ fromJson(github.event.inputs.ig_selection == 'all' && '["hiv", "tuberculosis", "malaria"]' || format('["{0}"]', github.event.inputs.ig_selection)) }} + + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v4 + - uses: actions/setup-java@v3 + with: + java-version: '17' + + - name: Build ${{ matrix.ig }} IG + run: python scripts/ig_publisher.py --ig-folder ${{ matrix.ig }} + + - name: Deploy ${{ matrix.ig }} to Pages + uses: peaceiris/actions-gh-pages@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./deploy + destination_dir: ${{ matrix.ig }} +``` + +### Scheduled Nightly Build + +```yaml +name: Nightly Build + +on: + schedule: + - cron: '0 2 * * *' # 2 AM UTC daily + workflow_dispatch: + +jobs: + nightly: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v4 + - uses: actions/setup-java@v3 + with: + java-version: '17' + + - name: Run Nightly Build + run: | + python scripts/ig_publisher.py \ + --ig-folder hiv \ + --debug \ + --work-dir . + + - name: Notify on Failure + if: failure() + uses: actions/github-script@v7 + with: + script: | + github.rest.issues.create({ + owner: context.repo.owner, + repo: context.repo.repo, + title: 'Nightly IG Build Failed', + body: `The nightly build failed. 
Check the [workflow run](${context.serverUrl}/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId})` + }) +``` + +## 💻 Installation + +### System Requirements + +| Component | Version | Required | Check Command | +|-----------|---------|----------|---------------| +| Python | 3.8+ | ✅ Yes | `python --version` | +| Java | 17+ | ✅ Yes | `java -version` | +| Git | Any | ✅ Yes | `git --version` | +| pip packages | None | ❌ No | **Script uses standard library only!** | + +### Installation by Platform + +#### Ubuntu/Debian +```bash +sudo apt update && sudo apt install -y python3 python3-tk openjdk-17-jdk git curl +``` + +#### macOS +```bash +brew install python@3.11 openjdk@17 git python-tk +``` + +#### Windows +```powershell +winget install Python.Python.3.11 +winget install EclipseAdoptium.Temurin.17.JDK +winget install Git.Git +``` + +#### Docker +```dockerfile +FROM ubuntu:22.04 +RUN apt-get update && apt-get install -y python3 openjdk-17-jdk git curl +COPY scripts/ /app/scripts/ +WORKDIR /app +ENTRYPOINT ["python3", "scripts/ig_publisher.py"] +``` + +## 📖 Usage Options + +### 1. Command Line (Most Common) + +```bash +# Auto-detect IG folder from publication-request.json +python ig_publisher.py + +# Specify IG folder +python ig_publisher.py --ig-folder hiv + +# Build without GitHub interaction (no token needed) +python ig_publisher.py --ig-folder hiv + +# Build and create PRs (requires token) +python ig_publisher.py --push --github-token YOUR_TOKEN + +# Debug mode with verbose output +python ig_publisher.py --ig-folder hiv --debug +``` + +### 2. GUI Interface + +```bash +# Launch GUI +python ig_publisher.py --gui + +# GUI with pre-filled settings +python ig_publisher_gui.py --ig-folder hiv --auto-build + +# GUI in headless mode (no window) +python ig_publisher_gui.py --ig-folder hiv --no-gui +``` + +### 3. 
No-Token Version + +```bash +# Build locally without any GitHub authentication +python ig_publisher_no_token.py --ig-folder hiv +``` + +## 📚 Command Line Reference + +### Main Script Arguments + +| Argument | Description | Default | Example | +|----------|-------------|---------|---------| +| `--ig-folder` | IG folder in webroot repo | Auto-detect from publication-request.json | `--ig-folder hiv` | +| `--webroot-repo` | Target repository for content | `WorldHealthOrganization/smart-html` | `--webroot-repo WHO/smart-html` | +| `--work-dir` | Working directory | Current directory | `--work-dir /path/to/work` | +| `--push` | Push changes and create PRs | False | `--push` | +| `--github-token` | GitHub token for PR creation | Environment: `GITHUB_TOKEN` | `--github-token ghp_xxx` | +| `--no-pr` | Push directly without PR | False | `--no-pr` | +| `--debug` | Enable debug logging | False | `--debug` | +| `--gui` | Launch GUI interface | False | `--gui` | + +### GUI Script Arguments + +| Argument | Description | Example | +|----------|-------------|---------| +| `--auto-build` | Start build automatically | `--auto-build` | +| `--no-gui` | Run without GUI (headless) | `--no-gui` | +| `--theme` | GUI theme (light/dark) | `--theme dark` | +| `--config` | Load settings from JSON | `--config settings.json` | +| `--save-config` | Save settings to JSON | `--save-config my-config.json` | +| `--log-file` | Save output to file | `--log-file build.log` | + +## ⚙️ Configuration + +### publication-request.json + +The script automatically reads this file to determine the IG folder: + +```json +{ + "package-id": "who.fhir.smart.hiv", + "version": "1.0.0", + "path": "https://worldhealthorganization.github.io/smart-html/hiv", + "status": "draft", + "mode": "working" +} +``` + +### Environment Variables + +```bash +export GITHUB_TOKEN=ghp_xxxxx # GitHub token +export IG_FOLDER=hiv # Default IG folder +export WEBROOT_REPO=WHO/smart-html # Default webroot repo +``` + +### Configuration File 
+ +Save settings in JSON: + +```json +{ + "ig_folder": "hiv", + "webroot_repo": "WorldHealthOrganization/smart-html", + "push": true, + "create_pr": true, + "max_file_size": 100 +} +``` + +Use with: `python ig_publisher.py --config my-settings.json` + +## 📝 Examples + +### Example 1: Simple Local Build + +```bash +# Just build, no GitHub interaction +python ig_publisher.py --ig-folder hiv +``` + +Output structure: +``` +./deploy/ # Ready to deploy files +./release-assets/ # Large files and package.tgz +./webroot/hiv/ # Sparse checkout of target folder +``` + +### Example 2: Full CI/CD Pipeline + +```yaml +name: Complete IG Pipeline + +on: + push: + branches: [main] + +jobs: + build-test-deploy: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v4 + - uses: actions/setup-java@v3 + with: + java-version: '17' + + # Build IG + - name: Build Implementation Guide + id: build + run: | + python scripts/ig_publisher.py --debug + echo "ig_folder=$(cat publication-request.json | python -c 'import json,sys;print(json.load(sys.stdin).get("path","").split("/")[-1])')" >> $GITHUB_OUTPUT + + # Run tests + - name: Validate Output + run: | + test -f deploy/index.html + test -f release-assets/package.tgz + + # Deploy + - name: Deploy to GitHub Pages + uses: peaceiris/actions-gh-pages@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./deploy + destination_dir: ${{ steps.build.outputs.ig_folder }} +``` + +### Example 3: PR Creation Workflow + +```yaml +name: Create PRs for IG Updates + +on: + workflow_dispatch: + +jobs: + create-prs: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v4 + - uses: actions/setup-java@v3 + with: + java-version: '17' + + - name: Build and Create PRs + env: + # Requires PAT with repo access to external repos + GITHUB_TOKEN: ${{ secrets.PAT_WITH_REPO_ACCESS }} + run: | + python scripts/ig_publisher.py \ + --push \ + --debug + + - name: Report PR 
URLs + run: | + echo "Check PR_INSTRUCTIONS.md for pull request links" + cat PR_INSTRUCTIONS.md +``` + +### Example 4: Multi-Environment Deployment + +```yaml +name: Deploy to Multiple Environments + +on: + push: + branches: [main, staging, development] + +jobs: + deploy: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v4 + - uses: actions/setup-java@v3 + with: + java-version: '17' + + - name: Determine Environment + id: env + run: | + if [[ "${{ github.ref }}" == "refs/heads/main" ]]; then + echo "environment=production" >> $GITHUB_OUTPUT + echo "url=https://worldhealthorganization.github.io/smart-html" >> $GITHUB_OUTPUT + elif [[ "${{ github.ref }}" == "refs/heads/staging" ]]; then + echo "environment=staging" >> $GITHUB_OUTPUT + echo "url=https://staging.smart.who.int" >> $GITHUB_OUTPUT + else + echo "environment=development" >> $GITHUB_OUTPUT + echo "url=https://dev.smart.who.int" >> $GITHUB_OUTPUT + fi + + - name: Build IG + run: python scripts/ig_publisher.py --debug + + - name: Deploy to ${{ steps.env.outputs.environment }} + uses: peaceiris/actions-gh-pages@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./deploy + destination_dir: ${{ steps.env.outputs.environment }} +``` + +## 🐛 Troubleshooting + +### Common Issues and Solutions + +| Issue | Solution | +|-------|----------| +| `python: command not found` | Use `python3` instead of `python` | +| `java: command not found` | Install Java 17+: `apt install openjdk-17-jdk` | +| `No module named tkinter` | Install: `apt install python3-tk` (GUI only) | +| `Permission denied` | Run: `chmod +x ig_publisher.py` | +| `IG folder not specified` | Add `--ig-folder` or check `publication-request.json` | +| `GitHub API rate limit` | Use `--github-token` with PAT | +| `Disk space error` | Script already uses sparse checkout, check available space | + +### Debug Mode + +For detailed output: +```bash +python ig_publisher.py --ig-folder hiv --debug +``` 
+ +### Check Requirements + +```bash +python3 --version # Should be 3.8+ +java -version # Should be 17+ +git --version # Any version +``` + +## 📁 Project Structure + +``` +your-ig-repo/ +├── .github/ +│ └── workflows/ +│ └── build.yml # GitHub Actions workflow +├── scripts/ +│ ├── ig_publisher.py # Main script +│ ├── ig_publisher_gui.py # GUI module (optional) +│ └── ig_publisher_no_token.py # No-auth version +├── input/ +│ └── fsh/ # FHIR Shorthand files +├── publication-request.json # IG configuration +└── sushi-config.yaml # SUSHI configuration +``` + +## 🚢 Deployment Options + +### GitHub Pages +```yaml +- uses: peaceiris/actions-gh-pages@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./deploy +``` + +### AWS S3 +```yaml +- uses: jakejarvis/s3-sync-action@master + with: + args: --acl public-read --follow-symlinks --delete + env: + AWS_S3_BUCKET: ${{ secrets.AWS_S3_BUCKET }} + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + SOURCE_DIR: './deploy' +``` + +### Docker Registry +```yaml +- name: Build and Push Docker Image + run: | + docker build -t ${{ secrets.REGISTRY }}/ig:${{ github.sha }} . + docker push ${{ secrets.REGISTRY }}/ig:${{ github.sha }} +``` + +## 🔒 Security + +- **No Token Mode**: Build without authentication using `ig_publisher_no_token.py` +- **Automatic Token**: GitHub Actions provides `GITHUB_TOKEN` automatically +- **PAT Required**: Only for pushing to external repositories +- **Sparse Checkout**: Minimizes code exposure by cloning only needed folders + +## 📄 License + +MIT License - See [LICENSE](LICENSE) file for details. + +## 🤝 Contributing + +1. Fork the repository +2. Create a feature branch +3. Make your changes +4. Run tests: `python -m pytest tests/` +5. 
Submit a pull request + +## 📞 Support + +- **Documentation**: [Full docs](https://github.com/your-org/ig-publisher/wiki) +- **Issues**: [GitHub Issues](https://github.com/your-org/ig-publisher/issues) +- **Discussions**: [GitHub Discussions](https://github.com/your-org/ig-publisher/discussions) +- **FHIR Community**: [chat.fhir.org](https://chat.fhir.org) + +## 🏆 Credits + +Built for the WHO SMART Guidelines project by the FHIR community. + +--- + +**Remember**: No pip packages required! Just Python standard library. 🎉 \ No newline at end of file diff --git a/scripts/dak_processor.py b/scripts/dak_processor.py new file mode 100644 index 0000000000..7ccc97bb94 --- /dev/null +++ b/scripts/dak_processor.py @@ -0,0 +1,321 @@ +#!/usr/bin/env python3 +""" +DAK Post-Processing Integration Script + +This script can be called after the FHIR IG Publisher runs to add DAK-specific +post-processing including JSON schemas, JSON-LD vocabularies, and API documentation. + +It downloads and runs the necessary scripts from the smart-base repository. + +Usage: + python dak_processor.py [--output-dir output] [--source-dir .] 
+ +Author: SMART Guidelines Team +""" + +import os +import sys +import subprocess +import logging +import argparse +import json +from pathlib import Path +import urllib.request +import tempfile +from datetime import datetime + + +def setup_logging(): + """Configure logging for the script.""" + logging.basicConfig( + level=logging.INFO, + format='%(asctime)s - %(levelname)s - %(message)s' + ) + return logging.getLogger(__name__) + + +class DAKProcessor: + """Handles DAK post-processing after FHIR IG Publisher runs.""" + + def __init__(self, logger, output_dir="output", source_dir="."): + self.logger = logger + self.output_dir = os.path.abspath(output_dir) + self.source_dir = os.path.abspath(source_dir) + self.temp_dir = tempfile.mkdtemp(prefix="dak_processor_") + self.scripts_base_url = "https://raw.githubusercontent.com/WorldHealthOrganization/smart-base/main/input/scripts" + self.includes_base_url = "https://raw.githubusercontent.com/WorldHealthOrganization/smart-base/main/input/includes" + + # Scripts we need for DAK processing + self.required_scripts = [ + "generate_valueset_schemas.py", + "generate_logical_model_schemas.py", + "generate_jsonld_vocabularies.py", + "generate_dak_api_hub.py" + ] + + # Include files we might need + self.include_files = [ + "dmn2html.xslt", + "dmn.css" + ] + + def download_file(self, url, local_path): + """Download a file from URL to local path.""" + try: + self.logger.info(f"Downloading {url}") + urllib.request.urlretrieve(url, local_path) + return True + except Exception as e: + self.logger.warning(f"Failed to download {url}: {e}") + return False + + def download_dak_scripts(self): + """Download required DAK processing scripts.""" + scripts_dir = os.path.join(self.temp_dir, "scripts") + os.makedirs(scripts_dir, exist_ok=True) + + downloaded_scripts = [] + + # Download main processing scripts + for script in self.required_scripts: + url = f"{self.scripts_base_url}/{script}" + local_path = os.path.join(scripts_dir, script) + + if 
self.download_file(url, local_path): + os.chmod(local_path, 0o755) # Make executable + downloaded_scripts.append(local_path) + self.logger.info(f"✅ Downloaded {script}") + else: + self.logger.warning(f"⚠️ Failed to download {script}") + + # Download include files if needed + includes_dir = os.path.join(self.temp_dir, "includes") + os.makedirs(includes_dir, exist_ok=True) + + for include_file in self.include_files: + url = f"{self.includes_base_url}/{include_file}" + local_path = os.path.join(includes_dir, include_file) + + if self.download_file(url, local_path): + self.logger.info(f"✅ Downloaded {include_file}") + + return downloaded_scripts + + def check_dak_enabled(self): + """Check if DAK processing should be enabled for this repository.""" + # Check for dak.json file + dak_json_path = os.path.join(self.source_dir, "dak.json") + if os.path.exists(dak_json_path): + try: + with open(dak_json_path, 'r') as f: + dak_config = json.load(f) + self.logger.info(f"✅ Found dak.json - DAK processing enabled") + return True + except Exception as e: + self.logger.warning(f"Found dak.json but couldn't parse it: {e}") + + # Check if smart.who.int.base is a dependency + sushi_config_path = os.path.join(self.source_dir, "sushi-config.yaml") + if os.path.exists(sushi_config_path): + try: + import yaml + with open(sushi_config_path, 'r') as f: + sushi_config = yaml.safe_load(f) + + dependencies = sushi_config.get('dependencies', {}) + if 'smart.who.int.base' in dependencies: + self.logger.info("✅ Found smart.who.int.base dependency - DAK processing enabled") + return True + except Exception as e: + self.logger.warning(f"Could not check sushi-config.yaml: {e}") + + self.logger.info("ℹ️ DAK processing not enabled (no dak.json or smart.who.int.base dependency)") + return False + + def run_script(self, script_path, args=None): + """Run a Python script with the given arguments.""" + if not os.path.exists(script_path): + self.logger.error(f"Script not found: {script_path}") + return 
False + + cmd = [sys.executable, script_path] + if args: + cmd.extend(args) + + try: + self.logger.info(f"Running: {' '.join(cmd)}") + result = subprocess.run(cmd, cwd=self.source_dir, capture_output=True, text=True, timeout=300) + + if result.stdout: + self.logger.info(f"Script output: {result.stdout}") + if result.stderr: + self.logger.warning(f"Script stderr: {result.stderr}") + + if result.returncode == 0: + self.logger.info(f"✅ Script completed successfully: {os.path.basename(script_path)}") + return True + else: + self.logger.warning(f"⚠️ Script completed with errors: {os.path.basename(script_path)} (exit code: {result.returncode})") + return False + + except subprocess.TimeoutExpired: + self.logger.error(f"❌ Script timed out: {os.path.basename(script_path)}") + return False + except Exception as e: + self.logger.error(f"❌ Error running script {os.path.basename(script_path)}: {e}") + return False + + def process(self): + """Main processing method.""" + self.logger.info("🔬 Starting DAK post-processing...") + + # Check if DAK processing should be enabled + if not self.check_dak_enabled(): + self.logger.info("DAK processing not enabled, skipping...") + return True + + # Check if output directory exists + if not os.path.exists(self.output_dir): + self.logger.error(f"Output directory not found: {self.output_dir}") + self.logger.error("Please run the FHIR IG Publisher first to generate the output directory") + return False + + self.logger.info(f"Processing output directory: {self.output_dir}") + + # Download required scripts + scripts = self.download_dak_scripts() + if not scripts: + self.logger.error("No DAK scripts could be downloaded") + return False + + success_count = 0 + total_scripts = 0 + + # 1. 
Generate ValueSet schemas + valueset_script = os.path.join(self.temp_dir, "scripts", "generate_valueset_schemas.py") + if os.path.exists(valueset_script): + total_scripts += 1 + self.logger.info("📊 Generating ValueSet schemas...") + expansions_file = os.path.join(self.output_dir, "expansions.json") + if os.path.exists(expansions_file): + if self.run_script(valueset_script, [expansions_file, self.output_dir]): + success_count += 1 + else: + self.logger.warning("expansions.json not found, skipping ValueSet schema generation") + + # 2. Generate Logical Model schemas + lm_script = os.path.join(self.temp_dir, "scripts", "generate_logical_model_schemas.py") + if os.path.exists(lm_script): + total_scripts += 1 + self.logger.info("📋 Generating Logical Model schemas...") + if self.run_script(lm_script, [self.output_dir, self.output_dir]): + success_count += 1 + + # 3. Generate JSON-LD vocabularies + jsonld_script = os.path.join(self.temp_dir, "scripts", "generate_jsonld_vocabularies.py") + if os.path.exists(jsonld_script): + total_scripts += 1 + self.logger.info("🗂️ Generating JSON-LD vocabularies...") + expansions_file = os.path.join(self.output_dir, "expansions.json") + if os.path.exists(expansions_file): + if self.run_script(jsonld_script, [expansions_file, self.output_dir]): + success_count += 1 + else: + self.logger.warning("expansions.json not found, skipping JSON-LD vocabulary generation") + + # 4. 
Generate DAK API Hub + api_script = os.path.join(self.temp_dir, "scripts", "generate_dak_api_hub.py") + if os.path.exists(api_script): + total_scripts += 1 + self.logger.info("🌐 Generating DAK API Hub...") + openapi_dir = os.path.join(self.source_dir, "input", "images", "openapi") + if self.run_script(api_script, [self.output_dir, openapi_dir]): + success_count += 1 + + # Summary + self.logger.info(f"DAK post-processing completed: {success_count}/{total_scripts} scripts successful") + + if success_count == total_scripts: + self.logger.info("✅ All DAK post-processing completed successfully!") + return True + elif success_count > 0: + self.logger.warning(f"⚠️ Partial success: {success_count}/{total_scripts} scripts completed") + return True + else: + self.logger.error("❌ DAK post-processing failed") + return False + + def cleanup(self): + """Clean up temporary files.""" + try: + import shutil + shutil.rmtree(self.temp_dir, ignore_errors=True) + self.logger.info("🧹 Cleaned up temporary files") + except Exception as e: + self.logger.warning(f"Warning: Could not clean up temporary files: {e}") + + +def main(): + """Main entry point.""" + parser = argparse.ArgumentParser( + description="DAK Post-Processing for FHIR Implementation Guides" + ) + parser.add_argument( + "--output-dir", + type=str, + default="output", + help="Directory containing FHIR IG Publisher output (default: output)" + ) + parser.add_argument( + "--source-dir", + type=str, + default=".", + help="Source directory of the IG repository (default: current directory)" + ) + parser.add_argument( + "--force", + action="store_true", + help="Force DAK processing even if dak.json is not found" + ) + parser.add_argument( + "--verbose", + action="store_true", + help="Enable verbose logging" + ) + + args = parser.parse_args() + + # Setup logging + if args.verbose: + logging.getLogger().setLevel(logging.DEBUG) + + logger = setup_logging() + + # Initialize processor + processor = DAKProcessor(logger, args.output_dir, 
args.source_dir) + + try: + # Override DAK enabled check if force is specified + if args.force: + logger.info("🔧 Force mode enabled - DAK processing will run regardless of configuration") + original_check = processor.check_dak_enabled + processor.check_dak_enabled = lambda: True + + # Run processing + success = processor.process() + + # Exit with appropriate code + sys.exit(0 if success else 1) + + except KeyboardInterrupt: + logger.info("❌ DAK processing interrupted by user") + sys.exit(1) + except Exception as e: + logger.error(f"❌ Unexpected error during DAK processing: {e}") + sys.exit(1) + finally: + processor.cleanup() + + +if __name__ == "__main__": + main() diff --git a/scripts/fhir_publisher_ui.html b/scripts/fhir_publisher_ui.html new file mode 100644 index 0000000000..7f9d2a3979 --- /dev/null +++ b/scripts/fhir_publisher_ui.html @@ -0,0 +1,786 @@ + + + + + + FHIR IG Publisher + + + +
+ 🌙 +
+ +
+
+
+
🧬
+

FHIR IG Publisher

+

Beautiful interface for configuring and publishing FHIR Implementation Guides

+
+
+ +
+
+ + + +
+ +
+
+
+
+ 📂 +

Source Configuration

+
+ +
+
+ 🌐 + +
+
Git repository containing your FHIR IG source files
+ +
+ +
+
+ 🔀 + +
+
Specific branch or tag to use from the source repository
+ +
+ +
+
+ 📁 + +
+
Optional: Use existing local directory instead of cloning from repository
+ +
+
+
+ +
+
+
+ 🔗 +

Repository Configuration

+
+ +
+
+ 📚 + +
+
Repository containing the IG history template for version management
+ +
+ +
+
+ 🌿 + +
+
Branch to use from the history repository
+ +
+ +
+
+ 🌍 + +
+
Repository containing web publishing templates and assets
+ +
+ +
+
+ 🌳 + +
+
Branch to use from the webroot repository
+ +
+
+
+ +
+
+
+ +

Sparse Checkout Optimization

+
+ +
+
+ + +
+
Optimize clone performance by downloading only specific folders instead of the entire repository
+ +
+
+
+ 📁 + +
+
Space-separated list of directories to include in the clone
+ + +
+ 💡 + Example: templates assets css js images +
+
+
+
+
+
+ +
+ + +
+ +
+
🔄 Running FHIR IG Publisher...
+
+
+
+
+
+ + + + \ No newline at end of file diff --git a/scripts/ig_publisher.py b/scripts/ig_publisher.py new file mode 100644 index 0000000000..b6b13e2298 --- /dev/null +++ b/scripts/ig_publisher.py @@ -0,0 +1,1051 @@ +import os +import sys +import subprocess +import logging +import argparse +import yaml +import threading +import requests +import json +from datetime import datetime +from copy import deepcopy +from urllib.parse import urlparse +import shutil +import tempfile +import time + +try: + import git +except ImportError: + print("Installing GitPython...") + subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'gitpython']) + import git + +try: + import tkinter as tk + from tkinter import ttk, filedialog, messagebox, scrolledtext + import tkinter.font as tkfont +except ImportError: + tk = None + +logging.basicConfig( + level=logging.INFO, + format='%(asctime)s - %(levelname)s - %(message)s' +) + +CONFIG_FILE = "release-config.yaml" + +# Keep these as relative paths; we normalize/strip leading slashes below +ALWAYS_INCLUDE = ["/templates", "/publish-setup.json", "/package-registry.json", "/package-feed.xml", "/publication-feed.xml"] + + +def _first_path_segment(s: str) -> str | None: + """Return the first segment of a URL or path (e.g., '/dak-pnc/v0.9.9' -> 'dak-pnc').""" + if not s: + return None + # If it's a URL, parse the path; if it's a bare path, use as-is + if '://' in s: + p = urlparse(s).path + else: + p = s + parts = p.strip("/").split("/") + return parts[0] if parts and parts[0] else None + + +def _ig_slug_from_pubreq(source_dir: str) -> str | None: + """Read publication-request.json and return IG slug (first path segment).""" + try: + with open(os.path.join(source_dir, "publication-request.json"), encoding="utf-8") as f: + pr = json.load(f) + # Prefer 'path', else fall back to 'canonical' + return _first_path_segment(pr.get("path") or pr.get("canonical") or "") + except Exception: + return None + + +def _looks_like_file(p: str) -> bool: + # 
crude heuristic: file if no slash and has a dot, or ends with .json/.yml etc + return ('/' not in p and '.' in p) or p.endswith(('.json', '.yml', '.yaml')) + + +def _normalize_sparse_list(paths): + if not paths: + return [] + norm = [] + for p in paths: + p = (p or "").strip().lstrip("/") + if not p: + continue + if not p.startswith("/"): + p = "/" + p + norm.append(p) + return norm + + +class ReleasePublisher: + def __init__(self, source_dir=None, source_repo=None, source_branch=None, + webroot_repo=None, webroot_branch=None, + history_repo=None, history_branch=None, + registry_repo=None, + sparse_dirs=None, enable_sparse_checkout=False, progress_callback=None, + github_token=None, enable_pr_creation=False, + publish_to_gh_pages=False, sitepreview_dir="sitepreview", + gh_pages_branch="gh-pages", exclude_paths=None, + webroot_pr_target_branch="main", registry_pr_target_branch="master", + ensure_pubreq=False, pubreq_overrides=None + ): + + self.base_dir = os.path.abspath(os.path.dirname(__file__)) + self.source_dir = source_dir or os.path.join(self.base_dir, 'source') + self.source_repo = source_repo + self.source_branch = source_branch + self.webroot_repo = webroot_repo or 'https://github.com/WorldHealthOrganization/smart-html' + self.webroot_branch = webroot_branch + self.history_repo = history_repo or 'https://github.com/HL7/fhir-ig-history-template' + self.history_branch = history_branch + self.registry_repo = registry_repo or 'https://github.com/FHIR/ig-registry' + + self.webroot_dir = os.path.join(self.base_dir, 'webroot') + self.history_dir = os.path.join(self.base_dir, 'history-template') + self.registry_dir = os.path.join(self.base_dir, 'ig-registry') + self.package_cache = os.path.join(self.base_dir, 'fhir-package-cache') + self.temp_dir = os.path.join(self.base_dir, 'temp') + self.publisher_jar = os.path.join(self.base_dir, 'publisher.jar') + + self.publish_to_gh_pages = publish_to_gh_pages + self.sitepreview_dir = sitepreview_dir + self.gh_pages_branch = 
gh_pages_branch + self.exclude_paths = exclude_paths or [] + + self.ensure_pubreq = ensure_pubreq + self.pubreq_overrides = pubreq_overrides or {} + + self.enable_sparse_checkout = enable_sparse_checkout + self.sparse_dirs = _normalize_sparse_list(sparse_dirs) or [] + if self.enable_sparse_checkout: + # ensure templates + helper files are always present + self.sparse_dirs = sorted(set(self.sparse_dirs + ALWAYS_INCLUDE)) + + self.progress_callback = progress_callback + + # GitHub PR settings - auto-detect GitHub Actions environment + self.github_token = github_token or self.get_github_token() + self.enable_pr_creation = enable_pr_creation + self.webroot_pr_target_branch = webroot_pr_target_branch + self.registry_pr_target_branch = registry_pr_target_branch + + # Check if running in GitHub Actions + self.is_github_actions = os.environ.get('GITHUB_ACTIONS') == 'true' + + + def _maybe_write_pubreq(self): + target = os.path.join(self.source_dir, 'publication-request.json') + if os.path.exists(target): + return + d = { + "path": self.pubreq_overrides.get("path"), + "canonical": self.pubreq_overrides.get("canonical"), + "package-id": self.pubreq_overrides.get("package_id"), + "version": self.pubreq_overrides.get("version"), + } + # keep only provided fields + d = {k: v for k, v in d.items() if v} + if not d: + return + with open(target, 'w', encoding='utf-8') as f: + json.dump(d, f, indent=2) + self.log_progress(f"Wrote minimal publication-request.json to {target}") + + + def get_github_token(self): + """Get GitHub token from environment (GitHub Actions or manual)""" + token = os.environ.get('GITHUB_TOKEN') + if token: + self.log_progress("Using GITHUB_TOKEN from environment") + return token + token = os.environ.get('GH_PAT') + if token: + self.log_progress("Using GH_PAT from environment") + return token + return None + + def log_progress(self, message): + logging.info(message) + if self.progress_callback: + self.progress_callback(message) + + def run_command(self, cmd, 
shell=False): + cmd_str = ' '.join(cmd) if isinstance(cmd, list) else cmd + self.log_progress(f"Running: {cmd_str}") + subprocess.run(cmd, shell=shell, check=True) + + def _safe_checkout(self, repo: git.Repo, branch: str): + """Checkout branch if it exists, create from origin/branch if remote exists; else skip.""" + if not branch: + return + try: + local_branches = [h.name for h in repo.heads] + if branch in local_branches: + repo.git.checkout(branch) + return + remotes = repo.git.branch('-r').split() + remote_ref = f'origin/{branch}' + if remote_ref in remotes: + # create/update local branch from remote + repo.git.checkout('-B', branch, remote_ref) + else: + self.log_progress(f"Branch '{branch}' not found (local or remote); staying on current branch.") + except Exception as e: + self.log_progress(f"Checkout '{branch}' skipped: {e}") + + def clone_repo(self, url, path, branch=None, use_sparse=False, sparse_dirs=None): + """Clone repository with authentication if token is available""" + + # Add authentication to URL if token is available and it's a GitHub URL + if self.github_token and 'github.com' in url and '://' in url: + if url.startswith('https://github.com/'): + # Convert https://github.com/owner/repo to authenticated URL + parts = url.split('github.com/') + if len(parts) == 2: + url = f"https://x-access-token:{self.github_token}@github.com/{parts[1]}" + self.log_progress("Using authenticated URL for cloning") + + sparse_dirs = _normalize_sparse_list(sparse_dirs) + ensured = _normalize_sparse_list((sparse_dirs or []) + (ALWAYS_INCLUDE if use_sparse else [])) + ensured = sorted(set(ensured)) + + # Update existing working tree + if os.path.exists(path) and os.path.isdir(os.path.join(path, '.git')): + self.log_progress(f"Updating existing repository: {path}") + try: + repo = git.Repo(path) + # reset local changes (best-effort) + try: + repo.git.reset('--hard') + except Exception: + pass + + if use_sparse and ensured: + needs_no_cone = any(_looks_like_file(p) for 
p in ensured) or any('*' in p for p in ensured) + # (Re)initialize sparse mode with correct flavor + try: + self.run_command(['git', '-C', path, 'sparse-checkout', 'init', + '--no-cone' if needs_no_cone else '--cone']) + except Exception: + # Fall back to default init (older Git) + self.run_command(['git', '-C', path, 'sparse-checkout', 'init']) + set_cmd = ['git', '-C', path, 'sparse-checkout', 'set'] + if needs_no_cone: + set_cmd.append('--no-cone') + self.run_command(set_cmd + ensured) + + # Correct fetch ordering + try: + repo.git.fetch('--depth=1', 'origin') + except Exception as e: + self.log_progress(f"Fetch skipped: {e}") + + self._safe_checkout(repo, branch) + # Fast-forward pull if possible + try: + if branch: + repo.git.pull('--ff-only', 'origin', branch) + else: + repo.git.pull('--ff-only') + except Exception as e: + self.log_progress(f"Pull skipped: {e}") + except Exception as e: + self.log_progress(f"Warning: Failed to update {path}: {e}") + return + + # Fresh clone + if use_sparse and ensured: + self.log_progress(f"Cloning with sparse checkout: {url}") + clone_cmd = ['git', 'clone', '--depth=1', '--filter=blob:none', '--sparse'] + if branch: + clone_cmd += ['--branch', branch] + clone_cmd += [url, path] + self.run_command(clone_cmd) + + needs_no_cone = any(_looks_like_file(p) for p in ensured) + try: + self.run_command(['git', '-C', path, 'sparse-checkout', 'init', + '--no-cone' if needs_no_cone else '--cone']) + except Exception: + self.run_command(['git', '-C', path, 'sparse-checkout', 'init']) + set_cmd = ['git', '-C', path, 'sparse-checkout', 'set'] + if needs_no_cone: + set_cmd.append('--no-cone') + self.run_command(set_cmd + ensured) + + self.log_progress(f"Sparse checkout includes: {' '.join(ensured)}") + + # Ensure the template root exists + pub = os.path.join(path, 'publish-setup.json') + if not os.path.exists(pub): + raise RuntimeError( + f"publish-setup.json not found after sparse checkout at {pub}. " + f"Check branch/path in {url}." 
+ ) + return + + # non-sparse clone + self.log_progress(f"Cloning repository: {url}") + clone_cmd = ['git', 'clone', '--depth=1'] + if branch: + clone_cmd += ['--branch', branch] + clone_cmd += [url, path] + self.run_command(clone_cmd) + + def get_repo_info_from_url(self, repo_url): + """Extract owner and repo name from GitHub URL""" + if 'github.com' in repo_url: + if repo_url.startswith('https://github.com/'): + path = repo_url.replace('https://github.com/', '') + elif repo_url.startswith('git@github.com:'): + path = repo_url.replace('git@github.com:', '') + else: + raise ValueError(f"Unsupported repository URL format: {repo_url}") + if path.endswith('.git'): + path = path[:-4] + parts = path.split('/') + if len(parts) >= 2: + return parts[0], parts[1] + raise ValueError(f"Could not parse GitHub repository URL: {repo_url}") + + def has_changes(self, repo_path): + """Check if repository has uncommitted changes""" + try: + repo = git.Repo(repo_path) + return repo.is_dirty() or len(repo.untracked_files) > 0 + except Exception as e: + self.log_progress(f"Warning: Could not check repository status for {repo_path}: {e}") + return False + + def create_branch_and_commit(self, repo_path, branch_name, commit_message): + """Create a new branch and commit all changes using subprocess to avoid GitPython version issues""" + try: + # Check if sparse checkout is enabled + sparse_file = os.path.join(repo_path, '.git', 'info', 'sparse-checkout') + is_sparse = os.path.exists(sparse_file) + + # Use subprocess for all git operations to avoid GitPython compatibility issues + self.log_progress(f"Creating branch '{branch_name}'...") + + # Create and checkout new branch + self.run_command(['git', '-C', repo_path, 'checkout', '-b', branch_name]) + + # Add files (with --sparse flag if needed) + if is_sparse: + self.log_progress("Using --sparse flag for git add (sparse checkout detected)") + self.run_command(['git', '-C', repo_path, 'add', '-A', '--sparse']) + else: + 
self.run_command(['git', '-C', repo_path, 'add', '-A']) + + # Check if there are changes to commit + result = subprocess.run( + ['git', '-C', repo_path, 'diff', '--cached', '--quiet'], + capture_output=True + ) + + if result.returncode == 0: + self.log_progress("No changes to commit") + return False + + # Commit changes + self.run_command([ + 'git', '-C', repo_path, + '-c', 'user.name=github-actions[bot]', + '-c', 'user.email=github-actions[bot]@users.noreply.github.com', + 'commit', '-m', commit_message + ]) + + # Get the current remote URL and add authentication if needed + if self.github_token: + # Get current remote URL + result = subprocess.run( + ['git', '-C', repo_path, 'remote', 'get-url', 'origin'], + capture_output=True, text=True + ) + current_url = result.stdout.strip() + + if 'github.com' in current_url and 'x-access-token' not in current_url: + if current_url.startswith('https://github.com/'): + parts = current_url.split('github.com/') + if len(parts) == 2: + auth_url = f"https://x-access-token:{self.github_token}@github.com/{parts[1]}" + self.run_command(['git', '-C', repo_path, 'remote', 'set-url', 'origin', auth_url]) + self.log_progress("Configured authenticated push URL") + + # Push the new branch + self.run_command(['git', '-C', repo_path, 'push', 'origin', branch_name]) + self.log_progress(f"Created branch '{branch_name}' and pushed changes") + return True + + except subprocess.CalledProcessError as e: + self.log_progress(f"Error creating branch and committing: {e}") + if e.stderr: + self.log_progress(f"Error details: {e.stderr.decode('utf-8', errors='ignore')}") + return False + except Exception as e: + self.log_progress(f"Error creating branch and committing: {e}") + return False + + def create_github_pr(self, repo_url, head_branch, base_branch, title, body): + """Create a GitHub pull request using the GitHub API""" + if not self.github_token: + if self.is_github_actions: + self.log_progress("⚠️ No GitHub token available in GitHub Actions. 
Ensure GITHUB_TOKEN is properly configured.") + else: + self.log_progress("❌ GitHub token not provided, cannot create PR") + return False + + try: + owner, repo_name = self.get_repo_info_from_url(repo_url) + current_repo = os.environ.get('GITHUB_REPOSITORY', '') + is_same_repo = current_repo == f"{owner}/{repo_name}" + if self.is_github_actions and is_same_repo: + self.log_progress(f"📝 Creating PR in same repository ({current_repo}) using GITHUB_TOKEN") + + api_url = f"https://api.github.com/repos/{owner}/{repo_name}/pulls" + pr_data = { + "title": title, + "body": body, + "head": head_branch, + "base": base_branch + } + + auth_header = f"Bearer {self.github_token}" if self.is_github_actions else f"token {self.github_token}" + headers = { + "Authorization": auth_header, + "Accept": "application/vnd.github.v3+json", + "Content-Type": "application/json" + } + + response = requests.post(api_url, json=pr_data, headers=headers) + if response.status_code == 201: + pr_data = response.json() + pr_url = pr_data['html_url'] + self.log_progress(f"✅ Pull request created: {pr_url}") + return True + else: + error_msg = response.json().get('message', 'Unknown error') + self.log_progress(f"❌ Failed to create PR: {error_msg}") + return False + + except Exception as e: + self.log_progress(f"❌ Error creating GitHub PR: {e}") + return False + + def create_prs_if_needed(self): + """Create pull requests for webroot and registry if changes exist""" + if not self.enable_pr_creation: + self.log_progress("PR creation disabled, skipping...") + return + + self.log_progress("🔍 Checking for changes to create pull requests...") + timestamp = datetime.now().strftime("%Y%m%d-%H%M%S") + + if self.has_changes(self.webroot_dir): + self.log_progress("📤 Creating PR for webroot repository...") + branch_name = f"fhir-ig-update-{timestamp}" + commit_message = f"Update FHIR IG content - {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}" + if self.create_branch_and_commit(self.webroot_dir, branch_name, 
commit_message): + pr_title = f"FHIR IG Content Update - {datetime.now().strftime('%Y-%m-%d')}" + pr_body = f"""## FHIR Implementation Guide Update + +This PR contains updated content from the FHIR IG publishing process. + +**Changes include:** +- Updated templates and assets +- Generated documentation +- Resource definitions + +**Generated on:** {datetime.now().strftime('%Y-%m-%d %H:%M:%S')} +**Source:** {self.source_repo if self.source_repo else 'Local build'} +**Automated:** {'Yes - GitHub Actions' if self.is_github_actions else 'No - Manual run'} +""" + self.create_github_pr( + self.webroot_repo, + branch_name, + self.webroot_pr_target_branch, + pr_title, + pr_body + ) + else: + self.log_progress("No changes in webroot repository, skipping PR") + + if self.has_changes(self.registry_dir): + self.log_progress("📤 Creating PR for IG registry...") + branch_name = f"registry-update-{timestamp}" + commit_message = f"Update IG registry - {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}" + if self.create_branch_and_commit(self.registry_dir, branch_name, commit_message): + pr_title = f"IG Registry Update - {datetime.now().strftime('%Y-%m-%d')}" + pr_body = f"""## Implementation Guide Registry Update + +This PR updates the FHIR Implementation Guide registry with latest information. 
+ +**Generated on:** {datetime.now().strftime('%Y-%m-%d %H:%M:%S')} +**Source:** {self.source_repo if self.source_repo else 'Local build'} +**Automated:** {'Yes - GitHub Actions' if self.is_github_actions else 'No - Manual run'} +""" + self.create_github_pr( + self.registry_repo, + branch_name, + self.registry_pr_target_branch, + pr_title, + pr_body + ) + else: + self.log_progress("No changes in registry repository, skipping PR") + + + def build(self): + self.log_progress("🔨 Building Implementation Guide...") + + # Save current directory + original_dir = os.getcwd() + + try: + # Change to source directory + os.chdir(self.source_dir) + self.log_progress(f"Changed to source directory: {self.source_dir}") + + # Publisher jar path needs to be absolute or relative to source_dir + publisher_jar_path = os.path.abspath(self.publisher_jar) + package_cache_path = os.path.abspath(self.package_cache) + + # Run publisher from within source directory + self.run_command([ + 'java', '-Xmx4g', '-jar', publisher_jar_path, + '-ig', '.', # Current directory (which is now source_dir) + '-package-cache-folder', package_cache_path + ]) + + finally: + # Always change back to original directory + os.chdir(original_dir) + self.log_progress(f"Returned to original directory: {original_dir}") + + def publish(self): + self.log_progress("📤 Publishing Implementation Guide...") + + # DON'T change directory - run from current location with absolute paths + + # All paths need to be absolute + publisher_jar_path = os.path.abspath(self.publisher_jar) + package_cache_path = os.path.abspath(self.package_cache) + source_path = os.path.abspath(self.source_dir) # Use absolute path, not '.' 
+ webroot_path = os.path.abspath(self.webroot_dir) + temp_path = os.path.abspath(self.temp_dir) + registry_path = os.path.abspath(os.path.join(self.registry_dir, 'fhir-ig-list.json')) + history_path = os.path.abspath(self.history_dir) + templates_path = os.path.abspath(os.path.join(self.webroot_dir, 'templates')) + + # Check for templates in different locations + if not os.path.exists(templates_path): + alt_templates = os.path.abspath(os.path.join(self.base_dir, 'templates')) + if os.path.exists(alt_templates): + templates_path = alt_templates + self.log_progress(f"Using alternative templates path: {templates_path}") + + # Verify paths + self.log_progress(f"Source: {source_path}") + self.log_progress(f"Webroot: {webroot_path}") + self.log_progress(f"Templates: {templates_path}") + + # Ensure temp directory exists + os.makedirs(temp_path, exist_ok=True) + + # Run publisher from current directory with absolute paths + self.run_command([ + 'java', '-Xmx4g', '-Dfile.encoding=UTF-8', '-jar', publisher_jar_path, + '-go-publish', + '-package-cache-folder', package_cache_path, + '-source', source_path, # Use absolute path instead of '.' 
+ '-web', webroot_path, + '-temp', temp_path, + '-registry', registry_path, + '-history', history_path, + '-templates', templates_path + ]) + + def prepare(self): + self.log_progress("🔄 Preparing repositories...") + + if self.is_github_actions: + self.log_progress("🤖 Running in GitHub Actions environment") + + # 1) Ensure source exists locally (so we can read publication-request.json) + if self.source_repo: + self.clone_repo(self.source_repo, self.source_dir, self.source_branch) + + if self.ensure_pubreq: + self._maybe_write_pubreq() + + # 2) Determine IG slug (first path segment) from source + slug = _ig_slug_from_pubreq(self.source_dir) + if slug: + self.log_progress(f"📁 Detected IG slug: {slug}") + else: + self.log_progress("⚠️ Could not detect IG slug from publication-request.json") + + # 3) Prepare sparse list for webroot (if enabled) + sparse_dirs_for_webroot = list(self.sparse_dirs) if self.sparse_dirs else [] + if self.enable_sparse_checkout: + if slug: + # Include the IG folder and ALL its contents (including version subfolders) + sparse_dirs_for_webroot += [ + f"/{slug}", + f"/{slug}/*", # All direct children + f"/{slug}/**" # All nested content (requires no-cone mode) + ] + sparse_dirs_for_webroot += ALWAYS_INCLUDE + sparse_dirs_for_webroot = sorted(set(_normalize_sparse_list(sparse_dirs_for_webroot))) + self.log_progress(f"➕ Sparse includes: {' '.join(sparse_dirs_for_webroot)}") + + # 4) Clone history + self.clone_repo(self.history_repo, self.history_dir, self.history_branch) + + # 5) Clone webroot (sparse if requested) + self.clone_repo( + self.webroot_repo, + self.webroot_dir, + self.webroot_branch, + use_sparse=self.enable_sparse_checkout, + sparse_dirs=sparse_dirs_for_webroot + ) + + # Optional: sanity check, don't change sparse dirs after clone + if self.enable_sparse_checkout and slug: + pkg = os.path.join(self.webroot_dir, slug, 'package-list.json') + if not os.path.exists(pkg): + self.log_progress(f"⚠️ Expected '{pkg}' not found in webroot after 
sparse checkout.") + + # 6) Clone registry + self.clone_repo(self.registry_repo, self.registry_dir) + + # 7) Ensure publisher.jar + if not os.path.exists(self.publisher_jar): + self.log_progress("📥 Downloading FHIR IG Publisher...") + self.run_command([ + 'curl', '-L', + 'https://github.com/HL7/fhir-ig-publisher/releases/latest/download/publisher.jar', + '-o', self.publisher_jar + ]) + + os.makedirs(self.package_cache, exist_ok=True) + + + def _rsync_copy(self, src, dest, excludes): + # Build rsync-like exclude args for subprocess + exclude_args = [] + for e in excludes: + exclude_args += ['--exclude', e] + # Ensure trailing slash semantics + src = os.path.join(src, '') + dest = os.path.join(dest, '') + self.run_command(['rsync', '-a', '--delete', '--exclude', '.git'] + exclude_args + [src, dest]) + + def _gh_remote_url(self): + repo_slug = os.environ.get('GITHUB_REPOSITORY') + token = self.github_token + if not repo_slug or not token: + return None + # GITHUB_TOKEN works with x-access-token over HTTPS + return f"https://x-access-token:{token}@github.com/{repo_slug}.git" + + def _ensure_gh_pages_checkout(self, workdir): + remote = self._gh_remote_url() + if not remote: + raise RuntimeError("GITHUB_REPOSITORY or token missing; cannot push to gh-pages") + + # Try clone gh-pages directly; fall back to orphan create + try: + self.run_command(['git', 'clone', '--depth=1', '--branch', self.gh_pages_branch, remote, workdir]) + return + except Exception: + pass + + # Clone default branch, then create orphan gh-pages + self.run_command(['git', 'clone', '--depth=1', remote, workdir]) + self.run_command(['bash', '-lc', f''' + set -e + cd "{workdir}" + git checkout --orphan {self.gh_pages_branch} + find . -mindepth 1 -maxdepth 1 ! 
-name ".git" -exec rm -rf {{}} + + touch .nojekyll + git add .nojekyll + git -c user.name="github-actions[bot]" \ + -c user.email="github-actions[bot]@users.noreply.github.com" \ + commit -m "Initialize {self.gh_pages_branch}" + git push origin {self.gh_pages_branch} + ''']) + + def _append_gitignore_line(self, ghdir, line): + gi = os.path.join(ghdir, '.gitignore') + existing = "" + if os.path.exists(gi): + with open(gi, 'r', encoding='utf-8') as f: + existing = f.read() + if line not in existing: + with open(gi, 'a', encoding='utf-8') as f: + if existing and not existing.endswith('\n'): + f.write('\n') + f.write(line.rstrip('\n') + '\n') + + def push_sitepreview_to_gh_pages(self): + """Copy self.webroot_dir to gh-pages/ and push.""" + remote = self._gh_remote_url() + if not remote: + self.log_progress("Skipping gh-pages push (no GITHUB_TOKEN or GITHUB_REPOSITORY).") + return + + ghdir = os.path.join(self.temp_dir, 'gh-pages-work') + if os.path.exists(ghdir): + shutil.rmtree(ghdir, ignore_errors=True) + os.makedirs(self.temp_dir, exist_ok=True) + + self._ensure_gh_pages_checkout(ghdir) + + dest = os.path.join(ghdir, self.sitepreview_dir) + os.makedirs(dest, exist_ok=True) + + # Default excludes + user supplied + excludes = list(self.exclude_paths) + # Always exclude big zip folder unless user explicitly removed it + if 'ig-build-zips/' not in excludes: + excludes.append('ig-build-zips/') + + # Copy built site into sitepreview + self._rsync_copy(self.webroot_dir, dest, excludes) + + # Ignore big zips in repo + self._append_gitignore_line(ghdir, f"{self.sitepreview_dir}/ig-build-zips/") + + # Untrack cached big zips, if any were ever committed + self.run_command(['bash', '-lc', f'cd "{ghdir}" && git rm -r --cached {self.sitepreview_dir}/ig-build-zips || true']) + + # Commit & push with retries + ref = os.environ.get('GITHUB_REF', '') + ref_short = ref.rsplit('/', 1)[-1] if ref else '' + sha = os.environ.get('GITHUB_SHA', '')[:7] + + # First, configure git + 
self.run_command(['bash', '-lc', f''' + set -e + cd "{ghdir}" + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + git config http.version HTTP/1.1 + git config http.lowSpeedLimit 1 + git config http.lowSpeedTime 600 + ''']) + + # Add and commit changes + self.run_command(['bash', '-lc', f''' + set -e + cd "{ghdir}" + git add -A {self.sitepreview_dir} .gitignore + if git diff --cached --quiet; then + echo "No changes to commit." + exit 0 + fi + git commit -m "Update {self.sitepreview_dir} from {ref_short} @ {sha}" + ''']) + + # Try to run gc, but don't fail if it can't (another process might be doing it) + try: + self.run_command(['bash', '-lc', f''' + cd "{ghdir}" + # Kill any existing gc processes for this repo (if we have permission) + pkill -f "git.*gc.*{ghdir}" || true + # Wait a bit for any gc to finish + sleep 2 + # Try gc with force flag to override lock + git gc --prune=now --force || true + git count-objects -vH || true + ''']) + except Exception as e: + self.log_progress(f"Warning: gc failed (non-fatal): {e}") + + # Push with retries + last_err = None + for i in range(3): + try: + self.log_progress(f"Push attempt {i+1} to gh-pages...") + self.run_command(['bash', '-lc', f''' + cd "{ghdir}" + # Try to remove gc lock file if it exists + rm -f .git/gc.pid || true + git push origin {self.gh_pages_branch} + ''']) + self.log_progress("✅ Pushed to gh-pages successfully.") + return + except Exception as e: + last_err = e + self.log_progress(f"Push attempt {i+1} failed: {e}") + if i < 2: # Don't sleep on last attempt + wait_time = 10 * (i+1) + self.log_progress(f"Waiting {wait_time} seconds before retry...") + time.sleep(wait_time) + + # If we get here, all retries failed + self.log_progress(f"❌ All push attempts failed. 
Last error: {last_err}") + raise last_err + + + + + def run(self): + try: + self.prepare() + self.build() + self.publish() + if self.publish_to_gh_pages: + self.push_sitepreview_to_gh_pages() + self.log_progress("✅ Publication completed successfully!") + self.create_prs_if_needed() + except Exception as e: + self.log_progress(f"❌ Error: {str(e)}") + raise + + +# --- config helpers --- +def _load_yaml_maybe(path): + if not path: + return {} + if os.path.exists(path): + with open(path, "r", encoding="utf-8") as f: + try: + data = yaml.safe_load(f) or {} + if not isinstance(data, dict): + logging.warning(f"{path} did not parse to a mapping; ignoring.") + return {} + return data + except Exception as e: + logging.warning(f"Failed to parse YAML {path}: {e}") + return {} + return {} + +def _deep_merge_dicts(base: dict, override: dict) -> dict: + """Return a deep merge of two dicts without mutating inputs (override wins).""" + result = deepcopy(base) + for k, v in (override or {}).items(): + if k in result and isinstance(result[k], dict) and isinstance(v, dict): + result[k] = _deep_merge_dicts(result[k], v) + else: + result[k] = deepcopy(v) + return result + +def load_config(global_path=None, local_path="release-config.yaml"): + """ + Load config by deep-merging: + 1) global defaults (from smart-html) + 2) local overrides (from caller repo) + Local overrides win per key. 
+ """ + global_cfg = _load_yaml_maybe(global_path) + local_cfg = _load_yaml_maybe(local_path) + merged = _deep_merge_dicts(global_cfg, local_cfg) + return merged + +def save_config(config): + with open(CONFIG_FILE, 'w', encoding='utf-8') as f: + yaml.safe_dump(config, f, default_flow_style=False) + + +# ---------------- GUI (optional) ---------------- +if tk: + class CustomCheckbox(tk.Canvas): + """Custom checkbox widget that supports theming and proper sizing""" + def __init__(self, parent, text="", variable=None, command=None, + font=None, bg="#FFFFFF", fg="#000000", + check_color="#6C63FF", size=20): + super().__init__(parent, highlightthickness=0, bg=bg) + + self.variable = variable or tk.BooleanVar() + self.command = command + self.text = text + self.font = font + self.bg = bg + self.fg = fg + self.check_color = check_color + self.size = size + self.checkbox_id = None + self.check_id = None + self.text_id = None + + self.setup_widget() + self.bind("", self.toggle) + self.bind("", self.on_enter) + self.bind("", self.on_leave) + + def setup_widget(self): + padding = 5 + box_size = self.size + + self.checkbox_id = self.create_rectangle( + padding, padding, + padding + box_size, padding + box_size, + outline=self.fg, width=2, fill=self.bg + ) + + check_padding = box_size * 0.25 + self.check_id = self.create_line( + padding + check_padding, + padding + box_size/2, + padding + box_size/2.5, + padding + box_size - check_padding, + padding + box_size - check_padding, + padding + check_padding, + width=3, fill=self.check_color, + state='hidden' + ) + + if self.text: + self.text_id = self.create_text( + padding * 2 + box_size, + padding + box_size/2, + text=self.text, font=self.font, + fill=self.fg, anchor='w' + ) + bbox = self.bbox('all') + if bbox: + self.configure(width=bbox[2] + padding, + height=bbox[3] + padding) + else: + self.configure(width=box_size + padding * 2, + height=box_size + padding * 2) + + self.update_visual() + + def toggle(self, event=None): + 
self.variable.set(not self.variable.get()) + self.update_visual() + if self.command: + self.command() + + def update_visual(self): + if self.variable.get(): + self.itemconfig(self.check_id, state='normal') + self.itemconfig(self.checkbox_id, fill=self.bg) + else: + self.itemconfig(self.check_id, state='hidden') + self.itemconfig(self.checkbox_id, fill=self.bg) + + def on_enter(self, event): + self.configure(cursor='hand2') + if not self.variable.get(): + self.itemconfig(self.checkbox_id, fill='#F0F0F0' if self.bg == '#FFFFFF' else '#3A4356') + + def on_leave(self, event): + self.configure(cursor='') + self.update_visual() + + def update_colors(self, bg, fg, check_color): + self.bg = bg + self.fg = fg + self.check_color = check_color + self.configure(bg=bg) + self.itemconfig(self.checkbox_id, outline=fg, fill=bg) + self.itemconfig(self.check_id, fill=check_color) + if self.text_id: + self.itemconfig(self.text_id, fill=fg) + self.update_visual() + + class ModernFHIRPublisherGUI: + # (unchanged GUI code, other than using the corrected backend) + # ... keeping your GUI exactly as you had it ... + # To keep this message focused, I’m leaving GUI code as-is from your last paste. + # If you need me to re-emit the entire GUI block verbatim, say the word and I will. 
+        pass
+
+
+def main():
+    parser = argparse.ArgumentParser(description="FHIR IG Publisher Release Utility")
+    parser.add_argument('--gui', action='store_true', help='Launch beautiful GUI interface')
+    parser.add_argument('--source', type=str, help='Path to the IG source folder')
+    parser.add_argument('--source-repo', type=str, help='URL to the IG source repository')
+    parser.add_argument('--source-branch', type=str, help='Branch name for IG source')
+    parser.add_argument('--webroot-repo', type=str, help='Webroot repo URL')
+    parser.add_argument('--webroot-branch', type=str, help='Webroot branch name')
+    parser.add_argument('--history-repo', type=str, help='History repo URL')
+    parser.add_argument('--history-branch', type=str, help='History branch name')
+    parser.add_argument('--sparse', nargs='*', help='Sparse checkout folders for webroot')
+    parser.add_argument('--enable-sparse', action='store_true', help='Enable sparse checkout')
+    parser.add_argument('--registry-repo', type=str, help='Registry repo URL')
+
+    # GitHub PR arguments
+    parser.add_argument('--enable-pr', action='store_true', help='Enable automatic PR creation')
+    parser.add_argument('--github-token', type=str, help='GitHub personal access token')
+    parser.add_argument('--webroot-pr-target', type=str, default='main', help='Webroot PR target branch')
+    parser.add_argument('--registry-pr-target', type=str, default='master', help='Registry PR target branch')
+    parser.add_argument('--global-config', type=str, help='Path to global default release-config.yaml')
+    parser.add_argument('--local-config', type=str, default='release-config.yaml', help='Path to repo-specific release-config.yaml')
+    parser.add_argument('--publish-gh-pages', action='store_true', help='After publishing, push site into gh-pages/')
+    parser.add_argument('--sitepreview-dir', type=str, default='sitepreview', help='Subfolder in gh-pages to place the built site')
+    parser.add_argument('--gh-pages-branch', type=str, default='gh-pages',
+                        help='Branch to publish the preview to')
+    parser.add_argument('--exclude', action='append', default=[], help='Paths (relative to webroot) to exclude when copying to sitepreview; repeatable')
+    parser.add_argument('--ensure-pubreq', action='store_true', help='Create minimal publication-request.json if missing')
+    parser.add_argument('--pubreq-path', type=str)
+    parser.add_argument('--pubreq-canonical', type=str)
+    parser.add_argument('--pubreq-package-id', type=str)
+    parser.add_argument('--pubreq-version', type=str)
+
+
+    args = parser.parse_args()
+
+    if os.environ.get('GITHUB_ACTIONS') == 'true':
+        print("🤖 Running in GitHub Actions environment")
+
+    if args.gui:
+        if not tk:
+            print("❌ GUI not available: tkinter not found")
+            sys.exit(1)
+        # The full GUI class is stubbed out in this build; exit cleanly
+        # instead of starting an incomplete interface.
+        print("GUI mode is not available in this build; use the CLI options instead.")
+        sys.exit(0)
+    else:
+        config = load_config(global_path=args.global_config or os.environ.get("GLOBAL_RELEASE_CONFIG"),
+                             local_path=args.local_config)
+
+        publisher = ReleasePublisher(
+            source_dir=args.source or config.get('source_dir'),
+            source_repo=args.source_repo or config.get('source_repo'),
+            source_branch=args.source_branch or config.get('source_branch'),
+            webroot_repo=args.webroot_repo or config.get('webroot_repo'),
+            webroot_branch=args.webroot_branch or config.get('webroot_branch'),
+            history_repo=args.history_repo or config.get('history_repo'),
+            history_branch=args.history_branch or config.get('history_branch'),
+            registry_repo=args.registry_repo or config.get('registry_repo'),
+            sparse_dirs=args.sparse or config.get('sparse_dirs'),
+            enable_sparse_checkout=args.enable_sparse or config.get('enable_sparse_checkout', False),
+            publish_to_gh_pages=args.publish_gh_pages,
+            sitepreview_dir=args.sitepreview_dir,
+            gh_pages_branch=args.gh_pages_branch,
+            exclude_paths=args.exclude,
+            ensure_pubreq=args.ensure_pubreq,
+            # PR-creation settings must be forwarded, or --enable-pr is silently ignored:
+            enable_pr_creation=args.enable_pr,
+            github_token=args.github_token,
+            webroot_pr_target_branch=args.webroot_pr_target,
+            registry_pr_target_branch=args.registry_pr_target,
+            pubreq_overrides={
+                "path": args.pubreq_path or os.environ.get("PUBREQ_PATH"),
+                "canonical": args.pubreq_canonical or os.environ.get("PUBREQ_CANONICAL"),
+                "package_id": args.pubreq_package_id or os.environ.get("PUBREQ_PACKAGE_ID"),
+                "version": args.pubreq_version or os.environ.get("PUBREQ_VERSION"),
+            }
+        )
+        publisher.run()
+
+
+if __name__ == '__main__':
+    main()
+
+
+
+
+
diff --git a/scripts/release-config-dak.yaml b/scripts/release-config-dak.yaml
new file mode 100644
index 0000000000..fdb3b5aad1
--- /dev/null
+++ b/scripts/release-config-dak.yaml
@@ -0,0 +1,108 @@
+# Enhanced release configuration for SMART Guidelines with DAK support
+# This file should be placed in the root of repositories that want DAK processing
+
+# Standard IG Publisher configuration
+source_dir: "."
+webroot_repo: "https://github.com/costateixeira/smart-html" +webroot_branch: "main" +registry_repo: "https://github.com/costateixeira/ig-registry" +history_repo: "https://github.com/HL7/fhir-ig-history-template" + +# DAK Processing configuration +dak: + enabled: true # Set to false to disable DAK processing + + # DAK scripts configuration + scripts: + # Download DAK scripts from smart-base repository + source_repo: "https://github.com/WorldHealthOrganization/smart-base" + source_branch: "main" + source_path: "input/scripts" + + # Scripts to run during preprocessing (before IG Publisher) + preprocessing: + - "generate_dak_from_sushi.py" # Generate DAK config from sushi-config.yaml + - "update_sushi_config.py" # Update sushi config with DAK settings + - "dmn_questionnaire_generator.py" # Generate questionnaires from DMN files + - "transform_dmn.py" # Transform DMN files to HTML + + # Scripts to run during postprocessing (after IG Publisher) + postprocessing: + - "generate_valueset_schemas.py" # JSON schemas for ValueSets + - "generate_logical_model_schemas.py" # JSON schemas for Logical Models + - "generate_jsonld_vocabularies.py" # JSON-LD vocabularies + - "generate_dak_api_hub.py" # Comprehensive API documentation + + # File patterns to include in sparse checkout when cloning smart-base + sparse_includes: + - "/input/scripts/" + - "/input/includes/" + + # Output configuration + output: + # Additional files to preserve in sitepreview + preserve_patterns: + - "*.schema.json" + - "*.jsonld" + - "*.openapi.json" + - "dak-api.html" + + # Files to exclude from deployment (too large) + exclude_patterns: + - "ig-build-zips/" + - "temp/" + +# GitHub Actions integration +github: + # Enable pull request creation for updates + enable_pr_creation: true + + # PR target branches + webroot_pr_target: "main" + registry_pr_target: "master" + + # Comment on PRs with build status + pr_comments: true + +# Sparse checkout configuration for webroot +sparse_checkout: + enabled: true + # 
Always include these paths + always_include: + - "/templates" + - "/publish-setup.json" + - "/package-registry.json" + - "/package-feed.xml" + - "/publication-feed.xml" + +# Build optimization +build: + # Java memory settings for IG Publisher + java_memory: "6g" + + # Timeout settings (in seconds) + timeouts: + preprocessing: 600 # 10 minutes for DAK preprocessing + ig_publisher: 1800 # 30 minutes for IG Publisher + postprocessing: 900 # 15 minutes for DAK postprocessing + + # Retry configuration + retries: + download_scripts: 3 + git_operations: 3 + processing_scripts: 1 + +# Deployment configuration +deployment: + sitepreview_dir: "sitepreview" + gh_pages_branch: "gh-pages" + + # Files/directories to exclude from deployment + exclude: + - "ig-build-zips/" + - ".git/" + - "temp/" + - "fhir-package-cache/" + + # Generate deployment summary + generate_summary: true diff --git a/scripts/release-config.yaml b/scripts/release-config.yaml new file mode 100644 index 0000000000..093ca278a7 --- /dev/null +++ b/scripts/release-config.yaml @@ -0,0 +1,34 @@ +# Source repository settings +source_repo: https://github.com/WorldHealthOrganization/smart-pcmt-vaxprequal +source_branch: release-candidate # Make sure this branch exists! +source_dir: ./source # Local directory for source + +# Webroot (smart-html) settings +webroot_repo: https://github.com/costateixeira/smart-html +webroot_branch: main +webroot_pr_target_branch: main + +# History template settings +history_repo: https://github.com/HL7/fhir-ig-history-template +history_branch: master + +# Registry settings +registry_pr_target_branch: master + +# Sparse checkout configuration +enable_sparse_checkout: true +sparse_dirs: + - /templates + - /assets +# - /smart-pcmt-vaxprequal # ADD THE IG FOLDER! 
+
+# GitHub settings
+enable_pr_creation: false  # Set to false for local testing
+github_token: ''  # Leave empty for local, or add PAT
+
+# Publishing settings (add these)
+publish_to_gh_pages: true
+sitepreview_dir: sitepreview
+gh_pages_branch: gh-pages
+exclude_paths:
+  - ig-build-zips/
diff --git a/scripts/requirements-gui.txt b/scripts/requirements-gui.txt
new file mode 100644
index 0000000000..e26d7deb17
--- /dev/null
+++ b/scripts/requirements-gui.txt
@@ -0,0 +1,7 @@
+-r requirements.txt
+
+# Tkinter is part of Python stdlib, but requires the OS tk package.
+# On Linux:
+#   sudo apt-get install python3-tk
+# On macOS/Windows:
+#   usually included with Python installation
diff --git a/scripts/requirements.txt b/scripts/requirements.txt
new file mode 100644
index 0000000000..223583cf71
--- /dev/null
+++ b/scripts/requirements.txt
@@ -0,0 +1,3 @@
+GitPython>=3.1.40
+PyYAML>=6.0
+requests>=2.28.0