From 5b951c50ed86d8c02c45559f2a4b5941e02de58c Mon Sep 17 00:00:00 2001
From: Erik
Date: Sun, 11 Jan 2026 18:37:06 +0100
Subject: [PATCH 01/35] Add GitHub Actions workflow for release automation
Setup a release workflow triggered by tag pushes and manual dispatch. Includes validation, testing, SBOM generation, and automated deployment to Maven Central and GitHub Releases.
---
.github/workflows/release.yml | 197 ++++++++++++++++++++++++++++++++++
1 file changed, 197 insertions(+)
create mode 100644 .github/workflows/release.yml
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 0000000..6f63087
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,197 @@
+name: Release
+
+on:
+ push:
+ tags:
+ - 'v*'
+ workflow_dispatch:
+ inputs:
+ version:
+ description: 'Release version (e.g., 1.0.0)'
+ required: true
+ type: string
+ dry_run:
+ description: 'Dry run (skip actual deployment)'
+ required: false
+ type: boolean
+ default: false
+
+# Minimal global permissions - jobs request additional permissions as needed
+permissions:
+ contents: read
+
+env:
+ JAVA_VERSION: '21'
+
+jobs:
+ validate:
+ name: Validate Release
+ runs-on: ubuntu-latest
+ outputs:
+ version: ${{ steps.version.outputs.version }}
+ should_deploy: ${{ steps.deploy-check.outputs.should_deploy }}
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - name: Determine version
+ id: version
+ run: |
+ if [ "${{ github.event_name }}" == "push" ]; then
+ VERSION="${GITHUB_REF#refs/tags/v}"
+ else
+ VERSION="${{ github.event.inputs.version }}"
+ fi
+ echo "version=$VERSION" >> $GITHUB_OUTPUT
+ echo "Release version: $VERSION"
+
+ - name: Check deployment condition
+ id: deploy-check
+ run: |
+ # Tag push: always deploy
+ # Manual dispatch: only if dry_run is not true
+ if [ "${{ github.event_name }}" == "push" ]; then
+ echo "should_deploy=true" >> $GITHUB_OUTPUT
+ echo "Deployment: enabled (tag push)"
+ elif [ "${{ github.event.inputs.dry_run }}" != "true" ]; then
+ echo "should_deploy=true" >> $GITHUB_OUTPUT
+ echo "Deployment: enabled (manual trigger, dry_run=false)"
+ else
+ echo "should_deploy=false" >> $GITHUB_OUTPUT
+ echo "Deployment: disabled (dry run mode)"
+ fi
+
+ - name: Set up JDK ${{ env.JAVA_VERSION }}
+ uses: actions/setup-java@v4
+ with:
+ java-version: ${{ env.JAVA_VERSION }}
+ distribution: 'temurin'
+ cache: 'maven'
+
+ - name: Validate build
+ run: mvn -B clean verify -DskipTests
+
+ test:
+ name: Run Tests
+ runs-on: ubuntu-latest
+ needs: validate
+
+ strategy:
+ matrix:
+ java: [ '17', '21' ]
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Set up JDK ${{ matrix.java }}
+ uses: actions/setup-java@v4
+ with:
+ java-version: ${{ matrix.java }}
+ distribution: 'temurin'
+ cache: 'maven'
+
+ - name: Run tests
+ run: mvn -B clean test
+
+ deploy:
+ name: Deploy to Maven Central
+ runs-on: ubuntu-latest
+ needs: [ validate, test ]
+ if: needs.validate.outputs.should_deploy == 'true'
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Set up JDK ${{ env.JAVA_VERSION }}
+ uses: actions/setup-java@v4
+ with:
+ java-version: ${{ env.JAVA_VERSION }}
+ distribution: 'temurin'
+ cache: 'maven'
+ server-id: central
+ server-username: CENTRAL_USERNAME
+ server-password: CENTRAL_TOKEN
+ gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }}
+ gpg-passphrase: GPG_PASSPHRASE
+
+ - name: Deploy to Maven Central
+ env:
+ CENTRAL_USERNAME: ${{ secrets.CENTRAL_USERNAME }}
+ CENTRAL_TOKEN: ${{ secrets.CENTRAL_TOKEN }}
+ GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
+ run: mvn -B clean deploy -Prelease -DskipTests -Dgpg.useAgent=false
+
+ sbom:
+ name: Generate SBOM
+ runs-on: ubuntu-latest
+ needs: validate
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Set up JDK ${{ env.JAVA_VERSION }}
+ uses: actions/setup-java@v4
+ with:
+ java-version: ${{ env.JAVA_VERSION }}
+ distribution: 'temurin'
+ cache: 'maven'
+
+ - name: Generate SBOM
+ run: mvn -B cyclonedx:makeAggregateBom -Pqa
+
+ - name: Upload SBOM artifact
+ uses: actions/upload-artifact@v4
+ with:
+ name: sbom
+ path: target/bom.*
+ retention-days: 90
+
+ github-release:
+ name: Create GitHub Release
+ runs-on: ubuntu-latest
+ needs: [ validate, deploy, sbom ]
+ if: needs.validate.outputs.should_deploy == 'true' && needs.deploy.result == 'success'
+
+ # Only this job needs write access to create the release
+ permissions:
+ contents: write
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Download SBOM
+ uses: actions/download-artifact@v4
+ with:
+ name: sbom
+ path: sbom/
+
+ - name: Read release notes
+ id: release-notes
+ run: |
+ if [ ! -f "RELEASE.md" ]; then
+ echo "Error: RELEASE.md not found"
+ exit 1
+ fi
+ echo "body<> $GITHUB_OUTPUT
+ cat RELEASE.md >> $GITHUB_OUTPUT
+ echo "EOF" >> $GITHUB_OUTPUT
+
+ - name: Create GitHub Release
+ uses: softprops/action-gh-release@v2
+ with:
+ tag_name: v${{ needs.validate.outputs.version }}
+ name: Release v${{ needs.validate.outputs.version }}
+ body: ${{ steps.release-notes.outputs.body }}
+ files: |
+ sbom/bom.json
+ sbom/bom.xml
+ draft: false
+ prerelease: ${{ contains(needs.validate.outputs.version, '-') }}
+ generate_release_notes: false
From f68bf47ea2e9dcf88cbd0d05cac4c831687966cf Mon Sep 17 00:00:00 2001
From: Erik
Date: Sun, 11 Jan 2026 18:44:49 +0100
Subject: [PATCH 02/35] Simplify `release.yml` by consolidating environment
variable setups and updating dependencies for SBOM generation.
---
.github/workflows/release.yml | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 6f63087..007c2ae 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -102,6 +102,10 @@ jobs:
     runs-on: ubuntu-latest
     needs: [ validate, test ]
     if: needs.validate.outputs.should_deploy == 'true'
+    env:
+      CENTRAL_USERNAME: ${{ secrets.CENTRAL_USERNAME }}
+      CENTRAL_TOKEN: ${{ secrets.CENTRAL_TOKEN }}
+      GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
 
     steps:
       - name: Checkout repository
@@ -120,16 +124,12 @@
           gpg-passphrase: GPG_PASSPHRASE
 
       - name: Deploy to Maven Central
-        env:
-          CENTRAL_USERNAME: ${{ secrets.CENTRAL_USERNAME }}
-          CENTRAL_TOKEN: ${{ secrets.CENTRAL_TOKEN }}
-          GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
         run: mvn -B clean deploy -Prelease -DskipTests -Dgpg.useAgent=false
 
   sbom:
     name: Generate SBOM
     runs-on: ubuntu-latest
-    needs: validate
+    needs: [ validate, test ]
 
     steps:
       - name: Checkout repository
From 9c7d0ad06fc496422fe120cccdcb8aa5984bb5af Mon Sep 17 00:00:00 2001
From: Erik
Date: Sun, 11 Jan 2026 18:47:03 +0100
Subject: [PATCH 03/35] Add CI workflow for build, test, quality analysis, and
dependency checks
---
.github/workflows/ci.yml | 145 +++++++++++++++++++++++++++++++++++++++
1 file changed, 145 insertions(+)
create mode 100644 .github/workflows/ci.yml
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..fbea71c
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,145 @@
+name: CI
+
+on:
+ push:
+ branches: [ main, develop, 'feature/**' ]
+ pull_request:
+ branches: [ main, develop ]
+
+permissions:
+ contents: read
+ checks: write
+ pull-requests: write
+
+jobs:
+ build:
+ name: Build & Test (Java ${{ matrix.java }})
+ runs-on: ubuntu-latest
+ strategy:
+ fail-fast: false
+ matrix:
+ java: [ '17', '21' ]
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 1
+
+ - name: Set up JDK ${{ matrix.java }}
+ uses: actions/setup-java@v4
+ with:
+ java-version: ${{ matrix.java }}
+ distribution: 'temurin'
+ cache: 'maven'
+
+ - name: Build and test with Maven
+ run: mvn -B clean verify -Pqa -Ddependency-check.skip=true
+
+ - name: Upload test results
+ uses: actions/upload-artifact@v4
+ if: always()
+ with:
+ name: test-results-java-${{ matrix.java }}
+ path: |
+ **/target/surefire-reports/
+ **/target/failsafe-reports/
+ retention-days: 7
+
+ - name: Upload coverage report
+ uses: actions/upload-artifact@v4
+ if: matrix.java == '21'
+ with:
+ name: coverage-report
+ path: |
+ **/target/site/jacoco/
+ **/target/jacoco.exec
+ retention-days: 7
+
+ - name: Publish Test Report
+ uses: mikepenz/action-junit-report@v4
+ if: always()
+ with:
+ report_paths: '**/target/surefire-reports/TEST-*.xml'
+ check_name: Test Report (Java ${{ matrix.java }})
+
+ quality:
+ name: Code Quality Analysis
+ runs-on: ubuntu-latest
+ needs: build
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 1
+
+ - name: Set up JDK 21
+ uses: actions/setup-java@v4
+ with:
+ java-version: '21'
+ distribution: 'temurin'
+ cache: 'maven'
+
+ - name: Compile for analysis
+ run: mvn -B clean compile -DskipTests
+
+ - name: Run SpotBugs analysis
+ run: mvn -B spotbugs:check -Pqa -Ddependency-check.skip=true
+ continue-on-error: true
+
+ - name: Run Checkstyle analysis
+ run: mvn -B checkstyle:check -Pqa -Ddependency-check.skip=true
+ continue-on-error: true
+
+ - name: Upload SpotBugs report
+ uses: actions/upload-artifact@v4
+ if: always()
+ with:
+ name: spotbugs-report
+ path: '**/target/spotbugsXml.xml'
+ retention-days: 7
+
+ - name: Upload Checkstyle report
+ uses: actions/upload-artifact@v4
+ if: always()
+ with:
+ name: checkstyle-report
+ path: '**/target/checkstyle-result.xml'
+ retention-days: 7
+
+ dependency-check:
+ name: OWASP Dependency Check
+ runs-on: ubuntu-latest
+ needs: build
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Set up JDK 21
+ uses: actions/setup-java@v4
+ with:
+ java-version: '21'
+ distribution: 'temurin'
+ cache: 'maven'
+
+ - name: Cache NVD database
+ uses: actions/cache@v4
+ with:
+ path: ~/.m2/repository/org/owasp/dependency-check-data
+ key: nvd-${{ runner.os }}-${{ hashFiles('**/pom.xml') }}
+ restore-keys: |
+ nvd-${{ runner.os }}-
+
+ - name: Run OWASP Dependency Check
+ run: mvn -B dependency-check:aggregate -Pqa
+ continue-on-error: true
+
+ - name: Upload Dependency Check report
+ uses: actions/upload-artifact@v4
+ if: always()
+ with:
+ name: dependency-check-report
+ path: target/dependency-check-report.html
+ retention-days: 30
From 6c087fd2b6e61889b14ca27981f1369837f749e7 Mon Sep 17 00:00:00 2001
From: Erik
Date: Sun, 11 Jan 2026 18:50:21 +0100
Subject: [PATCH 04/35] Add QA profiles, static analysis, code coverage, and
SBOM generation configuration
Set up QA profiles with support for JaCoCo, SpotBugs, Checkstyle, OWASP Dependency Check, and CycloneDX. Configure plugins for code quality, security analysis, and SBOM generation and update project properties for build consistency.
---
aether-datafixers-bom/pom.xml | 4 +
aether-datafixers-examples/pom.xml | 1 +
pom.xml | 157 ++++++++++++++++++++++++++++-
3 files changed, 161 insertions(+), 1 deletion(-)
diff --git a/aether-datafixers-bom/pom.xml b/aether-datafixers-bom/pom.xml
index 6dd9b44..324656c 100644
--- a/aether-datafixers-bom/pom.xml
+++ b/aether-datafixers-bom/pom.xml
@@ -13,6 +13,10 @@
pomAether Datafixers :: BOM
+
+
+ true
+ Bill of Materials for Aether Datafixers modules.
diff --git a/aether-datafixers-examples/pom.xml b/aether-datafixers-examples/pom.xml
index d09af88..d8fe91a 100644
--- a/aether-datafixers-examples/pom.xml
+++ b/aether-datafixers-examples/pom.xml
@@ -17,6 +17,7 @@
true
+ true
diff --git a/pom.xml b/pom.xml
index 5e3ca7f..558496b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -24,6 +24,7 @@
UTF-8UTF-817
+ true26.0.2
@@ -44,6 +45,16 @@
3.1.23.5.1
+
+ 0.8.14
+ 4.9.8.2
+ 3.6.0
+ 12.2.0
+ 2.9.1
+
+
+ false
+
4.7.6
@@ -319,7 +330,11 @@
sign
- true
+ ${gpg.useAgent}
+
+ --pinentry-mode
+ loopback
+
@@ -327,5 +342,145 @@
+
+
+
+ qa
+
+
+
+
+ org.jacoco
+ jacoco-maven-plugin
+ ${plugin.jacoco.version}
+
+
+ prepare-agent
+
+ prepare-agent
+
+
+
+ report
+ test
+
+ report
+
+
+
+ check
+
+ check
+
+
+ ${jacoco.skip}
+
+
+ BUNDLE
+
+
+ LINE
+ COVEREDRATIO
+ 0.75
+
+
+
+
+
+
+
+
+
+
+
+ com.github.spotbugs
+ spotbugs-maven-plugin
+ ${plugin.spotbugs.version}
+
+ Max
+ Medium
+ true
+ false
+
+
+
+
+ check
+
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-checkstyle-plugin
+ ${plugin.checkstyle.version}
+
+ ${maven.multiModuleProjectDirectory}/checkstyle.xml
+ true
+ false
+ false
+ warning
+ false
+ false
+
+
+
+ validate
+ validate
+
+ check
+
+
+
+
+
+
+
+ org.owasp
+ dependency-check-maven
+ ${plugin.owasp.version}
+
+ 7
+
+ ${maven.multiModuleProjectDirectory}/dependency-check-suppressions.xml
+
+
+ HTML
+ JSON
+
+
+
+
+
+
+ org.cyclonedx
+ cyclonedx-maven-plugin
+ ${plugin.cyclonedx.version}
+
+
+ package
+
+ makeAggregateBom
+
+
+
+
+ library
+ 1.5
+ true
+ true
+ true
+ true
+ false
+ false
+ false
+ all
+
+
+
+
+
\ No newline at end of file
From 05ae29a63903d6e293b6159e41442ad32c603d02 Mon Sep 17 00:00:00 2001
From: Erik
Date: Sun, 11 Jan 2026 19:16:40 +0100
Subject: [PATCH 05/35] Update CI to enhance OWASP Dependency Check and test
reporting
- Add failsafe report paths for better test coverage in JUnit report step.
- Configure `NVD_API_KEY` for OWASP Dependency Check via workflow secrets.
- Adjust dependency-check data cache location and keys for improved clarity.
- Include JSON format in dependency-check report uploads for extended analysis.
- Update plugin configuration in `pom.xml` to support NVD API key and custom data directory.
---
.github/workflows/ci.yml | 18 ++++++++++++------
pom.xml | 2 ++
2 files changed, 14 insertions(+), 6 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index fbea71c..2bf1fb7 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -60,7 +60,9 @@ jobs:
uses: mikepenz/action-junit-report@v4
if: always()
with:
- report_paths: '**/target/surefire-reports/TEST-*.xml'
+ report_paths: |
+ **/target/surefire-reports/TEST-*.xml
+ **/target/failsafe-reports/TEST-*.xml
check_name: Test Report (Java ${{ matrix.java }})
quality:
@@ -112,6 +114,8 @@ jobs:
name: OWASP Dependency Check
runs-on: ubuntu-latest
needs: build
+ env:
+ NVD_API_KEY: ${{ secrets.NVD_API_KEY }}
steps:
- name: Checkout repository
@@ -124,13 +128,13 @@ jobs:
distribution: 'temurin'
cache: 'maven'
- - name: Cache NVD database
+ - name: Cache Dependency-Check DB
uses: actions/cache@v4
with:
- path: ~/.m2/repository/org/owasp/dependency-check-data
- key: nvd-${{ runner.os }}-${{ hashFiles('**/pom.xml') }}
+ path: target/dependency-check-data
+ key: depcheck-${{ runner.os }}-${{ hashFiles('**/pom.xml') }}
restore-keys: |
- nvd-${{ runner.os }}-
+ depcheck-${{ runner.os }}-
- name: Run OWASP Dependency Check
run: mvn -B dependency-check:aggregate -Pqa
@@ -141,5 +145,7 @@ jobs:
if: always()
with:
name: dependency-check-report
- path: target/dependency-check-report.html
+ path: |
+ target/dependency-check-report.html
+ target/dependency-check-report.json
retention-days: 30
diff --git a/pom.xml b/pom.xml
index 558496b..fdf6a49 100644
--- a/pom.xml
+++ b/pom.xml
@@ -442,6 +442,8 @@
dependency-check-maven${plugin.owasp.version}
+ ${env.NVD_API_KEY}
+ ${project.build.directory}/dependency-check-data7${maven.multiModuleProjectDirectory}/dependency-check-suppressions.xml
From dcdc8ef6b23640ae92336defbe64a9765c33ef66 Mon Sep 17 00:00:00 2001
From: Erik
Date: Sun, 11 Jan 2026 20:07:49 +0100
Subject: [PATCH 06/35] Update SECURITY.md with extended security practices,
release artifact signing, and vulnerability disclosure policies
---
SECURITY.md | 153 ++++++++++++++++++++++++++++++++++++++++++++--------
1 file changed, 130 insertions(+), 23 deletions(-)
diff --git a/SECURITY.md b/SECURITY.md
index 34eab22..b75e5a2 100644
--- a/SECURITY.md
+++ b/SECURITY.md
@@ -4,39 +4,146 @@
We provide security updates for the following versions:
-| Version | Supported |
-|---------|---------------------|
-| 1.0.x | ❌ Not supported yet |
-| 0.4.x | ✅ Active Support |
-| 0.3.x | ❌ Not supported |
-| 0.2.x | ❌ Not supported |
-| 0.1.x | ❌ Not supported |
+| Version | Support Status | End of Support |
+|--------|--------------------|----------------|
+| 1.0.x | 🔜 Planned LTS | TBD (1 year) |
+| 0.5.x | ✅ Active Support | February 2026 |
+| 0.4.x | ❌ End of Life | - |
+| 0.3.x | ❌ End of Life | - |
+| 0.2.x | ❌ End of Life | - |
+| 0.1.x | ❌ End of Life | - |
-If you are using an older version, we **strongly** recommend upgrading to the latest stable release.
+If you are using an older version, we **strongly recommend upgrading** to the latest stable release.
+
+---
+
+## Security Features
+
+### Automated Security Scanning
+
+This project uses multiple automated security tools:
+
+- **GitHub CodeQL** – Static Application Security Testing (SAST)
+- **OWASP Dependency-Check** – Known vulnerability detection in dependencies
+- **GitHub Dependency Review** – Pull request dependency analysis
+- **Dependabot** – Automated dependency updates
+
+All scans are executed automatically in CI pipelines on every pull request and release build.
+
+---
+
+## Supply Chain Security
+
+### Artifact Integrity & Signing
+
+All official release artifacts of **Aether Datafixers** are **cryptographically signed** to guarantee integrity and authenticity.
+
+- All release artifacts are **GPG signed**
+- Signatures are generated during the release pipeline
+- Each published artifact is accompanied by a corresponding `.asc` signature file
+- Consumers can verify artifacts before usage
+
+Example verification flow:
+
+```
+gpg --verify artifact.jar.asc artifact.jar
+```
+
+Unsigned or modified artifacts **must not be trusted**.
+
+---
+
+### Signing Keys
+
+- A **dedicated GPG key** is used for automated GitHub releases and deployments
+- Release signing keys are **separate from personal developer keys**
+- Private key material is **never committed** to the repository
+- Keys are stored securely using CI secret management
+
+The signing process is fully automated and enforced during release builds.
+
+---
## Reporting a Vulnerability
-If you find a security vulnerability in Aether Datafixers, please report it **privately**.
-We take security issues seriously and will respond as soon as possible.
+If you discover a security vulnerability in **Aether Datafixers**, please report it **privately**.
-### 📬 Contact
+### Contact
-- **Email:** security@splatgames.de
-- **GitHub Issues:** Do **not** report security vulnerabilities in public issues.
+- **Email:** `security@splatgames.de`
+- **GitHub Security Advisories:**
+ https://github.com/aether-framework/aether-datafixers/security/advisories/new
+- **GitHub Issues:**
+ Do **not** report security vulnerabilities in public issues.
-### 🔒 Disclosure Process
+---
-1. Report the issue privately via **security@splatgames.de**.
-2. Our team will acknowledge receipt within **48 hours**.
-3. We will investigate and provide a **fix timeline**.
-4. Once resolved, we will issue a **security advisory**.
+## Disclosure Process
+
+1. Report the issue privately
+2. Acknowledgment within **48 hours**
+3. Fix timeline provided within **7 days**
+4. Critical vulnerabilities (CVSS ≥ 9.0): patch within **72 hours**
+5. High severity (CVSS ≥ 7.0): patch within **14 days**
+6. Security advisory published after resolution
+
+---
+
+## Response Time SLA
+
+| Severity | Acknowledgment | Fix Timeline |
+|--------------------------|----------------|--------------|
+| Critical (CVSS 9.0–10.0) | 24 hours | 72 hours |
+| High (CVSS 7.0–8.9) | 48 hours | 14 days |
+| Medium (CVSS 4.0–6.9) | 48 hours | 30 days |
+| Low (CVSS 0.1–3.9) | 72 hours | Next release |
+
+---
## Security Best Practices
-To keep your application secure when using Aether Datafixers:
+- Always use the **latest stable version**
+- Verify **GPG signatures** of all downloaded artifacts
+- Enable automated dependency updates
+- Validate input data at system boundaries
+- Use appropriate `DynamicOps` implementations for untrusted data
+- Avoid sensitive data in logs
+- Review the attached **SBOM** for dependency transparency
+
+---
+
+## Vulnerability Disclosure Policy
+
+We follow a **coordinated disclosure** process:
+
+1. Private disclosure
+2. Fix development
+3. Advisory preparation
+4. Coordinated release
+5. Public disclosure after a grace period
+
+---
+
+## Security Audits
+
+Security audits are welcome.
+
+- Contact `security@splatgames.de` before starting
+- Follow responsible disclosure practices
+- Researchers may be credited with permission
+
+---
+
+## PGP Key
+
+For encrypted communication and release verification:
+
+- **Key Purpose:** Release artifact signing
+- **Key ID:** Available upon request
+- **Fingerprint:** Available upon request
+
+Contact: **security@splatgames.de**
-- Always use the **latest stable version**.
-- Validate event data properly.
-- Do not expose sensitive logging in production.
+---
-🚀 Thank you for helping us keep Aether Datafixers secure!
+Thank you for helping keep **Aether Datafixers** secure.
\ No newline at end of file
From 5ac3d8aba984463e11e78039fce5209ab5323939 Mon Sep 17 00:00:00 2001
From: Erik
Date: Sun, 11 Jan 2026 20:34:24 +0100
Subject: [PATCH 07/35] Add GitHub Actions workflow for dependency review on
PRs
Set up a `dependency-review.yml` workflow triggered on pull requests to `main` and `develop` branches. Configure fail-on-severity, license restrictions, and PR comment summaries to enhance dependency analysis and reporting.
---
.github/workflows/dependency-review.yml | 29 +++++++++++++++++++++++++
1 file changed, 29 insertions(+)
create mode 100644 .github/workflows/dependency-review.yml
diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml
new file mode 100644
index 0000000..8a1b3ef
--- /dev/null
+++ b/.github/workflows/dependency-review.yml
@@ -0,0 +1,29 @@
+name: Dependency Review
+
+on:
+ pull_request:
+ branches: [ main, develop ]
+
+permissions:
+ contents: read
+ pull-requests: write
+
+jobs:
+ dependency-review:
+ name: Dependency Review
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 1
+
+ - name: Dependency Review
+ uses: actions/dependency-review-action@v4
+ with:
+ fail-on-severity: high
+          # deny-licenses omitted: it is mutually exclusive with allow-licenses below
+ allow-licenses: MIT, Apache-2.0, BSD-2-Clause, BSD-3-Clause, ISC, MPL-2.0
+ comment-summary-in-pr: always
+ warn-only: false
From 8668b86fe82949f1c11f4d203ef0e21bd4e32c8c Mon Sep 17 00:00:00 2001
From: Erik
Date: Sun, 11 Jan 2026 20:35:15 +0100
Subject: [PATCH 08/35] Add CodeQL Security Analysis workflow
Set up a `codeql.yml` GitHub Actions workflow to perform scheduled and event-driven security analysis for Java/Kotlin code using extended and quality queries.
---
.github/workflows/codeql.yml | 50 ++++++++++++++++++++++++++++++++++++
1 file changed, 50 insertions(+)
create mode 100644 .github/workflows/codeql.yml
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
new file mode 100644
index 0000000..350f742
--- /dev/null
+++ b/.github/workflows/codeql.yml
@@ -0,0 +1,50 @@
+name: CodeQL Security Analysis
+
+on:
+ push:
+ branches: [ main, develop ]
+ pull_request:
+ branches: [ main, develop ]
+ schedule:
+ # Run every Monday at 00:00 UTC
+ - cron: '0 0 * * 1'
+
+permissions:
+ contents: read
+ security-events: write
+ actions: read
+
+jobs:
+ analyze:
+ name: Analyze (${{ matrix.language }})
+ runs-on: ubuntu-latest
+
+ strategy:
+ fail-fast: false
+ matrix:
+ language: [ 'java-kotlin' ]
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Set up JDK 21
+ uses: actions/setup-java@v4
+ with:
+ java-version: '21'
+ distribution: 'temurin'
+ cache: 'maven'
+
+ - name: Initialize CodeQL
+ uses: github/codeql-action/init@v3
+ with:
+ languages: ${{ matrix.language }}
+ queries: security-extended,security-and-quality
+
+ - name: Build with Maven
+ run: mvn -B clean compile -DskipTests
+
+ - name: Perform CodeQL Analysis
+ uses: github/codeql-action/analyze@v3
+ with:
+ category: "/language:${{ matrix.language }}"
From 55a204c3bdc467e0ce2af06c105249ec53c01079 Mon Sep 17 00:00:00 2001
From: Erik
Date: Sun, 11 Jan 2026 20:36:39 +0100
Subject: [PATCH 09/35] Refactor CodeQL workflow for simplified configuration
and enhanced build control
---
.github/workflows/codeql.yml | 17 +++++++----------
1 file changed, 7 insertions(+), 10 deletions(-)
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 350f742..5233757 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -6,8 +6,7 @@ on:
pull_request:
branches: [ main, develop ]
schedule:
- # Run every Monday at 00:00 UTC
- - cron: '0 0 * * 1'
+ - cron: '0 0 * * 1' # Monday 00:00 UTC
permissions:
contents: read
@@ -16,17 +15,14 @@ permissions:
jobs:
analyze:
- name: Analyze (${{ matrix.language }})
+ name: Analyze (java-kotlin)
runs-on: ubuntu-latest
- strategy:
- fail-fast: false
- matrix:
- language: [ 'java-kotlin' ]
-
steps:
- name: Checkout repository
uses: actions/checkout@v4
+ with:
+ fetch-depth: 1
- name: Set up JDK 21
uses: actions/setup-java@v4
@@ -38,7 +34,8 @@ jobs:
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
- languages: ${{ matrix.language }}
+ languages: java-kotlin
+ build-mode: manual
queries: security-extended,security-and-quality
- name: Build with Maven
@@ -47,4 +44,4 @@ jobs:
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
with:
- category: "/language:${{ matrix.language }}"
+ category: "/language:java-kotlin"
From 2944350d88bc28308653353ea1b058702e8f9d15 Mon Sep 17 00:00:00 2001
From: Erik
Date: Sun, 11 Jan 2026 20:47:05 +0100
Subject: [PATCH 10/35] Configure Dependabot for Maven dependencies and GitHub
Actions updates
---
.github/dependabot.yml | 72 ++++++++++++++++++++++++++++++++++++++++++
1 file changed, 72 insertions(+)
create mode 100644 .github/dependabot.yml
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..0c53cfa
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,72 @@
+version: 2
+updates:
+ # Maven dependencies
+ - package-ecosystem: "maven"
+ directory: "/"
+ schedule:
+ interval: "weekly"
+ day: "monday"
+ time: "06:00"
+ timezone: "Europe/Berlin"
+ open-pull-requests-limit: 10
+ reviewers:
+ - "aether-framework/maintainers"
+ labels:
+ - "dependencies"
+ - "java"
+ commit-message:
+ prefix: "deps"
+ include: "scope"
+ groups:
+ jackson:
+ patterns:
+ - "com.fasterxml.jackson*"
+ update-types:
+ - "minor"
+ - "patch"
+ spring:
+ patterns:
+ - "org.springframework*"
+ update-types:
+ - "minor"
+ - "patch"
+ testing:
+ patterns:
+ - "org.junit*"
+ - "org.assertj*"
+ update-types:
+ - "minor"
+ - "patch"
+ maven-plugins:
+ patterns:
+ - "org.apache.maven.plugins:maven-*"
+ - "org.codehaus.mojo:*"
+ update-types:
+ - "minor"
+ - "patch"
+ build-plugins:
+ patterns:
+ - "org.sonatype.central:*"
+ - "org.owasp:*"
+ - "org.cyclonedx:*"
+ - "org.jacoco:*"
+ - "com.github.spotbugs:*"
+ update-types:
+ - "minor"
+ - "patch"
+
+ # GitHub Actions
+ - package-ecosystem: "github-actions"
+ directory: "/"
+ schedule:
+ interval: "weekly"
+ day: "monday"
+ time: "06:00"
+ timezone: "Europe/Berlin"
+ open-pull-requests-limit: 5
+ labels:
+ - "dependencies"
+ - "github-actions"
+ commit-message:
+ prefix: "ci"
+ include: "scope"
From ff98e5221b9c43887e31c110c3c9d471366f360c Mon Sep 17 00:00:00 2001
From: Erik
Date: Sun, 11 Jan 2026 20:52:49 +0100
Subject: [PATCH 11/35] Add OWASP Dependency-Check suppression configuration
file
Set up `dependency-check-suppressions.xml` to manage false positives and non-applicable vulnerabilities. Includes guidelines for suppression review and maintenance.
---
dependency-check-suppressions.xml | 40 +++++++++++++++++++++++++++++++
1 file changed, 40 insertions(+)
create mode 100644 dependency-check-suppressions.xml
diff --git a/dependency-check-suppressions.xml b/dependency-check-suppressions.xml
new file mode 100644
index 0000000..424b8da
--- /dev/null
+++ b/dependency-check-suppressions.xml
@@ -0,0 +1,40 @@
+
+
+
+
+
+
+
From 150773e506aaab20c59e8662facc42906cb0a631 Mon Sep 17 00:00:00 2001
From: Erik
Date: Sun, 11 Jan 2026 20:55:35 +0100
Subject: [PATCH 12/35] Add security.txt to define security contact and policy
information
Set up `.well-known/security.txt` following RFC 9116 to provide security contact details, disclosure policy, and acknowledgment information.
---
.well-known/security.txt | 10 ++++++++++
1 file changed, 10 insertions(+)
create mode 100644 .well-known/security.txt
diff --git a/.well-known/security.txt b/.well-known/security.txt
new file mode 100644
index 0000000..be8976d
--- /dev/null
+++ b/.well-known/security.txt
@@ -0,0 +1,10 @@
+# Aether Datafixers Security Contact
+# This file follows RFC 9116 (https://www.rfc-editor.org/rfc/rfc9116)
+
+Contact: mailto:security@splatgames.de
+Contact: https://github.com/aether-framework/aether-datafixers/security/advisories/new
+Expires: 2027-01-01T00:00:00.000Z
+Preferred-Languages: en, de
+Canonical: https://raw.githubusercontent.com/aether-framework/aether-datafixers/main/.well-known/security.txt
+Policy: https://github.com/aether-framework/aether-datafixers/blob/main/SECURITY.md
+Acknowledgments: https://github.com/aether-framework/aether-datafixers/blob/main/SECURITY.md#security-audits
From 81ea5cf48f699d0a065445ced38f572090c9c8c8 Mon Sep 17 00:00:00 2001
From: Erik
Date: Sun, 11 Jan 2026 21:13:25 +0100
Subject: [PATCH 13/35] Add Checkstyle configuration for code quality
enforcement
Set up `checkstyle.xml` with Google Java Style as a base and project-specific rules, including file length, line length, naming conventions, import rules, whitespace, braces, and annotations.
---
checkstyle.xml | 196 +++++++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 196 insertions(+)
create mode 100644 checkstyle.xml
diff --git a/checkstyle.xml b/checkstyle.xml
new file mode 100644
index 0000000..b5d8da0
--- /dev/null
+++ b/checkstyle.xml
@@ -0,0 +1,196 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
From e7521c4b3df57fb2cfadec3c01401227af4a45e0 Mon Sep 17 00:00:00 2001
From: Erik
Date: Sun, 11 Jan 2026 21:16:52 +0100
Subject: [PATCH 14/35] Add PGP public key for release artifact signing
Set up `KEYS` file with the project's PGP public key to enable verification of signed release artifacts.
---
KEYS | 52 ++++++++++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 52 insertions(+)
create mode 100644 KEYS
diff --git a/KEYS b/KEYS
new file mode 100644
index 0000000..8e79d7e
--- /dev/null
+++ b/KEYS
@@ -0,0 +1,52 @@
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+
+mQINBGlj2M8BEADdr0h5IJcIQ5USEP4rFPY+/91QLlwuFfI92Rcfsj0aGUXpsK+B
+UiCKeYgbyZJeFEQU4UHtP6QUfb58od8irVjAmv5eKAylHYy8vN8botNd4owzVNhz
+225NFtXjovs8GAV3phbXoW8D2IlnFt8lf0RohAeF+P/Ved1n7/5cHZXfQ92Aye82
+whA1pXdrqubmOmW+yHaw90BCksExwhOY5VzDXEPITeTj7yzhDY+fMofvnD7dAq98
+zA+vYDPTOZCWV3k38Re7xpvQcfl5F/lAqN9xtRqaMVIwzE35mhwLIPm7E9qPAOr6
+LI1boCRe5u3ei0gV8/OAkaZOWOQTF57g/G6J7TkB/GOhbkufR7XLGOK5DeU95xAI
+5d960k/6pa7LCIVMR1lIGHGbjS7i9rofDp4gpyL8B/qU9X8PRjguYYGPUv+6K5GT
+pIT7LCV4lDv+bR/H1htCzWZCCwUVVwOgvcXzec0R+Qd2WrHnGOoW4x3XCENvmsAL
+6/hHSxcpaOWJH5VK1iVWChjNZJqY95cu1dHV1jj7qeXb8xsDKQDuBq9B/X4RB/SK
+Suw7MRk1/EgHiZLQunsSKxCzfwKcpo0rI8TAFgm5Y30/12sTG6lt8ePQPFQUywQI
+YIV1+CYEMsDLdN5kul9o58P+PkwdfAq7YYDi32LrVSw0Z3/28+yoDJcg2QARAQAB
+tEFTcGxhdGdhbWVzLmRlIFNvZnR3YXJlIENJIFJlbGVhc2UgU2lnbmluZyA8cmVs
+ZWFzZUBzcGxhdGdhbWVzLmRlPokCUQQTAQgAOxYhBMa+Jb8qRjmmekkevTe1m5Pc
+dW7oBQJpY9jPAhsDBQsJCAcCAiICBhUKCQgLAgQWAgMBAh4HAheAAAoJEDe1m5Pc
+dW7ooZwQAJUz8PfFO3/vbmWUD4a4I11ipWuOT/C8xArmhH7RkxSysgHspdBq+vUT
+B7VXdx4IJNWps77IajJsMIA6bC3B1V4YZBnxKYKdvhNlkoDddmXGFcyBpTGSfzwD
+qPOHrWa8LDg6naJ22PSgCh5SIy9BPQ+Vr0ES9R+HE3dFPNJgrocZk5/M1lV0gGk+
+o64F9VYl9r06QKwn5zEKyl6CKdpAlW9xHLobPW85vCEiEaTQlWDWzTH4uM+7JeM4
+JnmRVE/vWxA3O+hCoQJNdr2EhnyleXWgrkLIRggfgquB77LBN53JSs1ABby2J/fC
+JHNigGF/FvPhwsGQ4ateRfgK4fkeitLeNH+ozbrs7V+HdA2MDpftVI9LnEX48T2R
+wEAJQq3V9pnAHbnXa9v7Y8eOnEcaGhgOtvtBaamYR1/PNHSp9osh1w1g/KSRtb3j
+vp28hfLZoId+A2Q8KT6/2u4liiDt9hqueuOJe1i9A1Y8vXoaFd0J5LGTCJBsQ/00
+ZZdehcvNvSZPfzy/tbTB6lHqnNGU1w+n1wLA4mKPQKH3TkujvSpuTRWSrPl2vgFj
+GUHjep/MI9ie6lTF8sQ20C1skJtZQiOEwTwNq5QDUOJB/QeDlfhUV3G62Vxd/vS6
+u49bs2j+7hiRXNMfuOMjSOMp47QzEMBvHx0FTMFx/NKI6BEzaUBFuQINBGlj2M8B
+EACxDR6BLawJA3lLd41lXhauatOEYzaURGLjmvoYRQwv4k/RkduXcOcHkHGRKhfe
+7jBOtlkG1RUMe6Jeori5ErNHhqCefDpAI4rCrHXt9cS7STFRlmMznsEZ3AI1rTgG
++AhFNmDMWpC/ImynvGgcd/P4ZJx0oZlUkELJBELU2XShvQMlJP6R7wTOSv9OWeda
+V/MJSdgRuDiJJxG7z1Ono98YOs5kOu2nBOLcthyaox5TZZssaUBKtkcRMiiEOtvv
+nJUmC9mN4Cx9w73xxVeibdgW+2ZGr1qwI/1sXcPPCOTJ4ena9Sy4+KK4JzWaWIsl
+pBqS+p2ngKWTx/kDyuu5CHnyXmZLie0shj6OjiO2XWyMRQ4pG/wsZYV0kEZk/b8C
+BOpFbMgVLN6jqZ1DiIFDGzFdi2sf7LE+MrDIOQa1IY/bmEUl0/vE49j+Nwqq8SeT
+/B8DOoMtyTly84Mbzf0Mg+RIZTMZ+7GvNhTVIirSc6xPsaZ1E4JstXDofDazVLd9
+Z15dtelbJeGbDJI2YpPS+7ISyFEUAbUobGXdeKgOh4J2c04VarO60u02Ed0/wWXZ
+ey6VXhXCmMzGwrSgnvuLiCD7F4ZM/5+nSi/GkeXEs8qXuYpmdQ9tSz4R9miax+iu
+j+UxfzdMlQfZL7VJq3RPa+RJunTb15SNwPevPilpZCBZdwARAQABiQI2BBgBCAAg
+FiEExr4lvypGOaZ6SR69N7Wbk9x1bugFAmlj2M8CGwwACgkQN7Wbk9x1bugd2w//
+T7vwqH87ovqEG3BkCPWBzftMbhzg5S/Qn8yyjTyMCao8Lm0ZK/0UmXJtwJ7hxNOk
+RRb3rIYdbbwWhLb0MgD87AYmg2DL604X7GpnBXvzwbDZlm0vNe8qB7v2JluDPZWN
+uw8kX9owInNn6pb3tChsVsQ50vgVa3yfPQ/8MhOqe/NzKbqeCBPDqbRhmODmC0+x
+yRYgGo4cVwMc6NFHMcl/CfEK5c1In7BYMAKnJ76LADzdgyeYZtGlJjPBvbM9JSJk
+E7uvPy6WcpDAqZN+EmJ36DbPPquGyuieUMwHmi8hGp0Vid/bKas/NZU3XYBVCXXA
+kn2KdvMT/ctqIE2NCs06keX7ojnt+kUxXeUmpTSCJ4J3zXb3bAEyTTL47BT8lnBB
+AejZndNaBHjnAUoW9VpiHDHHtDdOzELL3c+/sROLZLLLq3sq0+cUYwG2mKYXakXO
+1cygMAvFdrWYR+1ezLhG6IqDKwENUq7EQZv4itLlsPb//UrVwnKBd052CsIbGPRn
+zwZfIlCCdpL0/jRNgOh8lxm90U6P++o6m9SRCO+epFRVNKZBFW9QcYUT20HiTsVO
+txvN20Zz88ID9dyRywF236cYES0wdAGC0hgFHAI0vmjKzvuKspRhcJ8accjSLyF6
+XJ48wxLgq6MyEpefywMP3znLq0PGEwl8EmjsZc2bdCk=
+=3smd
+-----END PGP PUBLIC KEY BLOCK-----
From 3ccc9a9ccdb858ff56ac96d9a89de3b7a9515b64 Mon Sep 17 00:00:00 2001
From: Erik
Date: Sun, 11 Jan 2026 21:26:22 +0100
Subject: [PATCH 15/35] Update SECURITY.md with release signing key details and
enhance Checkstyle rules
- Add PGP key ID, fingerprint, and reference to `KEYS` file in SECURITY.md for artifact verification.
- Configure indentation rules in Checkstyle for consistent 4-space formatting.
---
SECURITY.md | 5 +++--
checkstyle.xml | 9 +++++++++
2 files changed, 12 insertions(+), 2 deletions(-)
diff --git a/SECURITY.md b/SECURITY.md
index b75e5a2..e39fe80 100644
--- a/SECURITY.md
+++ b/SECURITY.md
@@ -139,8 +139,9 @@ Security audits are welcome.
For encrypted communication and release verification:
- **Key Purpose:** Release artifact signing
-- **Key ID:** Available upon request
-- **Fingerprint:** Available upon request
+- **Key ID:** 37B59B93DC756EE8
+- **Fingerprint:** C6BE25BF2A4639A67A491EBD37B59B93DC756EE8
+- **Accessible in repository:** `KEYS`
Contact: **security@splatgames.de**
diff --git a/checkstyle.xml b/checkstyle.xml
index b5d8da0..cb42ee8 100644
--- a/checkstyle.xml
+++ b/checkstyle.xml
@@ -192,5 +192,14 @@
+
+
+
+
+
+
+
+
+
From 45dc08e065f95d2c257deb0e7ddd7ecf8a67b081 Mon Sep 17 00:00:00 2001
From: Erik
Date: Sun, 11 Jan 2026 21:34:41 +0100
Subject: [PATCH 16/35] Simplify Checkstyle configuration by removing redundant
modules
---
checkstyle.xml | 3 ---
1 file changed, 3 deletions(-)
diff --git a/checkstyle.xml b/checkstyle.xml
index cb42ee8..2a5efbf 100644
--- a/checkstyle.xml
+++ b/checkstyle.xml
@@ -109,7 +109,6 @@
-
@@ -142,7 +141,6 @@
-
@@ -191,7 +189,6 @@
-
From 7c9b73a4f006890d7122db1855e70737ed9ef6ae Mon Sep 17 00:00:00 2001
From: Erik
Date: Sun, 11 Jan 2026 21:34:50 +0100
Subject: [PATCH 17/35] Normalize Javadoc line breaks across multiple classes
for consistency.
---
.../aether/datafixers/api/DataVersion.java | 22 ++-
.../aether/datafixers/api/TypeReference.java | 26 ++--
.../api/bootstrap/DataFixerBootstrap.java | 15 +-
.../datafixers/api/codec/CodecRegistry.java | 40 +++--
.../api/codec/RecordCodecBuilder.java | 15 +-
.../api/diagnostic/MigrationReport.java | 81 +++++-----
.../aether/datafixers/api/dsl/DSL.java | 72 ++++++---
.../datafixers/api/dynamic/Dynamic.java | 147 +++++++++---------
.../datafixers/api/dynamic/TaggedDynamic.java | 19 +--
.../aether/datafixers/api/fix/Fixes.java | 5 +-
.../aether/datafixers/api/optic/Affine.java | 6 +-
.../aether/datafixers/api/optic/Iso.java | 5 +-
.../aether/datafixers/api/optic/Lens.java | 6 +-
.../aether/datafixers/api/optic/Prism.java | 6 +-
.../datafixers/api/optic/Traversal.java | 11 +-
.../datafixers/api/result/DataResult.java | 34 ++--
.../api/rewrite/BatchTransform.java | 4 +-
.../aether/datafixers/api/schema/Schema.java | 18 ++-
18 files changed, 270 insertions(+), 262 deletions(-)
diff --git a/aether-datafixers-api/src/main/java/de/splatgames/aether/datafixers/api/DataVersion.java b/aether-datafixers-api/src/main/java/de/splatgames/aether/datafixers/api/DataVersion.java
index cf97d68..6f7f9bd 100644
--- a/aether-datafixers-api/src/main/java/de/splatgames/aether/datafixers/api/DataVersion.java
+++ b/aether-datafixers-api/src/main/java/de/splatgames/aether/datafixers/api/DataVersion.java
@@ -74,8 +74,7 @@ public final class DataVersion implements Comparable {
* The internal numeric representation of this data version.
*
*
This value is guaranteed to be non-negative and represents the version number
- * in a monotonically increasing sequence. Higher values indicate newer versions of
- * the data schema.
+ * in a monotonically increasing sequence. Higher values indicate newer versions of the data schema.
*/
private final int version;
@@ -136,9 +135,8 @@ public int getVersion() {
* }
*
* @param o the data version to compare against; must not be {@code null}
- * @return a negative integer if this version is less than the specified version,
- * zero if they are equal, or a positive integer if this version is
- * greater than the specified version
+ * @return a negative integer if this version is less than the specified version, zero if they are equal,
+ * or a positive integer if this version is greater than the specified version
* @throws NullPointerException if the specified data version is {@code null}
*/
@Override
@@ -150,8 +148,8 @@ public int compareTo(@NotNull final DataVersion o) {
* Indicates whether some other object is "equal to" this data version.
*
*
Two {@code DataVersion} instances are considered equal if and only if they
- * have the same numeric version value. This method adheres to the general contract
- * of {@link Object#equals(Object)}, providing:
+ * have the same numeric version value. This method adheres to the general contract of
+ * {@link Object#equals(Object)}, providing:
*
*
Reflexivity: For any non-null {@code DataVersion x}, {@code x.equals(x)}
* returns {@code true}
@@ -169,8 +167,7 @@ public int compareTo(@NotNull final DataVersion o) {
*
*
* @param obj the reference object with which to compare; may be {@code null}
- * @return {@code true} if this data version is equal to the specified object;
- * {@code false} otherwise
+ * @return {@code true} if this data version is equal to the specified object; {@code false} otherwise
* @see #hashCode()
*/
@Override
@@ -213,8 +210,8 @@ public int hashCode() {
* Returns a string representation of this data version.
*
*
The returned string follows the format {@code "DataVersion{version=N}"} where
- * {@code N} is the numeric version value. This format is intended for debugging and
- * logging purposes and should not be parsed programmatically.
+ * {@code N} is the numeric version value. This format is intended for debugging and logging purposes and should not
+ * be parsed programmatically.
*
*
Example output:
*
{@code
@@ -222,8 +219,7 @@ public int hashCode() {
* new DataVersion(0).toString() // Returns "DataVersion{version=0}"
* }
*
- * @return a string representation of this data version in the format
- * {@code "DataVersion{version=N}"}
+ * @return a string representation of this data version in the format {@code "DataVersion{version=N}"}
*/
@Override
public String toString() {
diff --git a/aether-datafixers-api/src/main/java/de/splatgames/aether/datafixers/api/TypeReference.java b/aether-datafixers-api/src/main/java/de/splatgames/aether/datafixers/api/TypeReference.java
index 15fa8f7..5aaedb9 100644
--- a/aether-datafixers-api/src/main/java/de/splatgames/aether/datafixers/api/TypeReference.java
+++ b/aether-datafixers-api/src/main/java/de/splatgames/aether/datafixers/api/TypeReference.java
@@ -78,8 +78,8 @@ public final class TypeReference {
* The unique string identifier for this type reference.
*
*
This identifier is used as a key for type lookups in registries and serves as
- * the canonical name for the data type throughout the data fixing system. The value
- * is guaranteed to be non-null and non-empty.
+ * the canonical name for the data type throughout the data fixing system. The value is guaranteed to be non-null
+ * and non-empty.
*
*
By convention, type identifiers use lowercase letters with underscores to
* separate words (e.g., "player", "block_entity", "world_data").
@@ -104,8 +104,8 @@ public TypeReference(@NotNull final String id) {
* Returns the unique identifier for this type reference.
*
*
The returned identifier is the canonical name used to look up type definitions
- * in a {@link TypeRegistry} and to associate {@link DataFix} instances with this
- * data type. The identifier is guaranteed to be non-null and non-empty.
+ * in a {@link TypeRegistry} and to associate {@link DataFix} instances with this data type. The identifier is
+ * guaranteed to be non-null and non-empty.
*
*
Example usage:
*
{@code
@@ -127,8 +127,7 @@ public String getId() {
* Returns a hash code value for this type reference.
*
*
The hash code is computed solely based on the string identifier. This
- * implementation satisfies the general contract of {@link Object#hashCode()},
- * ensuring that:
+ * implementation satisfies the general contract of {@link Object#hashCode()}, ensuring that:
*
*
If two {@code TypeReference} objects are equal according to the
* {@link #equals(Object)} method, then calling {@code hashCode()} on each
@@ -153,8 +152,8 @@ public int hashCode() {
* Indicates whether some other object is "equal to" this type reference.
*
*
Two {@code TypeReference} instances are considered equal if and only if they
- * have the same string identifier (case-sensitive comparison). This method adheres
- * to the general contract of {@link Object#equals(Object)}, providing:
+ * have the same string identifier (case-sensitive comparison). This method adheres to the general contract of
+ * {@link Object#equals(Object)}, providing:
*
*
Reflexivity: For any non-null {@code TypeReference x},
* {@code x.equals(x)} returns {@code true}
@@ -180,8 +179,7 @@ public int hashCode() {
* }
*
* @param obj the reference object with which to compare; may be {@code null}
- * @return {@code true} if this type reference is equal to the specified object;
- * {@code false} otherwise
+ * @return {@code true} if this type reference is equal to the specified object; {@code false} otherwise
* @see #hashCode()
*/
@Override
@@ -199,8 +197,8 @@ public boolean equals(final Object obj) {
* Returns a string representation of this type reference.
*
*
The returned string follows the format {@code "TypeReference{id=''}"}.
- * This format is intended for debugging and logging purposes and provides a clear,
- * human-readable representation of the type reference.
+ * This format is intended for debugging and logging purposes and provides a clear, human-readable representation of
+ * the type reference.
*
*
Note: The format of this string is not guaranteed to remain stable across
- * versions and should not be parsed programmatically. Use {@link #getId()} to retrieve
- * the identifier for programmatic use.
+ * versions and should not be parsed programmatically. Use {@link #getId()} to retrieve the identifier for
+ * programmatic use.
*
* @return a string representation of this type reference
* @see #getId()
diff --git a/aether-datafixers-api/src/main/java/de/splatgames/aether/datafixers/api/bootstrap/DataFixerBootstrap.java b/aether-datafixers-api/src/main/java/de/splatgames/aether/datafixers/api/bootstrap/DataFixerBootstrap.java
index 6073b83..cc8fba4 100644
--- a/aether-datafixers-api/src/main/java/de/splatgames/aether/datafixers/api/bootstrap/DataFixerBootstrap.java
+++ b/aether-datafixers-api/src/main/java/de/splatgames/aether/datafixers/api/bootstrap/DataFixerBootstrap.java
@@ -88,8 +88,8 @@ public interface DataFixerBootstrap {
* Registers all schemas with the provided schema registry.
*
*
This method is invoked during data fixer initialization to populate the schema
- * registry with {@link Schema} instances for each supported data version. Schemas define
- * the structure and types available at each version of the data model.
+ * registry with {@link Schema} instances for each supported data version. Schemas define the structure and types
+ * available at each version of the data model.
*
*
Implementation Guidelines
*
When implementing this method, consider the following best practices:
@@ -127,8 +127,7 @@ public interface DataFixerBootstrap {
* Implementations do not need to be thread-safe, but they should not retain references
* to the registry after the method returns.
*
- * @param schemas the schema registry to populate with version-specific schemas;
- * must not be {@code null}
+ * @param schemas the schema registry to populate with version-specific schemas; must not be {@code null}
* @throws NullPointerException if {@code schemas} is {@code null}
* @see Schema
* @see SchemaRegistry
@@ -139,9 +138,8 @@ public interface DataFixerBootstrap {
* Registers all data fixes with the provided fix registrar.
*
*
This method is invoked during data fixer initialization to register all
- * {@link DataFix} instances that handle migrations between data versions. Each fix
- * defines a transformation from one version to another for a specific type or set
- * of types.
+ * {@link DataFix} instances that handle migrations between data versions. Each fix defines a transformation from
+ * one version to another for a specific type or set of types.
*
*
Implementation Guidelines
*
When implementing this method, adhere to these best practices:
@@ -179,8 +177,7 @@ public interface DataFixerBootstrap {
* Implementations do not need to be thread-safe, but they should not retain references
* to the registrar after the method returns.
*
- * @param fixes the fix registrar to populate with data migration fixes;
- * must not be {@code null}
+ * @param fixes the fix registrar to populate with data migration fixes; must not be {@code null}
* @throws NullPointerException if {@code fixes} is {@code null}
* @see DataFix
* @see FixRegistrar
diff --git a/aether-datafixers-api/src/main/java/de/splatgames/aether/datafixers/api/codec/CodecRegistry.java b/aether-datafixers-api/src/main/java/de/splatgames/aether/datafixers/api/codec/CodecRegistry.java
index a956109..8d78a6f 100644
--- a/aether-datafixers-api/src/main/java/de/splatgames/aether/datafixers/api/codec/CodecRegistry.java
+++ b/aether-datafixers-api/src/main/java/de/splatgames/aether/datafixers/api/codec/CodecRegistry.java
@@ -66,9 +66,9 @@ public interface CodecRegistry {
* Registers a codec for the given type reference.
*
*
This method associates a {@link Codec} with a {@link TypeReference}, enabling
- * later retrieval via {@link #get(TypeReference)} or {@link #require(TypeReference)}.
- * If a codec is already registered for the given reference, the behavior depends on
- * the implementation (it may replace the existing codec or throw an exception).
+ * later retrieval via {@link #get(TypeReference)} or {@link #require(TypeReference)}. If a codec is already
+ * registered for the given reference, the behavior depends on the implementation (it may replace the existing codec
+ * or throw an exception).
*
*
Example Usage
*
{@code
@@ -97,8 +97,8 @@ public interface CodecRegistry {
* Retrieves a codec by its type reference.
*
*
This method performs a lookup in the registry and returns the codec associated
- * with the given type reference, or {@code null} if no codec has been registered for
- * that reference. For a non-null guarantee, use {@link #require(TypeReference)} instead.
+ * with the given type reference, or {@code null} if no codec has been registered for that reference. For a non-null
+ * guarantee, use {@link #require(TypeReference)} instead.
*
*
*
* @param ref the type reference to look up; must not be {@code null}
- * @return the codec associated with the given reference, or {@code null} if no codec
- * is registered for that reference
+ * @return the codec associated with the given reference, or {@code null} if no codec is registered for that
+ * reference
* @throws NullPointerException if {@code ref} is {@code null}
* @see #has(TypeReference)
* @see #require(TypeReference)
@@ -123,9 +123,8 @@ public interface CodecRegistry {
* Checks whether a codec is registered for the given type reference.
*
*
This method provides a way to verify the existence of a codec registration
- * without actually retrieving the codec. It is more efficient than calling
- * {@link #get(TypeReference)} and checking for {@code null} if you only need to
- * test for presence.
+ * without actually retrieving the codec. It is more efficient than calling {@link #get(TypeReference)} and checking
+ * for {@code null} if you only need to test for presence.
*
*
*
* @param ref the type reference to check for registration; must not be {@code null}
- * @return {@code true} if a codec is registered for the given reference;
- * {@code false} otherwise
+ * @return {@code true} if a codec is registered for the given reference; {@code false} otherwise
* @throws NullPointerException if {@code ref} is {@code null}
* @see #get(TypeReference)
* @see #require(TypeReference)
@@ -151,9 +149,8 @@ public interface CodecRegistry {
* Retrieves a codec by its type reference, throwing an exception if not found.
*
*
This method is similar to {@link #get(TypeReference)} but guarantees a non-null
- * return value. If no codec is registered for the given reference, an
- * {@link IllegalStateException} is thrown. Use this method when the absence of a
- * codec indicates a programming error or misconfiguration.
+ * return value. If no codec is registered for the given reference, an {@link IllegalStateException} is thrown. Use
+ * this method when the absence of a codec indicates a programming error or misconfiguration.
*
*
Example Usage
*
{@code
@@ -191,9 +188,9 @@ default Codec> require(@NotNull final TypeReference ref) {
* Freezes this registry, making it immutable.
*
*
After freezing, any attempt to modify the registry (e.g., via
- * {@link #register(TypeReference, Codec)}) will throw an {@link IllegalStateException}.
- * This is useful for ensuring thread-safety after the initialization phase is complete,
- * as an immutable registry can be safely shared across threads without synchronization.
+ * {@link #register(TypeReference, Codec)}) will throw an {@link IllegalStateException}. This is useful for ensuring
+ * thread-safety after the initialization phase is complete, as an immutable registry can be safely shared across
+ * threads without synchronization.
*
*
Idempotency
*
This method is idempotent - calling it multiple times has no additional effect
@@ -228,8 +225,7 @@ default void freeze() {
* Returns whether this registry has been frozen and is now immutable.
*
*
A frozen registry cannot accept new codec registrations. Any call to
- * {@link #register(TypeReference, Codec)} on a frozen registry will throw an
- * {@link IllegalStateException}.
+ * {@link #register(TypeReference, Codec)} on a frozen registry will throw an {@link IllegalStateException}.
*
*
The default implementation returns {@code false}, indicating that the registry
* is always mutable. Implementations that support freezing should override this method.
*
- * @return {@code true} if this registry has been frozen and is immutable;
- * {@code false} if it is still mutable and accepts new registrations
+ * @return {@code true} if this registry has been frozen and is immutable; {@code false} if it is still mutable and
+ * accepts new registrations
* @see #freeze()
*/
default boolean isFrozen() {
diff --git a/aether-datafixers-api/src/main/java/de/splatgames/aether/datafixers/api/codec/RecordCodecBuilder.java b/aether-datafixers-api/src/main/java/de/splatgames/aether/datafixers/api/codec/RecordCodecBuilder.java
index a3da06e..2a4ee4b 100644
--- a/aether-datafixers-api/src/main/java/de/splatgames/aether/datafixers/api/codec/RecordCodecBuilder.java
+++ b/aether-datafixers-api/src/main/java/de/splatgames/aether/datafixers/api/codec/RecordCodecBuilder.java
@@ -958,9 +958,8 @@ public DataResult decode(@NotNull final DynamicOps ops,
* Internal tuple for accumulating 3 decoded values before applying the constructor.
*
*
This record is used internally during the decoding process to collect
- * intermediate results when decoding records with 3 or more fields. The values
- * are accumulated using {@link DataResult#apply2} before being passed to the
- * final constructor function.
Null Representation: YAML's {@code null} and {@code ~} values are
- * represented as Java {@code null}. The {@link #empty()} method returns {@code null}.
+ * represented by the {@link #NULL} sentinel object. Use {@link #wrap(Object)} after parsing
+ * YAML with SnakeYAML to convert Java {@code null} to the sentinel, and {@link #unwrap(Object)}
+ * before serializing to convert back.
*
Number Types: SnakeYAML preserves the specific numeric type from the
* YAML source (e.g., integers vs. floats), which is maintained in this implementation.
*
Key Types: While YAML supports complex keys, this implementation
@@ -228,6 +230,41 @@ public final class SnakeYamlOps implements DynamicOps {
*/
public static final SnakeYamlOps INSTANCE = new SnakeYamlOps();
+ /**
+ * Sentinel object representing the YAML null value.
+ *
+ *
This singleton instance represents the absence of a value in YAML, corresponding to
+ * YAML's explicit {@code null} or {@code ~} values. Unlike using Java's {@code null} directly,
+ * this sentinel allows the {@link DynamicOps} contract to be fulfilled (which requires
+ * {@link #empty()} to return a non-null value).
+ *
+ *
Usage
+ *
{@code
+ * // Check if a value is the YAML null sentinel
+ * if (value == SnakeYamlOps.NULL) {
+ * // Handle null case
+ * }
+ *
+ * // Create an explicit null value
+ * Object nullValue = SnakeYamlOps.NULL;
+ * }
+ *
+ *
Serialization Note
+ *
When serializing data containing this sentinel to YAML text using SnakeYAML, you should
+ * convert the sentinel back to Java {@code null} before serialization. Use
+ * {@link #unwrap(Object)} for this purpose:
+ *
{@code
+ * Object data = ...; // May contain YamlNull.INSTANCE
+ * Object unwrapped = SnakeYamlOps.unwrap(data);
+ * String yaml = new Yaml().dump(unwrapped);
+ * }
Returns {@code null} which represents the absence of a value in YAML. This is the
- * canonical "empty" value for the SnakeYAML format and corresponds to YAML's explicit
- * {@code null} or {@code ~} values.
+ *
Returns the {@link #NULL} sentinel which represents the absence of a value in YAML.
+ * This corresponds to YAML's explicit {@code null} or {@code ~} values.
*
- *
Unlike JSON-based implementations that return a null object (e.g., {@code JsonNull}),
- * SnakeYAML uses Java's {@code null} directly. This is used when:
- *
- *
A field has no value
- *
A conversion cannot determine the appropriate type
The sentinel object is used instead of Java's {@code null} to satisfy the
+ * {@link DynamicOps} contract which requires this method to return a non-null value.
+ * Use {@link #isNull(Object)} to check if a value is the null sentinel, or compare
+ * directly with {@code == SnakeYamlOps.NULL}.
+ *
+ *
When serializing data to YAML text format, use {@link #unwrap(Object)} to convert
+ * the sentinel back to Java {@code null} before passing to SnakeYAML.
*
- * @return {@code null}, representing the absence of a value
+ * @return the {@link #NULL} sentinel representing the absence of a value; never {@code null}
+ * @see #NULL
+ * @see #isNull(Object)
+ * @see #unwrap(Object)
*/
- @Nullable
+ @NotNull
@Override
public Object empty() {
- return null;
+ return YamlNull.INSTANCE;
}
// ==================== Type Check Operations ====================
@@ -702,20 +740,18 @@ public DataResult> getList(@NotNull final Object input) {
*
Success Conditions
*
*
Input list is a {@link List} instance
- *
Input list is {@code null} (treated as empty list)
*
*
*
Failure Conditions
*
- *
Input list is not a list or null (e.g., Map, primitive)
+ *
Input list is not a {@link List} instance (e.g., Map, primitive)
*
*
*
Immutability: The original list is never modified. A new
* {@link ArrayList} is created from the original elements, and the value is deep-copied
* before being appended to ensure nested structures are also copied.
*
- * @param list the list to append to; must not be {@code null}; may be {@code null}
- * (treated as empty list)
+ * @param list the list to append to; must not be {@code null}
* @param value the value to append; must not be {@code null}
* @return a {@link DataResult} containing the new list with the appended value,
* or an error message if the list is not valid; never {@code null}
@@ -727,10 +763,10 @@ public DataResult mergeToList(@NotNull final Object list,
@NotNull final Object value) {
Preconditions.checkNotNull(list, "list must not be null");
Preconditions.checkNotNull(value, "value must not be null");
- if (list != null && !(list instanceof List)) {
+ if (!(list instanceof List)) {
return DataResult.error("Not a list: " + list);
}
- final List result = list == null ? new ArrayList<>() : new ArrayList<>((List) list);
+ final List result = new ArrayList<>((List) list);
result.add(deepCopy(value));
return DataResult.success(result);
}
@@ -848,13 +884,12 @@ public DataResult>> getMapEntries(@NotNull final Obj
*
Success Conditions
*
*
Input map is a {@link Map} instance
- *
Input map is {@code null} (treated as empty map)
*
Key is a {@link String}
*
*
*
Failure Conditions
*
- *
Input map is not a map or null (e.g., List, primitive)
+ *
Input map is not a {@link Map} instance (e.g., List, primitive)
*
Key is not a {@link String}
*
*
@@ -862,8 +897,7 @@ public DataResult>> getMapEntries(@NotNull final Obj
* {@link LinkedHashMap} is created from the original entries, and the value is
* deep-copied before being added.
*
- * @param map the map to add the entry to; must not be {@code null}; may be {@code null}
- * (treated as empty map)
+ * @param map the map to add the entry to; must not be {@code null}
* @param key the key for the entry; must not be {@code null}; must be a {@link String}
* @param value the value for the entry; must not be {@code null}
* @return a {@link DataResult} containing the new map with the added entry,
@@ -878,15 +912,13 @@ public DataResult mergeToMap(@NotNull final Object map,
Preconditions.checkNotNull(map, "map must not be null");
Preconditions.checkNotNull(key, "key must not be null");
Preconditions.checkNotNull(value, "value must not be null");
- if (map != null && !(map instanceof Map)) {
+ if (!(map instanceof Map)) {
return DataResult.error("Not a map: " + map);
}
if (!(key instanceof String)) {
return DataResult.error("Key is not a string: " + key);
}
- final Map result = map == null
- ? new LinkedHashMap<>()
- : new LinkedHashMap<>((Map) map);
+ final Map result = new LinkedHashMap<>((Map) map);
result.put((String) key, deepCopy(value));
return DataResult.success(result);
}
@@ -900,13 +932,12 @@ public DataResult mergeToMap(@NotNull final Object map,
*
Success Conditions
*
*
Both inputs are {@link Map} instances
- *
Either input may be {@code null} (treated as empty map)
*
*
*
Failure Conditions
*
- *
First input is not a map or null
- *
Second input is not a map or null
+ *
First input is not a {@link Map} instance
+ *
Second input is not a {@link Map} instance
*
*
*
Merge Behavior
@@ -920,10 +951,8 @@ public DataResult mergeToMap(@NotNull final Object map,
* {@link LinkedHashMap} is created, and all values from the second map are deep-copied
* before being added.
*
- * @param map the base map; must not be {@code null}; may be {@code null}
- * (treated as empty map)
- * @param other the map to merge into the base; must not be {@code null}; may be
- * {@code null} (treated as empty map)
+ * @param map the base map; must not be {@code null}
+ * @param other the map to merge into the base; must not be {@code null}
* @return a {@link DataResult} containing the merged map, or an error message
* if either input is invalid; never {@code null}
*/
@@ -934,19 +963,15 @@ public DataResult mergeToMap(@NotNull final Object map,
@NotNull final Object other) {
Preconditions.checkNotNull(map, "map must not be null");
Preconditions.checkNotNull(other, "other must not be null");
- if (map != null && !(map instanceof Map)) {
+ if (!(map instanceof Map)) {
return DataResult.error("First argument is not a map: " + map);
}
- if (other != null && !(other instanceof Map)) {
+ if (!(other instanceof Map)) {
return DataResult.error("Second argument is not a map: " + other);
}
- final Map result = map == null
- ? new LinkedHashMap<>()
- : new LinkedHashMap<>((Map) map);
- if (other != null) {
- for (final Map.Entry entry : ((Map) other).entrySet()) {
- result.put(entry.getKey(), deepCopy(entry.getValue()));
- }
+ final Map result = new LinkedHashMap<>((Map) map);
+ for (final Map.Entry entry : ((Map) other).entrySet()) {
+ result.put(entry.getKey(), deepCopy(entry.getValue()));
}
return DataResult.success(result);
}
@@ -1121,20 +1146,20 @@ public boolean has(@NotNull final Object input,
* creates an {@link ArrayList} with recursively converted elements
*
Map: If {@link DynamicOps#getMapEntries} succeeds,
* creates a {@link LinkedHashMap} with recursively converted entries
- *
Fallback: Returns {@code null} if no type matches
+ *
Fallback: Returns the {@link #NULL} sentinel if no type matches
*
*
*
Edge Cases
*
*
Map entries with {@code null} keys are skipped
- *
Map entries with {@code null} values are converted to {@code null}
+ *
Map entries with {@code null} values are converted to the {@link #NULL} sentinel
*
Empty collections are preserved as empty ArrayList/LinkedHashMap
*
*
*
Format-Specific Notes
*
- *
Gson's {@code JsonNull} is converted to Java {@code null}
- *
Jackson's {@code NullNode} is converted to Java {@code null}
+ *
Gson's {@code JsonNull} is converted to the {@link #NULL} sentinel
+ *
Jackson's {@code NullNode} is converted to the {@link #NULL} sentinel
*
Numeric types are preserved where the source format supports them
*
*
@@ -1142,10 +1167,10 @@ public boolean has(@NotNull final Object input,
* {@code null}
* @param input the value to convert from the source format; must not be {@code null}
* @param the type parameter of the source format
- * @return the converted value as a SnakeYAML native type; may return {@code null}
- * for empty/null source values
+ * @return the converted value as a SnakeYAML native type; returns the {@link #NULL}
+ * sentinel for empty/null source values; never {@code null}
*/
- @Nullable
+ @NotNull
@Override
public Object convertTo(@NotNull final DynamicOps sourceOps,
@NotNull final U input) {
@@ -1193,7 +1218,7 @@ public Object convertTo(@NotNull final DynamicOps sourceOps,
);
}
- // Fallback: return null for unknown/empty types
+ // Fallback: return the NULL sentinel for unknown/empty types
return empty();
}
@@ -1210,6 +1235,7 @@ public Object convertTo(@NotNull final DynamicOps sourceOps,
*
Copy Behavior by Type
*
*
null: Returns {@code null}
+ *
YamlNull sentinel: Returns the sentinel as-is (it's a singleton)
*
Map: Creates a new {@link LinkedHashMap} with recursively
* deep-copied values (keys are assumed to be immutable strings)
*
List: Creates a new {@link ArrayList} with recursively
@@ -1224,7 +1250,7 @@ public Object convertTo(@NotNull final DynamicOps sourceOps,
* Consider using batch operations ({@link #createMap(Stream)}, {@link #createList(Stream)})
* to minimize the number of copy operations.
*
- * @param value the value to copy; may be {@code null}
+ * @param value the value to copy; may be {@code null} or the {@link #NULL} sentinel
* @return a deep copy of the value, or the value itself if it is immutable;
* {@code null} if the input is {@code null}
*/
@@ -1234,6 +1260,9 @@ private Object deepCopy(@Nullable final Object value) {
if (value == null) {
return null;
}
+ if (value == YamlNull.INSTANCE) {
+ return YamlNull.INSTANCE;
+ }
if (value instanceof Map) {
final Map original = (Map) value;
final Map copy = new LinkedHashMap<>();
@@ -1268,4 +1297,169 @@ private Object deepCopy(@Nullable final Object value) {
public String toString() {
return "SnakeYamlOps";
}
+
+ // ==================== Static Utility Methods ====================
+
+ /**
+ * Checks whether the given value is the YAML null sentinel.
+ *
+ *
This method provides a convenient way to check if a value represents YAML's null
+ * without directly comparing to {@link #NULL}.
+ *
+ * @param value the value to check; may be {@code null}
+ * @return {@code true} if the value is the YAML null sentinel, {@code false} otherwise
+ */
+ public static boolean isNull(@Nullable final Object value) {
+ return value == YamlNull.INSTANCE;
+ }
+
+ /**
+ * Recursively converts the YAML null sentinel back to Java {@code null}.
+ *
+ *
This method should be used before serializing data to YAML text format using
+ * SnakeYAML, as SnakeYAML expects Java {@code null} for null values, not the sentinel.
+ *
+ *
Conversion Behavior
+ *
+ *
{@link #NULL} sentinel is converted to Java {@code null}
+ *
{@link Map} instances are recursively processed (values only, keys are preserved)
+ *
{@link List} instances are recursively processed
+ *
All other values are returned unchanged
+ *
+ *
+ *
Example
+ *
{@code
+ * // Data structure with sentinel values
+ * Map data = new LinkedHashMap<>();
+ * data.put("name", "Alice");
+ * data.put("nickname", SnakeYamlOps.NULL);
+ *
+ * // Convert for serialization
+ * Object unwrapped = SnakeYamlOps.unwrap(data);
+ *
+ * // Now safe to serialize with SnakeYAML
+ * String yaml = new Yaml().dump(unwrapped);
+ * // Output: {name: Alice, nickname: null}
+ * }
+ *
+ * @param value the value to unwrap; may be {@code null}
+ * @return the value with all sentinel instances replaced by Java {@code null}
+ */
+ @Nullable
+ @SuppressWarnings("unchecked")
+ public static Object unwrap(@Nullable final Object value) {
+ if (value == null || value == YamlNull.INSTANCE) {
+ return null;
+ }
+ if (value instanceof Map) {
+ final Map original = (Map) value;
+ final Map result = new LinkedHashMap<>();
+ for (final Map.Entry entry : original.entrySet()) {
+ result.put(entry.getKey(), unwrap(entry.getValue()));
+ }
+ return result;
+ }
+ if (value instanceof List) {
+ final List original = (List) value;
+ final List result = new ArrayList<>();
+ for (final Object element : original) {
+ result.add(unwrap(element));
+ }
+ return result;
+ }
+ return value;
+ }
+
+ /**
+ * Recursively converts Java {@code null} values to the YAML null sentinel.
+ *
+ *
This method should be used after parsing YAML with SnakeYAML to ensure all null
+ * values are represented by the sentinel, making the data safe to use with
+ * {@link DynamicOps} methods that require non-null values.
+ *
+ *
Conversion Behavior
+ *
+ *
Java {@code null} is converted to {@link #NULL} sentinel
+ *
{@link Map} instances are recursively processed (values only, keys are preserved)
+ *
{@link List} instances are recursively processed
+ *
All other values are returned unchanged
+ *
+ *
+ *
Example
+ *
{@code
+ * // Parse YAML with SnakeYAML
+ * Yaml yaml = new Yaml();
+ * Object parsed = yaml.load("name: Alice\nnickname: null");
+ *
+ * // Wrap null values for use with DynamicOps
+ * Object wrapped = SnakeYamlOps.wrap(parsed);
+ *
+ * // Now safe to use with Dynamic
+ * Dynamic dynamic = new Dynamic<>(SnakeYamlOps.INSTANCE, wrapped);
+ * }
+ *
+ * @param value the value to wrap; may be {@code null}
+ * @return the value with all Java {@code null} instances replaced by the sentinel;
+ * never {@code null} (returns {@link #NULL} if input is {@code null})
+ */
+ @NotNull
+ @SuppressWarnings("unchecked")
+ public static Object wrap(@Nullable final Object value) {
+ if (value == null) {
+ return YamlNull.INSTANCE;
+ }
+ if (value instanceof Map) {
+ final Map original = (Map) value;
+ final Map result = new LinkedHashMap<>();
+ for (final Map.Entry entry : original.entrySet()) {
+ result.put(entry.getKey(), wrap(entry.getValue()));
+ }
+ return result;
+ }
+ if (value instanceof List) {
+ final List original = (List) value;
+ final List result = new ArrayList<>();
+ for (final Object element : original) {
+ result.add(wrap(element));
+ }
+ return result;
+ }
+ return value;
+ }
+
+ // ==================== Inner Classes ====================
+
+ /**
+ * Sentinel class representing the YAML null value.
+ *
+ *
This is a singleton class used to represent YAML's null value in a way that
+ * satisfies the {@link DynamicOps} contract (which requires non-null return values).
+ * The single instance is accessible via {@link SnakeYamlOps#NULL}.
+ *
+ *
This class is intentionally package-private and should not be instantiated
+ * or subclassed outside of {@link SnakeYamlOps}.
+ */
+ static final class YamlNull {
+ /**
+ * The singleton instance of the YAML null sentinel.
+ */
+ static final YamlNull INSTANCE = new YamlNull();
+
+ /**
+ * Private constructor to enforce singleton pattern.
+ */
+ private YamlNull() {
+ // Singleton
+ }
+
+ /**
+ * Returns a string representation of this null sentinel.
+ *
+ * @return the string {@code "null"}
+ */
+ @Override
+ public String toString() {
+ return "null";
+ }
+ }
}
diff --git a/aether-datafixers-codec/src/test/java/de/splatgames/aether/datafixers/codec/yaml/snakeyaml/SnakeYamlOpsTest.java b/aether-datafixers-codec/src/test/java/de/splatgames/aether/datafixers/codec/yaml/snakeyaml/SnakeYamlOpsTest.java
index 69b8167..32f895a 100644
--- a/aether-datafixers-codec/src/test/java/de/splatgames/aether/datafixers/codec/yaml/snakeyaml/SnakeYamlOpsTest.java
+++ b/aether-datafixers-codec/src/test/java/de/splatgames/aether/datafixers/codec/yaml/snakeyaml/SnakeYamlOpsTest.java
@@ -73,9 +73,10 @@ void toStringReturnsSnakeYamlOps() {
class EmptyValues {
@Test
- @DisplayName("empty() returns null")
- void emptyReturnsNull() {
- assertThat(ops.empty()).isNull();
+ @DisplayName("empty() returns NULL sentinel")
+ void emptyReturnsNullSentinel() {
+ assertThat(ops.empty()).isSameAs(SnakeYamlOps.NULL);
+ assertThat(SnakeYamlOps.isNull(ops.empty())).isTrue();
}
@Test
@@ -97,6 +98,138 @@ void emptyMapReturnsEmptyLinkedHashMap() {
}
}
+ @Nested
+ @DisplayName("Null Sentinel Utilities")
+ class NullSentinelUtilities {
+
+ @Test
+ @DisplayName("isNull() returns true for NULL sentinel")
+ void isNullReturnsTrueForSentinel() {
+ assertThat(SnakeYamlOps.isNull(SnakeYamlOps.NULL)).isTrue();
+ assertThat(SnakeYamlOps.isNull(ops.empty())).isTrue();
+ }
+
+ @Test
+ @DisplayName("isNull() returns false for other values")
+ void isNullReturnsFalseForOtherValues() {
+ assertThat(SnakeYamlOps.isNull(null)).isFalse();
+ assertThat(SnakeYamlOps.isNull("test")).isFalse();
+ assertThat(SnakeYamlOps.isNull(42)).isFalse();
+ assertThat(SnakeYamlOps.isNull(new LinkedHashMap<>())).isFalse();
+ }
+
+ @Test
+ @DisplayName("wrap() converts null to sentinel")
+ void wrapConvertsNullToSentinel() {
+ assertThat(SnakeYamlOps.wrap(null)).isSameAs(SnakeYamlOps.NULL);
+ }
+
+ @Test
+ @DisplayName("wrap() preserves non-null values")
+ void wrapPreservesNonNullValues() {
+ assertThat(SnakeYamlOps.wrap("test")).isEqualTo("test");
+ assertThat(SnakeYamlOps.wrap(42)).isEqualTo(42);
+ }
+
+ @Test
+ @DisplayName("wrap() recursively converts nulls in maps")
+ void wrapRecursivelyConvertsMaps() {
+ final Map input = new LinkedHashMap<>();
+ input.put("name", "Alice");
+ input.put("nickname", null);
+
+ final Object wrapped = SnakeYamlOps.wrap(input);
+
+ assertThat(wrapped).isInstanceOf(Map.class);
+ @SuppressWarnings("unchecked")
+ final Map result = (Map) wrapped;
+ assertThat(result.get("name")).isEqualTo("Alice");
+ assertThat(result.get("nickname")).isSameAs(SnakeYamlOps.NULL);
+ }
+
+ @Test
+ @DisplayName("wrap() recursively converts nulls in lists")
+ void wrapRecursivelyConvertsLists() {
+ final List input = new ArrayList<>();
+ input.add("first");
+ input.add(null);
+ input.add("third");
+
+ final Object wrapped = SnakeYamlOps.wrap(input);
+
+ assertThat(wrapped).isInstanceOf(List.class);
+ @SuppressWarnings("unchecked")
+ final List result = (List) wrapped;
+ assertThat(result.get(0)).isEqualTo("first");
+ assertThat(result.get(1)).isSameAs(SnakeYamlOps.NULL);
+ assertThat(result.get(2)).isEqualTo("third");
+ }
+
+ @Test
+ @DisplayName("unwrap() converts sentinel to null")
+ void unwrapConvertsSentinelToNull() {
+ assertThat(SnakeYamlOps.unwrap(SnakeYamlOps.NULL)).isNull();
+ }
+
+ @Test
+ @DisplayName("unwrap() preserves non-sentinel values")
+ void unwrapPreservesNonSentinelValues() {
+ assertThat(SnakeYamlOps.unwrap("test")).isEqualTo("test");
+ assertThat(SnakeYamlOps.unwrap(42)).isEqualTo(42);
+ assertThat(SnakeYamlOps.unwrap(null)).isNull();
+ }
+
+ @Test
+ @DisplayName("unwrap() recursively converts sentinels in maps")
+ void unwrapRecursivelyConvertsMaps() {
+ final Map input = new LinkedHashMap<>();
+ input.put("name", "Alice");
+ input.put("nickname", SnakeYamlOps.NULL);
+
+ final Object unwrapped = SnakeYamlOps.unwrap(input);
+
+ assertThat(unwrapped).isInstanceOf(Map.class);
+ @SuppressWarnings("unchecked")
+ final Map result = (Map) unwrapped;
+ assertThat(result.get("name")).isEqualTo("Alice");
+ assertThat(result.get("nickname")).isNull();
+ }
+
+ @Test
+ @DisplayName("unwrap() recursively converts sentinels in lists")
+ void unwrapRecursivelyConvertsLists() {
+ final List input = new ArrayList<>();
+ input.add("first");
+ input.add(SnakeYamlOps.NULL);
+ input.add("third");
+
+ final Object unwrapped = SnakeYamlOps.unwrap(input);
+
+ assertThat(unwrapped).isInstanceOf(List.class);
+ @SuppressWarnings("unchecked")
+ final List result = (List) unwrapped;
+ assertThat(result.get(0)).isEqualTo("first");
+ assertThat(result.get(1)).isNull();
+ assertThat(result.get(2)).isEqualTo("third");
+ }
+
+ @Test
+ @DisplayName("wrap() and unwrap() are inverse operations")
+ void wrapAndUnwrapAreInverse() {
+ final Map original = new LinkedHashMap<>();
+ original.put("name", "Alice");
+ original.put("nickname", null);
+ original.put("nested", new LinkedHashMap<>() {{
+ put("value", null);
+ }});
+
+ final Object wrapped = SnakeYamlOps.wrap(original);
+ final Object unwrapped = SnakeYamlOps.unwrap(wrapped);
+
+ assertThat(unwrapped).isEqualTo(original);
+ }
+ }
+
@Nested
@DisplayName("Type Checks")
class TypeChecks {
From ebd06971328c11b2e22bdaf64c4ec099e5a773e8 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Erik=20Pf=C3=B6rtner?=
Date: Mon, 12 Jan 2026 12:56:59 +0100
Subject: [PATCH 24/35] Refine null-checking logic, improve defensive copying
and constructor behavior, update dependency management, and adjust formatting
rules in `checkstyle.xml`.
---
.../aether/datafixers/api/optic/Lens.java | 3 +--
.../datafixers/api/result/DataResult.java | 3 ---
.../aether/datafixers/api/util/Either.java | 2 --
aether-datafixers-core/pom.xml | 6 +++++
.../bootstrap/DataFixerRuntimeFactory.java | 1 -
.../diagnostic/DiagnosticContextImpl.java | 25 ++++++++++++++++++-
.../core/diagnostic/MigrationReportImpl.java | 1 -
.../core/schema/SimpleSchemaRegistry.java | 4 ++-
.../testkit/TestDataListBuilder.java | 1 -
.../testkit/assertion/DynamicAssert.java | 22 +++++++++++-----
.../testkit/harness/MigrationTester.java | 2 --
checkstyle.xml | 6 +----
12 files changed, 51 insertions(+), 25 deletions(-)
diff --git a/aether-datafixers-api/src/main/java/de/splatgames/aether/datafixers/api/optic/Lens.java b/aether-datafixers-api/src/main/java/de/splatgames/aether/datafixers/api/optic/Lens.java
index c4cf0f2..f802e47 100644
--- a/aether-datafixers-api/src/main/java/de/splatgames/aether/datafixers/api/optic/Lens.java
+++ b/aether-datafixers-api/src/main/java/de/splatgames/aether/datafixers/api/optic/Lens.java
@@ -230,8 +230,7 @@ public S set(@NotNull final S source, @NotNull final A value) {
* @throws NullPointerException if {@code source} or {@code value} is {@code null}
*/
@NotNull
- T set(@NotNull final S source,
- @NotNull final B value);
+ T set(@NotNull final S source, @NotNull final B value);
/**
* Transforms the focused value using the given function.
diff --git a/aether-datafixers-api/src/main/java/de/splatgames/aether/datafixers/api/result/DataResult.java b/aether-datafixers-api/src/main/java/de/splatgames/aether/datafixers/api/result/DataResult.java
index f1dcf4d..1aedbe6 100644
--- a/aether-datafixers-api/src/main/java/de/splatgames/aether/datafixers/api/result/DataResult.java
+++ b/aether-datafixers-api/src/main/java/de/splatgames/aether/datafixers/api/result/DataResult.java
@@ -531,7 +531,6 @@ record Success(@NotNull A value) implements DataResult {
*
*