();
+ for (var blockEntityCompound : chunkReader.getBlockEntities()) {
+ var blockEntity = convertBlockEntity(blockEntityCompound);
+ if (blockEntity != null) blockEntities.add(blockEntity);
+ }
+
+ var heightmaps = new byte[PolarChunk.HEIGHTMAP_BYTE_SIZE][PolarChunk.HEIGHTMAPS.length];
+ chunkData.getCompound("Heightmaps");
+ //todo: heightmaps
+// MOTION_BLOCKING MOTION_BLOCKING_NO_LEAVES
+// OCEAN_FLOOR OCEAN_FLOOR_WG
+// WORLD_SURFACE WORLD_SURFACE_WG
+
+ var userData = new byte[0];
+
+ chunks.add(new PolarChunk(
+ chunkReader.getChunkX(),
+ chunkReader.getChunkZ(),
+ sections,
+ blockEntities,
+ heightmaps,
+ userData
+ ));
+ }
+ }
+ return chunks;
+ }
+
+ private static @Nullable PolarChunk.BlockEntity convertBlockEntity(@NotNull NBTCompound blockEntityCompound) {
+ final var x = blockEntityCompound.getInt("x");
+ final var y = blockEntityCompound.getInt("y");
+ final var z = blockEntityCompound.getInt("z");
+ if (x == null || y == null || z == null) {
+ logger.warn("Block entity could not be converted due to invalid coordinates");
+ return null;
+ }
+
+ final String blockEntityId = blockEntityCompound.getString("id");
+ if (blockEntityId == null) {
+ logger.warn("Block entity could not be converted due to missing id");
+ return null;
+ }
+
+ // Remove anvil tags
+ MutableNBTCompound mutableCopy = blockEntityCompound.toMutableCompound();
+ mutableCopy.remove("id");
+ mutableCopy.remove("x");
+ mutableCopy.remove("y");
+ mutableCopy.remove("z");
+ mutableCopy.remove("keepPacked");
+
+ return new PolarChunk.BlockEntity(x, y, z, blockEntityId, mutableCopy.toCompound());
+ }
+
+ private static @NotNull String readBlock(@NotNull NBTCompound paletteEntry) {
+ var blockName = new StringBuilder();
+ var namespaceId = Objects.requireNonNull(paletteEntry.getString("Name"))
+ .replace("minecraft:", ""); // No need to include minecraft: prefix, it is assumed.
+ blockName.append(namespaceId);
+
+ var propertiesNbt = paletteEntry.getCompound("Properties");
+ if (propertiesNbt != null && propertiesNbt.getSize() > 0) {
+ blockName.append("[");
+
+ for (var property : propertiesNbt) {
+ blockName.append(property.getKey())
+ .append("=")
+ .append(((NBTString) property.getValue()).getValue())
+ .append(",");
+ }
+ blockName.deleteCharAt(blockName.length() - 1);
+
+ blockName.append("]");
+ }
+
+ return blockName.toString();
+ }
+
+}
diff --git a/api/src/main/java/me/combimagnetron/lagoon/world/ChunkSelector.java b/api/src/main/java/me/combimagnetron/lagoon/world/ChunkSelector.java
new file mode 100644
index 0000000..2cb5540
--- /dev/null
+++ b/api/src/main/java/me/combimagnetron/lagoon/world/ChunkSelector.java
@@ -0,0 +1,31 @@
+package me.combimagnetron.lagoon.world;
+
+import org.jetbrains.annotations.NotNull;
+
+/**
+ * A {@link ChunkSelector} can be used to select some chunks from a world. This is useful for
+ * saving or loading only a select portion of a world, ignoring the rest.
+ *
+ * Polar supports {@link ChunkSelector}s in most loading/saving APIs.
+ */
+public interface ChunkSelector {
+
+ static @NotNull ChunkSelector all() {
+ return (x, z) -> true;
+ }
+
+ static @NotNull ChunkSelector radius(int radius) {
+ return radius(0, 0, radius);
+ }
+
+ static @NotNull ChunkSelector radius(int centerX, int centerZ, int radius) {
+ return (x, z) -> {
+ int dx = x - centerX;
+ int dz = z - centerZ;
+ return dx * dx + dz * dz <= radius * radius;
+ };
+ }
+
+ boolean test(int x, int z);
+
+}
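
A minimal usage sketch of the selector (the world path and radius are illustrative, IOException handling is omitted, and AnvilPolar.anvilToPolar(Path, ChunkSelector) is the conversion entry point added earlier in this patch):

    // Select only chunks within an 8-chunk radius of the origin.
    ChunkSelector selector = ChunkSelector.radius(0, 0, 8);
    PolarWorld world = AnvilPolar.anvilToPolar(Path.of("worlds/lobby"), selector);

    // The selector itself is just a predicate over chunk coordinates.
    boolean included = selector.test(3, -2); // true: 3*3 + (-2)*(-2) <= 8*8
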
diff --git a/api/src/main/java/me/combimagnetron/lagoon/world/ChunkUtils.java b/api/src/main/java/me/combimagnetron/lagoon/world/ChunkUtils.java
new file mode 100644
index 0000000..8289a0c
--- /dev/null
+++ b/api/src/main/java/me/combimagnetron/lagoon/world/ChunkUtils.java
@@ -0,0 +1,38 @@
+package me.combimagnetron.lagoon.world;
+
+public class ChunkUtils {
+
+ public static int blockIndexToChunkPositionX(int index) {
+ return index & 0xF;
+ }
+
+ public static int blockIndexToChunkPositionY(int index) {
+ int y = (index & 0x07FFFFF0) >>> 4;
+ if (((index >>> 27) & 1) == 1) y = -y;
+ return y;
+ }
+
+ public static int blockIndexToChunkPositionZ(int index) {
+ return (index >> 28) & 0xF; // 28-32 bits
+ }
+
+ public static long getChunkIndex(int chunkX, int chunkZ) {
+ return (((long) chunkX) << 32) | (chunkZ & 0xffffffffL);
+ }
+
+ public static int getBlockIndex(int x, int y, int z) {
+ x = x % 16;
+ z = z % 16;
+
+ int index = x & 0xF; // 4 bits
+ if (y > 0) {
+ index |= (y << 4) & 0x07FFFFF0; // 23 bits for |y| (the sign bit at bit 27 stays 0 for positive y)
+ } else {
+ index |= ((-y) << 4) & 0x7FFFFF0; // Make positive and use the same 23 bits
+ index |= 1 << 27; // Set the negative sign at bit 27
+ }
+ index |= (z << 28) & 0xF0000000; // 4 bits
+ return index;
+ }
+
+}
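
A short sketch of how these helpers round-trip a section-local block position through the packed int index that the block entity encoding below relies on (coordinate values are illustrative):

    // Pack a block position into the int index format and unpack it again.
    int index = ChunkUtils.getBlockIndex(5, -37, 12);
    int x = ChunkUtils.blockIndexToChunkPositionX(index); // 5
    int y = ChunkUtils.blockIndexToChunkPositionY(index); // -37
    int z = ChunkUtils.blockIndexToChunkPositionZ(index); // 12

    // Chunk columns are keyed by a single long combining both coordinates.
    long chunkKey = ChunkUtils.getChunkIndex(10, -3);
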
diff --git a/api/src/main/java/me/combimagnetron/lagoon/world/PaletteUtil.java b/api/src/main/java/me/combimagnetron/lagoon/world/PaletteUtil.java
new file mode 100644
index 0000000..e0eafab
--- /dev/null
+++ b/api/src/main/java/me/combimagnetron/lagoon/world/PaletteUtil.java
@@ -0,0 +1,38 @@
+package me.combimagnetron.lagoon.world;
+
+final class PaletteUtil {
+ private PaletteUtil() {}
+
+ public static long[] pack(int[] ints, int bitsPerEntry) {
+ int intsPerLong = (int) Math.floor(64d / bitsPerEntry);
+ long[] longs = new long[(int) Math.ceil(ints.length / (double) intsPerLong)];
+
+ long mask = (1L << bitsPerEntry) - 1L;
+ for (int i = 0; i < longs.length; i++) {
+ for (int intIndex = 0; intIndex < intsPerLong; intIndex++) {
+ int bitIndex = intIndex * bitsPerEntry;
+ int intActualIndex = intIndex + i * intsPerLong;
+ if (intActualIndex < ints.length) {
+ longs[i] |= (ints[intActualIndex] & mask) << bitIndex;
+ }
+ }
+ }
+
+ return longs;
+ }
+
+ public static void unpack(int[] out, Long[] in, int bitsPerEntry) {
+ assert in.length != 0: "unpack input array is empty";
+
+ var intsPerLong = Math.floor(64d / bitsPerEntry);
+ var intsPerLongCeil = (int) Math.ceil(intsPerLong);
+
+ long mask = (1L << bitsPerEntry) - 1L;
+ for (int i = 0; i < out.length; i++) {
+ int longIndex = i / intsPerLongCeil;
+ int subIndex = i % intsPerLongCeil;
+
+ out[i] = (int) ((in[longIndex] >>> (bitsPerEntry * subIndex)) & mask);
+ }
+ }
+}
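
A sketch of the pack/unpack round trip as the reader and writer use it. PaletteUtil is package-private, so this would live in the same package; the bit width and values are illustrative:

    // 4 bits per entry is enough for palette indices 0..15.
    int bitsPerEntry = 4;
    int[] indices = new int[PolarSection.BLOCK_PALETTE_SIZE];
    indices[0] = 7;
    indices[4095] = 15;

    long[] packed = PaletteUtil.pack(indices, bitsPerEntry);
    int[] unpacked = new int[indices.length];
    PaletteUtil.unpack(unpacked,
            java.util.Arrays.stream(packed).boxed().toArray(Long[]::new), bitsPerEntry);
    // unpacked now mirrors indices
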
diff --git a/api/src/main/java/me/combimagnetron/lagoon/world/PolarChunk.java b/api/src/main/java/me/combimagnetron/lagoon/world/PolarChunk.java
new file mode 100644
index 0000000..27e2dd9
--- /dev/null
+++ b/api/src/main/java/me/combimagnetron/lagoon/world/PolarChunk.java
@@ -0,0 +1,54 @@
+package me.combimagnetron.lagoon.world;
+
+
+import org.jetbrains.annotations.Nullable;
+import org.jglrxavpok.hephaistos.nbt.NBTCompound;
+
+import java.util.Collection;
+import java.util.List;
+
+/**
+ * A Java type representing the latest version of the chunk format.
+ */
+public record PolarChunk(
+ int x,
+ int z,
+ PolarSection[] sections,
+ Collection<BlockEntity> blockEntities,
+ byte[][] heightmaps,
+ byte[] userData
+) {
+
+ public static final int HEIGHTMAP_NONE = 0b0;
+ public static final int HEIGHTMAP_MOTION_BLOCKING = 0b1;
+ public static final int HEIGHTMAP_MOTION_BLOCKING_NO_LEAVES = 0b10;
+ public static final int HEIGHTMAP_OCEAN_FLOOR = 0b100;
+ public static final int HEIGHTMAP_OCEAN_FLOOR_WG = 0b1000;
+ public static final int HEIGHTMAP_WORLD_SURFACE = 0b10000;
+ public static final int HEIGHTMAP_WORLD_SURFACE_WG = 0b100000;
+ static final int[] HEIGHTMAPS = new int[]{
+ HEIGHTMAP_NONE,
+ HEIGHTMAP_MOTION_BLOCKING,
+ HEIGHTMAP_MOTION_BLOCKING_NO_LEAVES,
+ HEIGHTMAP_OCEAN_FLOOR,
+ HEIGHTMAP_OCEAN_FLOOR_WG,
+ HEIGHTMAP_WORLD_SURFACE,
+ HEIGHTMAP_WORLD_SURFACE_WG,
+ };
+ static final int HEIGHTMAP_BYTE_SIZE = 32;
+
+ public byte @Nullable [] heightmap(int type) {
+ return heightmaps[type];
+ }
+
+ public record BlockEntity(
+ int x,
+ int y,
+ int z,
+ @Nullable String id,
+ @Nullable NBTCompound data
+ ) {
+
+ }
+
+}
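
The HEIGHTMAP_* constants are bit flags; the reader and writer later in this patch combine them into a single mask per chunk. A small sketch of that usage:

    // Build a mask for the heightmaps a chunk carries and test it, as readChunk does.
    int mask = PolarChunk.HEIGHTMAP_MOTION_BLOCKING | PolarChunk.HEIGHTMAP_WORLD_SURFACE;
    boolean hasWorldSurface = (mask & PolarChunk.HEIGHTMAP_WORLD_SURFACE) != 0; // true
    boolean hasOceanFloor = (mask & PolarChunk.HEIGHTMAP_OCEAN_FLOOR) != 0;     // false
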
diff --git a/api/src/main/java/me/combimagnetron/lagoon/world/PolarLoader.java b/api/src/main/java/me/combimagnetron/lagoon/world/PolarLoader.java
new file mode 100644
index 0000000..1897542
--- /dev/null
+++ b/api/src/main/java/me/combimagnetron/lagoon/world/PolarLoader.java
@@ -0,0 +1,394 @@
+package me.combimagnetron.lagoon.world;
+
+import it.unimi.dsi.fastutil.shorts.Short2ObjectMap;
+import it.unimi.dsi.fastutil.shorts.Short2ObjectOpenHashMap;
+import me.combimagnetron.lagoon.world.compat.ChunkSupplierShim;
+import org.bukkit.block.Biome;
+import org.jetbrains.annotations.Contract;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.StandardOpenOption;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ForkJoinPool;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+@SuppressWarnings("UnstableApiUsage")
+public class PolarLoader implements IChunkLoader {
+ private static final BlockManager BLOCK_MANAGER = MinecraftServer.getBlockManager();
+ private static final BiomeManager BIOME_MANAGER = MinecraftServer.getBiomeManager();
+ private static final ExceptionManager EXCEPTION_HANDLER = MinecraftServer.getExceptionManager();
+ private static final Logger logger = LoggerFactory.getLogger(PolarLoader.class);
+
+ // Account for changes between main Minestom and minestom-ce.
+ private static final ChunkSupplierShim CHUNK_SUPPLIER = ChunkSupplierShim.select();
+
+ private static final Map<String, Biome> biomeCache = new ConcurrentHashMap<>();
+
+ private final Path savePath;
+ private final ReentrantReadWriteLock worldDataLock = new ReentrantReadWriteLock();
+ private final PolarWorld worldData;
+
+ private PolarWorldAccess worldAccess = null;
+ private boolean parallel = false;
+
+ public PolarLoader(@NotNull Path path) throws IOException {
+ this.savePath = path;
+ if (Files.exists(path)) {
+ this.worldData = PolarReader.read(Files.readAllBytes(path));
+ } else {
+ this.worldData = new PolarWorld();
+ }
+ }
+
+ public PolarLoader(@NotNull InputStream inputStream) throws IOException {
+ try (inputStream) {
+ this.worldData = PolarReader.read(inputStream.readAllBytes());
+ this.savePath = null;
+ }
+ }
+
+ public PolarLoader(@NotNull PolarWorld world) {
+ this.worldData = world;
+ this.savePath = null;
+ }
+
+ public @NotNull PolarWorld world() {
+ return worldData;
+ }
+
+ @Contract("_ -> this")
+ public @NotNull PolarLoader setWorldAccess(@NotNull PolarWorldAccess worldAccess) {
+ this.worldAccess = worldAccess;
+ return this;
+ }
+
+ /**
+ * Sets the loader to save and load in parallel.
+ *
+ * The Polar loader on its own supports parallel load out of the box, but
+ * a user implementation of {@link PolarWorldAccess} may not support parallel
+ * operations, so care must be taken when enabling this option.
+ *
+ * @param parallel True to load and save chunks in parallel, false otherwise.
+ * @return this
+ */
+ @Contract("_ -> this")
+ public @NotNull PolarLoader setParallel(boolean parallel) {
+ this.parallel = parallel;
+ return this;
+ }
+
+ // Loading
+
+
+ @Override
+ public boolean supportsParallelLoading() {
+ return parallel;
+ }
+
+ @Override
+ public void loadInstance(@NotNull Instance instance) {
+ //todo validate that the chunk is loadable in this world
+ }
+
+ @Override
+ public @NotNull CompletableFuture<@Nullable Chunk> loadChunk(@NotNull Instance instance, int chunkX, int chunkZ) {
+ // Only need to lock for this tiny part, chunks are immutable.
+ worldDataLock.readLock().lock();
+ var chunkData = worldData.chunkAt(chunkX, chunkZ);
+ worldDataLock.readLock().unlock();
+ if (chunkData == null) return CompletableFuture.completedFuture(null);
+
+ // We are making the assumption here that the chunk height is the same as this world.
+ // Polar includes world height metadata in the prelude and assumes all chunks match
+ // those values. We check that the dimension settings match in #loadInstance, so
+ // here it can be ignored/assumed.
+
+ // Load the chunk
+ var chunk = CHUNK_SUPPLIER.createChunk(instance, chunkX, chunkZ);
+ synchronized (chunk) {
+ //todo replace with java locks, not synchronized
+ // actually on second thought, do we really even need to lock the chunk? it is a local variable still
+ int sectionY = chunk.getMinSection();
+ for (var sectionData : chunkData.sections()) {
+ if (sectionData.isEmpty()) continue;
+
+ var section = chunk.getSection(sectionY);
+ loadSection(sectionData, section);
+ sectionY++;
+ }
+
+ for (var blockEntity : chunkData.blockEntities()) {
+ loadBlockEntity(blockEntity, chunk);
+ }
+
+ var userData = chunkData.userData();
+ if (userData.length > 0 && worldAccess != null) {
+ worldAccess.loadChunkData(chunk, new NetworkBuffer(ByteBuffer.wrap(userData)));
+ }
+ }
+
+ return CompletableFuture.completedFuture(chunk);
+ }
+
+ private void loadSection(@NotNull PolarSection sectionData, @NotNull Section section) {
+ // assumed that section is _not_ empty
+
+ // Blocks
+ var rawBlockPalette = sectionData.blockPalette();
+ var blockPalette = new Block[rawBlockPalette.length];
+ for (int i = 0; i < rawBlockPalette.length; i++) {
+ try {
+ //noinspection deprecation
+ blockPalette[i] = ArgumentBlockState.staticParse(rawBlockPalette[i]);
+ } catch (ArgumentSyntaxException e) {
+ logger.error("Failed to parse block state: {} ({})", rawBlockPalette[i], e.getMessage());
+ blockPalette[i] = Block.AIR;
+ }
+ }
+ if (blockPalette.length == 1) {
+ section.blockPalette().fill(blockPalette[0].stateId());
+ } else {
+ final var paletteData = sectionData.blockData();
+ section.blockPalette().setAll((x, y, z) -> {
+ int index = y * Chunk.CHUNK_SECTION_SIZE * Chunk.CHUNK_SECTION_SIZE + z * Chunk.CHUNK_SECTION_SIZE + x;
+ return blockPalette[paletteData[index]].stateId();
+ });
+ }
+
+ // Biomes
+ var rawBiomePalette = sectionData.biomePalette();
+ var biomePalette = new Biome[rawBiomePalette.length];
+ for (int i = 0; i < rawBiomePalette.length; i++) {
+ biomePalette[i] = biomeCache.computeIfAbsent(rawBiomePalette[i], id -> {
+ var biome = BIOME_MANAGER.getByName(NamespaceID.from(id));
+ if (biome == null) {
+ logger.error("Failed to find biome: {}", id);
+ biome = Biome.PLAINS;
+ }
+ return biome;
+ });
+ }
+ if (biomePalette.length == 1) {
+ section.biomePalette().fill(biomePalette[0].id());
+ } else {
+ final var paletteData = sectionData.biomeData();
+ section.biomePalette().setAll((x, y, z) -> {
+ int index = x / 4 + (z / 4) * 4 + (y / 4) * 16;
+
+ var paletteIndex = paletteData[index];
+ if (paletteIndex >= biomePalette.length) {
+ logger.error("Invalid biome palette index. This is probably a corrupted world, " +
+ "but it has been loaded with plains instead. No data has been written.");
+ return Biome.PLAINS.id();
+ }
+
+ return biomePalette[paletteIndex].id();
+ });
+ }
+
+ // Light
+ if (sectionData.hasBlockLightData())
+ section.setBlockLight(sectionData.blockLight());
+ if (sectionData.hasSkyLightData())
+ section.setSkyLight(sectionData.skyLight());
+ }
+
+ private void loadBlockEntity(@NotNull PolarChunk.BlockEntity blockEntity, @NotNull Chunk chunk) {
+ // Fetch the block type, we can ignore Handler/NBT since we are about to replace it
+ var block = chunk.getBlock(blockEntity.x(), blockEntity.y(), blockEntity.z(), Block.Getter.Condition.TYPE);
+
+ if (blockEntity.id() != null)
+ block = block.withHandler(BLOCK_MANAGER.getHandlerOrDummy(blockEntity.id()));
+ if (blockEntity.data() != null)
+ block = block.withNbt(blockEntity.data());
+
+ chunk.setBlock(blockEntity.x(), blockEntity.y(), blockEntity.z(), block);
+ }
+
+ // Unloading/saving
+
+
+ @Override
+ public boolean supportsParallelSaving() {
+ return parallel;
+ }
+
+ @Override
+ public @NotNull CompletableFuture<Void> saveInstance(@NotNull Instance instance) {
+ return saveChunks(instance.getChunks());
+ }
+
+ @Override
+ public void unloadChunk(Chunk chunk) {
+ updateChunkData(new Short2ObjectOpenHashMap<>(), chunk);
+ }
+
+ @Override
+ public @NotNull CompletableFuture<Void> saveChunks(@NotNull Collection<Chunk> chunks) {
+ var blockCache = new Short2ObjectOpenHashMap<String>();
+
+ // Update state of each chunk locally
+ chunks.forEach(c -> updateChunkData(blockCache, c));
+
+ // Write the file to disk
+ if (savePath != null) {
+ return CompletableFuture.runAsync(() -> {
+ try {
+ Files.write(savePath, PolarWriter.write(worldData),
+ StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING);
+ } catch (IOException e) {
+ EXCEPTION_HANDLER.handleException(new RuntimeException("Failed to save world", e));
+ }
+ }, ForkJoinPool.commonPool());
+ }
+
+ return CompletableFuture.completedFuture(null);
+ }
+
+ private void updateChunkData(@NotNull Short2ObjectMap<String> blockCache, @NotNull Chunk chunk) {
+ var dimension = chunk.getInstance().getDimensionType();
+
+ var blockEntities = new ArrayList<PolarChunk.BlockEntity>();
+ var sections = new PolarSection[dimension.getHeight() / Chunk.CHUNK_SECTION_SIZE];
+ assert sections.length == chunk.getSections().size(): "World height mismatch";
+
+ var heightmaps = new byte[PolarChunk.HEIGHTMAP_BYTE_SIZE][PolarChunk.HEIGHTMAPS.length];
+
+ var userData = new byte[0];
+
+ synchronized (chunk) {
+ for (int i = 0; i < sections.length; i++) {
+ int sectionY = i + chunk.getMinSection();
+ var section = chunk.getSection(sectionY);
+ //todo check if section is empty and skip
+
+ var blockPalette = new ArrayList<String>();
+ int[] blockData = null;
+ if (section.blockPalette().count() == 0) {
+ // Short circuit empty palette
+ blockPalette.add("air");
+ } else {
+ var localBlockData = new int[PolarSection.BLOCK_PALETTE_SIZE];
+
+ section.blockPalette().getAll((x, sectionLocalY, z, blockStateId) -> {
+ final int blockIndex = x + sectionLocalY * 16 * 16 + z * 16;
+
+ // Section palette
+ var namespace = blockCache.computeIfAbsent((short) blockStateId, unused -> blockToString(Block.fromStateId((short) blockStateId)));
+ int paletteId = blockPalette.indexOf(namespace);
+ if (paletteId == -1) {
+ paletteId = blockPalette.size();
+ blockPalette.add(namespace);
+ }
+ localBlockData[blockIndex] = paletteId;
+ });
+
+ blockData = localBlockData;
+
+ // Block entities
+ for (int sectionLocalY = 0; sectionLocalY < Chunk.CHUNK_SECTION_SIZE; sectionLocalY++) {
+ for (int z = 0; z < Chunk.CHUNK_SIZE_Z; z++) {
+ for (int x = 0; x < Chunk.CHUNK_SIZE_X; x++) {
+ int y = sectionLocalY + sectionY * Chunk.CHUNK_SECTION_SIZE;
+ var block = chunk.getBlock(x, y, z, Block.Getter.Condition.CACHED);
+ if (block == null) continue;
+
+ var handlerId = block.handler() == null ? null : block.handler().getNamespaceId().asString();
+ if (handlerId != null || block.hasNbt()) {
+ blockEntities.add(new PolarChunk.BlockEntity(
+ x, y, z, handlerId, block.nbt()
+ ));
+ }
+ }
+ }
+ }
+ }
+
+ var biomePalette = new ArrayList<String>();
+ var biomeData = new int[PolarSection.BIOME_PALETTE_SIZE];
+
+ section.biomePalette().getAll((x, y, z, id) -> {
+ var biomeId = BIOME_MANAGER.getById(id).name().asString();
+
+ var paletteId = biomePalette.indexOf(biomeId);
+ if (paletteId == -1) {
+ paletteId = biomePalette.size();
+ biomePalette.add(biomeId);
+ }
+
+ biomeData[x + z * 4 + y * 4 * 4] = paletteId;
+ });
+
+ byte[] blockLight = section.blockLight().array();
+ byte[] skyLight = section.skyLight().array();
+ if (blockLight.length != 2048 || skyLight.length != 2048) {
+ blockLight = null;
+ skyLight = null;
+ }
+
+ sections[i] = new PolarSection(
+ blockPalette.toArray(new String[0]), blockData,
+ biomePalette.toArray(new String[0]), biomeData,
+ blockLight, skyLight
+ );
+ }
+
+ //todo heightmaps
+
+ if (worldAccess != null)
+ userData = NetworkBuffer.makeArray(b -> worldAccess.saveChunkData(chunk, b));
+
+ }
+
+ worldDataLock.writeLock().lock();
+ worldData.updateChunkAt(
+ chunk.getChunkX(),
+ chunk.getChunkZ(),
+ new PolarChunk(
+ chunk.getChunkX(),
+ chunk.getChunkZ(),
+ sections,
+ blockEntities,
+ heightmaps,
+ userData
+ )
+ );
+ worldDataLock.writeLock().unlock();
+ }
+
+ @Override
+ public @NotNull CompletableFuture<Void> saveChunk(@NotNull Chunk chunk) {
+ return saveChunks(List.of(chunk));
+ }
+
+ private @NotNull String blockToString(@NotNull Block block) {
+ var builder = new StringBuilder(block.name());
+ if (block.properties().isEmpty()) return builder.toString();
+
+ builder.append('[');
+ for (var entry : block.properties().entrySet()) {
+ builder.append(entry.getKey())
+ .append('=')
+ .append(entry.getValue())
+ .append(',');
+ }
+ builder.deleteCharAt(builder.length() - 1);
+ builder.append(']');
+
+ return builder.toString();
+ }
+}
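
A usage sketch, assuming a Minestom InstanceContainer named instance whose setChunkLoader accepts an IChunkLoader; the file path is a placeholder and checked IOException handling is omitted:

    var loader = new PolarLoader(Path.of("world.polar"))
            .setParallel(true); // only if any custom PolarWorldAccess is thread-safe
    instance.setChunkLoader(loader);
    instance.loadChunk(0, 0).join(); // chunk data now comes from the Polar world
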
diff --git a/api/src/main/java/me/combimagnetron/lagoon/world/PolarReader.java b/api/src/main/java/me/combimagnetron/lagoon/world/PolarReader.java
new file mode 100644
index 0000000..9c066f4
--- /dev/null
+++ b/api/src/main/java/me/combimagnetron/lagoon/world/PolarReader.java
@@ -0,0 +1,179 @@
+package me.combimagnetron.lagoon.world;
+
+import com.github.luben.zstd.Zstd;
+import me.combimagnetron.lagoon.communication.serializer.ByteBuffer;
+import org.jetbrains.annotations.Contract;
+import org.jetbrains.annotations.NotNull;
+import org.jglrxavpok.hephaistos.nbt.*;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+public class PolarReader {
+ private PolarReader() {}
+
+ public static @NotNull PolarWorld read(byte @NotNull [] data) {
+ ByteBuffer buffer = new ByteBuffer();
+ buffer.read(data);
+ var magicNumber = buffer.readInt();
+ assertThat(magicNumber == PolarWorld.MAGIC_NUMBER, "Invalid magic number");
+
+ short version = buffer.readShort();
+ validateVersion(version);
+
+ PolarWorld.CompressionType compression = PolarWorld.CompressionType.fromId(buffer.readByte());
+ assertThat(compression != null, "Invalid compression type");
+ int compressedDataLength = buffer.readVarInt();
+
+ // Replace the buffer with a "decompressed" version. This is a no-op if compression is NONE.
+ buffer = decompressBuffer(buffer, compression, compressedDataLength);
+
+ byte minSection = buffer.readByte(), maxSection = buffer.readByte();
+ assertThat(minSection < maxSection, "Invalid section range");
+
+ var chunks = buffer.readCollection(b -> readChunk(version, b, maxSection - minSection + 1));
+
+ return new PolarWorld(version, compression, minSection, maxSection, chunks);
+ }
+
+ private static @NotNull PolarChunk readChunk(short version, ByteBuffer buffer, int sectionCount) {
+ int chunkX = buffer.readVarInt();
+ int chunkZ = buffer.readVarInt();
+
+ PolarSection[] sections = new PolarSection[sectionCount];
+ for (int i = 0; i < sectionCount; i++) {
+ sections[i] = readSection(version, buffer);
+ }
+
+ var blockEntities = buffer.readCollection(b -> readBlockEntity(version, b));
+
+ var heightmaps = new byte[PolarChunk.HEIGHTMAP_BYTE_SIZE][PolarChunk.HEIGHTMAPS.length];
+ int heightmapMask = buffer.readInt();
+ for (int i = 0; i < PolarChunk.HEIGHTMAPS.length; i++) {
+ if ((heightmapMask & PolarChunk.HEIGHTMAPS[i]) == 0)
+ continue;
+
+ heightmaps[i] = buffer.readByteArray(32);
+ }
+
+ // Objects
+ byte[] userData = new byte[0];
+ if (version > PolarWorld.VERSION_USERDATA_OPT_BLOCK_ENT_NBT)
+ userData = buffer.readByteArray();
+
+ return new PolarChunk(
+ chunkX, chunkZ,
+ sections,
+ blockEntities,
+ heightmaps,
+ userData
+ );
+ }
+
+ private static @NotNull PolarSection readSection(short version, @NotNull ByteBuffer buffer) {
+ // If section is empty exit immediately
+ if (buffer.readBoolean()) return new PolarSection();
+
+ var blockPalette = buffer.readCollection(ByteBuffer::readString).toArray(String[]::new);
+ int[] blockData = null;
+ if (blockPalette.length > 1) {
+ blockData = new int[PolarSection.BLOCK_PALETTE_SIZE];
+
+ var rawBlockData = buffer.readCollection(ByteBuffer::readLong).toArray(Long[]::new);
+ var bitsPerEntry = rawBlockData.length * 64 / PolarSection.BLOCK_PALETTE_SIZE;
+ PaletteUtil.unpack(blockData, rawBlockData, bitsPerEntry);
+ }
+
+ var biomePalette = buffer.readCollection(ByteBuffer::readString).toArray(String[]::new);
+ int[] biomeData = null;
+ if (biomePalette.length > 1) {
+ biomeData = new int[PolarSection.BIOME_PALETTE_SIZE];
+
+ var rawBiomeData = buffer.readCollection(ByteBuffer::readLong).toArray(Long[]::new);
+ var bitsPerEntry = rawBiomeData.length * 64 / PolarSection.BIOME_PALETTE_SIZE;
+ PaletteUtil.unpack(biomeData, rawBiomeData, bitsPerEntry);
+ }
+
+ byte[] blockLight = null, skyLight = null;
+
+ if (version > PolarWorld.VERSION_UNIFIED_LIGHT) {
+ if (buffer.readBoolean())
+ blockLight = buffer.readByteArray(2048);
+ if (buffer.readBoolean())
+ skyLight = buffer.readByteArray(2048);
+ } else if (buffer.readBoolean()) {
+ blockLight = buffer.readByteArray(2048);
+ skyLight = buffer.readByteArray(2048);
+ }
+
+ return new PolarSection(blockPalette, blockData, biomePalette, biomeData, blockLight, skyLight);
+ }
+
+ private static @NotNull PolarChunk.BlockEntity readBlockEntity(int version, @NotNull ByteBuffer buffer) {
+ int posIndex = buffer.readInt();
+ var id = buffer.readBoolean() ? buffer.readString() : null;
+
+ NBTCompound nbt = null;
+ if (version <= PolarWorld.VERSION_USERDATA_OPT_BLOCK_ENT_NBT || buffer.readBoolean())
+ nbt = (NBTCompound) readNbt(buffer);
+
+ return new PolarChunk.BlockEntity(
+ ChunkUtils.blockIndexToChunkPositionX(posIndex),
+ ChunkUtils.blockIndexToChunkPositionY(posIndex),
+ ChunkUtils.blockIndexToChunkPositionZ(posIndex),
+ id, nbt
+ );
+ }
+
+
+
+ private static NBT readNbt(ByteBuffer buffer) {
+ var nbtReader = new NBTReader(new InputStream() {
+ @Override
+ public int read() {
+ return buffer.readByte() & 0xFF;
+ }
+
+ @Override
+ public int available() {
+ return -1;
+ }
+ }, CompressedProcesser.NONE);
+ try {
+ return nbtReader.read();
+ } catch (IOException | NBTException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ private static void validateVersion(int version) {
+ var invalidVersionError = String.format("Unsupported Polar version. Up to %d is supported, found %d.",
+ PolarWorld.LATEST_VERSION, version);
+ assertThat(version <= PolarWorld.LATEST_VERSION, invalidVersionError);
+ }
+
+ private static @NotNull ByteBuffer decompressBuffer(@NotNull ByteBuffer buffer, @NotNull PolarWorld.CompressionType compression, int length) {
+ return switch (compression) {
+ case NONE -> buffer;
+ case ZSTD -> {
+ byte[] bytes = Zstd.decompress(buffer.toBytes(), length);
+ ByteBuffer newBuffer = new ByteBuffer();
+ newBuffer.read(bytes);
+ yield newBuffer;
+ }
+ };
+ }
+
+ @Contract("false, _ -> fail")
+ private static void assertThat(boolean condition, @NotNull String message) {
+ if (!condition) throw new Error(message);
+ }
+
+ public static class Error extends RuntimeException {
+ private Error(String message) {
+ super(message);
+ }
+ }
+
+}
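
A reading sketch; the file path is illustrative and IOException handling is omitted. Malformed input surfaces as PolarReader.Error:

    byte[] bytes = Files.readAllBytes(Path.of("world.polar"));
    PolarWorld world = PolarReader.read(bytes);
    System.out.println("version=" + world.version()
            + " compression=" + world.compression()
            + " sections=" + world.minSection() + ".." + world.maxSection()
            + " chunks=" + world.chunks().size());
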
diff --git a/api/src/main/java/me/combimagnetron/lagoon/world/PolarSection.java b/api/src/main/java/me/combimagnetron/lagoon/world/PolarSection.java
new file mode 100644
index 0000000..2b3422a
--- /dev/null
+++ b/api/src/main/java/me/combimagnetron/lagoon/world/PolarSection.java
@@ -0,0 +1,105 @@
+package me.combimagnetron.lagoon.world;
+
+import org.jetbrains.annotations.ApiStatus;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+
+/**
+ * Representation of the latest version of the section format.
+ *
+ * Marked as internal because of the use of mutable arrays. These arrays must _not_ be mutated.
+ * This class should be considered immutable.
+ */
+@ApiStatus.Internal
+public class PolarSection {
+ public static final int BLOCK_PALETTE_SIZE = 4096;
+ public static final int BIOME_PALETTE_SIZE = 64;
+
+ private final boolean empty;
+
+ private final String @NotNull [] blockPalette;
+ private final int @Nullable [] blockData;
+
+ private final String @NotNull [] biomePalette;
+ private final int @Nullable [] biomeData;
+
+ // Both light arrays are present/missing together. You cannot have one without the other.
+ private final byte @Nullable [] blockLight;
+ private final byte @Nullable [] skyLight;
+
+ public PolarSection() {
+ this.empty = true;
+
+ this.blockPalette = new String[]{"minecraft:air"};
+ this.blockData = null;
+ this.biomePalette = new String[]{"minecraft:plains"};
+ this.biomeData = null;
+
+ this.blockLight = null;
+ this.skyLight = null;
+ }
+
+ public PolarSection(
+ String @NotNull [] blockPalette, int @Nullable [] blockData,
+ String @NotNull [] biomePalette, int @Nullable [] biomeData,
+ byte @Nullable [] blockLight, byte @Nullable [] skyLight
+ ) {
+ this.empty = false;
+
+ this.blockPalette = blockPalette;
+ this.blockData = blockData;
+ this.biomePalette = biomePalette;
+ this.biomeData = biomeData;
+
+ this.blockLight = blockLight;
+ this.skyLight = skyLight;
+ }
+
+ public boolean isEmpty() {
+ return empty;
+ }
+
+ public @NotNull String @NotNull [] blockPalette() {
+ return blockPalette;
+ }
+
+ /**
+ * Returns the uncompressed palette data. Each int corresponds to an index in the palette.
+ * Always has a length of 4096.
+ */
+ public int[] blockData() {
+ assert blockData != null : "must check length of blockPalette() before using blockData()";
+ return blockData;
+ }
+
+ public @NotNull String @NotNull [] biomePalette() {
+ return biomePalette;
+ }
+
+ /**
+ * Returns the uncompressed palette data. Each int corresponds to an index in the palette.
+ * Always has a length of 64.
+ */
+ public int[] biomeData() {
+ assert biomeData != null : "must check length of biomePalette() before using biomeData()";
+ return biomeData;
+ }
+
+ public boolean hasBlockLightData() {
+ return blockLight != null;
+ }
+
+ public boolean hasSkyLightData() {
+ return skyLight != null;
+ }
+
+ public byte[] blockLight() {
+ assert blockLight != null : "must check hasBlockLightData() before calling blockLight()";
+ return blockLight;
+ }
+
+ public byte[] skyLight() {
+ assert skyLight != null : "must check hasSkyLightData() before calling skyLight()";
+ return skyLight;
+ }
+}
diff --git a/api/src/main/java/me/combimagnetron/lagoon/world/PolarWorld.java b/api/src/main/java/me/combimagnetron/lagoon/world/PolarWorld.java
new file mode 100644
index 0000000..af8f6eb
--- /dev/null
+++ b/api/src/main/java/me/combimagnetron/lagoon/world/PolarWorld.java
@@ -0,0 +1,97 @@
+package me.combimagnetron.lagoon.world;
+
+import it.unimi.dsi.fastutil.longs.Long2ObjectMap;
+import it.unimi.dsi.fastutil.longs.Long2ObjectOpenHashMap;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+
+import java.util.Collection;
+import java.util.List;
+
+/**
+ * A Java type representing the latest version of the world format.
+ */
+public class PolarWorld {
+ public static final int MAGIC_NUMBER = 0x506F6C72; // `Polr`
+ public static final short LATEST_VERSION = 3;
+
+ static final short VERSION_UNIFIED_LIGHT = 1;
+ static final short VERSION_USERDATA_OPT_BLOCK_ENT_NBT = 2;
+
+ public static CompressionType DEFAULT_COMPRESSION = CompressionType.ZSTD;
+
+ // Polar metadata
+ private final short version;
+ private CompressionType compression;
+
+ // World metadata
+ private final byte minSection;
+ private final byte maxSection;
+
+ // Chunk data
+ private final Long2ObjectMap<PolarChunk> chunks = new Long2ObjectOpenHashMap<>();
+
+ public PolarWorld() {
+ this(LATEST_VERSION, DEFAULT_COMPRESSION, (byte) -4, (byte) 19, List.of());
+ }
+
+ public PolarWorld(
+ short version,
+ @NotNull CompressionType compression,
+ byte minSection, byte maxSection,
+ @NotNull Collection<PolarChunk> chunks
+ ) {
+ this.version = version;
+ this.compression = compression;
+
+ this.minSection = minSection;
+ this.maxSection = maxSection;
+
+ for (var chunk : chunks) {
+ var index = ChunkUtils.getChunkIndex(chunk.x(), chunk.z());
+ this.chunks.put(index, chunk);
+ }
+ }
+
+ public short version() {
+ return version;
+ }
+
+ public @NotNull CompressionType compression() {
+ return compression;
+ }
+ public void setCompression(@NotNull CompressionType compression) {
+ this.compression = compression;
+ }
+
+ public byte minSection() {
+ return minSection;
+ }
+
+ public byte maxSection() {
+ return maxSection;
+ }
+
+ public @Nullable PolarChunk chunkAt(int x, int z) {
+ return chunks.getOrDefault(ChunkUtils.getChunkIndex(x, z), null);
+ }
+ public void updateChunkAt(int x, int z, @NotNull PolarChunk chunk) {
+ chunks.put(ChunkUtils.getChunkIndex(x, z), chunk);
+ }
+
+ public @NotNull Collection<PolarChunk> chunks() {
+ return chunks.values();
+ }
+
+ public enum CompressionType {
+ NONE,
+ ZSTD;
+
+ private static final CompressionType[] VALUES = values();
+
+ public static @Nullable CompressionType fromId(int id) {
+ if (id < 0 || id >= VALUES.length) return null;
+ return VALUES[id];
+ }
+ }
+}
diff --git a/api/src/main/java/me/combimagnetron/lagoon/world/PolarWorldAccess.java b/api/src/main/java/me/combimagnetron/lagoon/world/PolarWorldAccess.java
new file mode 100644
index 0000000..246a1fd
--- /dev/null
+++ b/api/src/main/java/me/combimagnetron/lagoon/world/PolarWorldAccess.java
@@ -0,0 +1,38 @@
+package me.combimagnetron.lagoon.world;
+
+import net.minestom.server.instance.Chunk;
+import net.minestom.server.network.NetworkBuffer;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+
+/**
+ * Provides access to user world data for a {@link PolarLoader} to get and set user
+ * specific world data such as objects, as well as provides some relevant callbacks.
+ *
+ * Use of world access is completely optional; dependent features will not add
+ * overhead to the format if unused.
+ */
+@SuppressWarnings("UnstableApiUsage")
+public interface PolarWorldAccess {
+
+ /**
+ * Called when a chunk is created, just before it is added to the world.
+ *
+ * Can be used to initialize the chunk based on saved user data in the world.
+ *
+ * @param chunk The Minestom chunk being created
+ * @param userData The saved user data, or null if none is present
+ */
+ default void loadChunkData(@NotNull Chunk chunk, @Nullable NetworkBuffer userData) {}
+
+ /**
+ * Called when a chunk is being saved.
+ *
+ * Can be used to save user data in the chunk by writing it to the buffer.
+ *
+ * @param chunk The Minestom chunk being saved
+ * @param userData A buffer to write user data to save
+ */
+ default void saveChunkData(@NotNull Chunk chunk, @NotNull NetworkBuffer userData) {}
+
+}
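
A minimal sketch of an implementation that stores one int of user data per chunk. The class name and stored value are hypothetical, and NetworkBuffer.INT with read/write(Type) is assumed from the Minestom NetworkBuffer API already used by PolarLoader:

    public final class ExampleWorldAccess implements PolarWorldAccess {
        @Override
        public void loadChunkData(@NotNull Chunk chunk, @Nullable NetworkBuffer userData) {
            if (userData == null) return; // nothing was saved for this chunk
            int marker = userData.read(NetworkBuffer.INT);
            // ... apply the value to the chunk
        }

        @Override
        public void saveChunkData(@NotNull Chunk chunk, @NotNull NetworkBuffer userData) {
            userData.write(NetworkBuffer.INT, 42); // illustrative value
        }
    }
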
diff --git a/api/src/main/java/me/combimagnetron/lagoon/world/PolarWriter.java b/api/src/main/java/me/combimagnetron/lagoon/world/PolarWriter.java
new file mode 100644
index 0000000..e26ae9d
--- /dev/null
+++ b/api/src/main/java/me/combimagnetron/lagoon/world/PolarWriter.java
@@ -0,0 +1,111 @@
+package me.combimagnetron.lagoon.world;
+
+import com.github.luben.zstd.Zstd;
+import me.combimagnetron.lagoon.communication.serializer.ByteBuffer;
+import org.jetbrains.annotations.NotNull;
+import org.jglrxavpok.hephaistos.nbt.CompressedProcesser;
+import org.jglrxavpok.hephaistos.nbt.NBTCompound;
+import org.jglrxavpok.hephaistos.nbt.NBTWriter;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.Arrays;
+
+public class PolarWriter {
+ private PolarWriter() {}
+
+ public static byte[] write(@NotNull PolarWorld world) {
+ // Write the compressed content first
+ var content = new ByteBuffer();
+ content.writeByte(world.minSection());
+ content.writeByte(world.maxSection());
+ content.writeCollection(world.chunks(), PolarWriter::writeChunk);
+ ByteBuffer byteBuf = new ByteBuffer();
+ byteBuf.writeInt(PolarWorld.MAGIC_NUMBER);
+ byteBuf.writeShort(PolarWorld.LATEST_VERSION);
+ byteBuf.writeByte((byte) world.compression().ordinal());
+ switch (world.compression()) {
+ case NONE -> {
+ byteBuf.writeByteArray(content.toBytes());
+ }
+ case ZSTD -> {
+ byteBuf.writeByteArray(Zstd.compress(content.toBytes()));
+ }
+ }
+ // Create final buffer
+ return byteBuf.toBytes();
+ }
+
+ private static void writeChunk(@NotNull ByteBuffer buffer, @NotNull PolarChunk chunk) {
+ buffer.writeVarInt(chunk.x());
+ buffer.writeVarInt(chunk.z());
+
+ for (var section : chunk.sections()) {
+ writeSection(buffer, section);
+ }
+ buffer.writeCollection(chunk.blockEntities(), PolarWriter::writeBlockEntity);
+
+ //todo heightmaps
+ buffer.writeInt(PolarChunk.HEIGHTMAP_NONE);
+
+ buffer.writeByteArray(chunk.userData());
+ }
+
+ private static void writeSection(@NotNull ByteBuffer buffer, @NotNull PolarSection section) {
+ buffer.writeBoolean(section.isEmpty());
+ if (section.isEmpty()) return;
+
+ // Blocks
+ String[] blockPalette = section.blockPalette();
+ buffer.writeCollection(Arrays.stream(blockPalette).toList(), ByteBuffer::writeString);
+ if (blockPalette.length > 1) {
+ var blockData = section.blockData();
+ var bitsPerEntry = (int) Math.ceil(Math.log(blockPalette.length) / Math.log(2));
+ if (bitsPerEntry < 1) bitsPerEntry = 1;
+ buffer.writeCollection(Arrays.stream(PaletteUtil.pack(blockData, bitsPerEntry)).boxed().toList(), ByteBuffer::writeLong);
+ }
+
+ // Biomes
+ var biomePalette = section.biomePalette();
+ buffer.writeCollection(Arrays.stream(biomePalette).toList(), ByteBuffer::writeString);
+ if (biomePalette.length > 1) {
+ var biomeData = section.biomeData();
+ var bitsPerEntry = (int) Math.ceil(Math.log(biomePalette.length) / Math.log(2));
+ if (bitsPerEntry < 1) bitsPerEntry = 1;
+ buffer.writeCollection(Arrays.stream(PaletteUtil.pack(biomeData, bitsPerEntry)).boxed().toList(), ByteBuffer::writeLong);
+ }
+
+ // Light
+ buffer.writeBoolean(section.hasBlockLightData());
+ if (section.hasBlockLightData())
+ buffer.writeByteArray(section.blockLight());
+ buffer.writeBoolean(section.hasSkyLightData());
+ if (section.hasSkyLightData())
+ buffer.writeByteArray(section.skyLight());
+ }
+
+ private static void writeBlockEntity(@NotNull ByteBuffer buffer, @NotNull PolarChunk.BlockEntity blockEntity) {
+ var index = ChunkUtils.getBlockIndex(blockEntity.x(), blockEntity.y(), blockEntity.z());
+ buffer.writeInt(index);
+ buffer.writeBoolean(blockEntity.id() != null);
+ if (blockEntity.id() != null) buffer.writeString(blockEntity.id());
+ buffer.writeBoolean(blockEntity.data() != null);
+ if (blockEntity.data() != null) writeNbt(buffer, blockEntity.data());
+ }
+
+ private static void writeNbt(ByteBuffer buffer, NBTCompound compound) {
+ var nbtWriter = new NBTWriter(new OutputStream() {
+ @Override
+ public void write(int b) {
+ buffer.writeByte((byte) b);
+ }
+ }, CompressedProcesser.NONE);
+ try {
+ nbtWriter.writeNamed("", compound);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+}
diff --git a/api/src/main/java/me/combimagnetron/lagoon/world/compat/ChunkSupplierShim.java b/api/src/main/java/me/combimagnetron/lagoon/world/compat/ChunkSupplierShim.java
new file mode 100644
index 0000000..684627b
--- /dev/null
+++ b/api/src/main/java/me/combimagnetron/lagoon/world/compat/ChunkSupplierShim.java
@@ -0,0 +1,30 @@
+package me.combimagnetron.lagoon.world.compat;
+
+import net.minestom.server.instance.Chunk;
+import net.minestom.server.instance.DynamicChunk;
+import net.minestom.server.instance.Instance;
+import org.jetbrains.annotations.ApiStatus;
+import org.jetbrains.annotations.NotNull;
+
+/**
+ * A shim for {@link net.minestom.server.utils.chunk.ChunkSupplier} to allow for
+ * compatibility with main Minestom which does not have the lighting PR (which
+ * adds {@link net.minestom.server.utils.chunk.ChunkSupplier}).
+ */
+@ApiStatus.Internal
+@FunctionalInterface
+public interface ChunkSupplierShim {
+
+ static @NotNull ChunkSupplierShim select() {
+ try {
+ // If this function is present we have the lighting branch and should use that chunk supplier
+ Instance.class.getDeclaredMethod("getChunkSupplier");
+ return (instance, cx, cz) -> instance.getChunkSupplier().createChunk(instance, cx, cz);
+ } catch (NoSuchMethodException e) {
+ // Otherwise we should use the default chunk supplier
+ return DynamicChunk::new;
+ }
+ }
+
+ @NotNull Chunk createChunk(@NotNull Instance instance, int chunkX, int chunkZ);
+}
diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml
index 7dba0fb..5f78bd8 100644
--- a/gradle/libs.versions.toml
+++ b/gradle/libs.versions.toml
@@ -4,12 +4,7 @@ metadata.format.version = "1.1"
# Important dependencies
adventure = "4.12.0"
-kotlin = "1.7.22"
-hydrazine = "1.7.2"
-dependencyGetter = "v1.0.1"
-minestomData = "1c1921cd41"
hephaistos = "2.5.3"
-jetbrainsAnnotations = "23.0.0"
# Terminal / Logging
tinylog = "2.5.0"
@@ -51,59 +46,10 @@ adventure-serializer-gson = { group = "net.kyori", name = "adventure-text-serial
adventure-serializer-legacy = { group = "net.kyori", name = "adventure-text-serializer-legacy", version.ref = "adventure" }
adventure-serializer-plain = { group = "net.kyori", name = "adventure-text-serializer-plain", version.ref = "adventure" }
adventure-text-logger-slf4j = { group = "net.kyori", name = "adventure-text-logger-slf4j", version.ref = "adventure" }
-
-# Kotlin
-kotlin-reflect = { group = "org.jetbrains.kotlin", name = "kotlin-reflect", version.ref = "kotlin" }
-kotlin-stdlib-jdk8 = { group = "org.jetbrains.kotlin", name = "kotlin-stdlib-jdk8", version.ref = "kotlin" }
-
-# Miscellaneous
-hydrazine = { group = "com.github.MadMartian", name = "hydrazine-path-finding", version.ref = "hydrazine" }
-dependencyGetter = { group = "com.github.Minestom", name = "DependencyGetter", version.ref = "dependencyGetter" }
-minestomData = { group = "com.github.Minestom", name = "MinestomDataGenerator", version.ref = "minestomData" }
-jetbrainsAnnotations = { group = "org.jetbrains", name = "annotations", version.ref = "jetbrainsAnnotations" }
-
-# Logging
-tinylog-api = { group = "org.tinylog", name = "tinylog-api", version.ref = "tinylog" }
-tinylog-impl = { group = "org.tinylog", name = "tinylog-impl", version.ref = "tinylog" }
-tinylog-slf4j = { group = "org.tinylog", name = "slf4j-tinylog", version.ref = "tinylog" }
-
-# Terminal
-jline = { group = "org.jline", name = "jline", version.ref = "jline" }
-jline-jansi = { group = "org.jline", name = "jline-terminal-jansi", version.ref = "jline" }
-
-# Performance / Data Structures
caffeine = { group = "com.github.ben-manes.caffeine", name = "caffeine", version.ref = "caffeine" }
fastutil = { group = "it.unimi.dsi", name = "fastutil", version.ref = "fastutil" }
-flare = { group = "space.vectrix.flare", name = "flare", version.ref = "flare" }
-flare-fastutil = { group = "space.vectrix.flare", name = "flare-fastutil", version.ref = "flare" }
gson = { group = "com.google.code.gson", name = "gson", version.ref = "gson" }
-jcTools = { group = "org.jctools", name = "jctools-core", version.ref = "jcTools" }
-
-# Test
-junit-api = { group = "org.junit.jupiter", name = "junit-jupiter-api", version.ref = "junit-jupiter" }
-junit-engine = { group = "org.junit.jupiter", name = "junit-jupiter-engine", version.ref = "junit-jupiter" }
-junit-params = { group = "org.junit.jupiter", name = "junit-jupiter-params", version.ref = "junit-jupiter" }
-junit-suite-api = { group = "org.junit.platform", name = "junit-platform-suite-api", version.ref = "junit-platform" }
-junit-suite-engine = { group = "org.junit.platform", name = "junit-platform-suite-engine", version.ref = "junit-platform" }
-mockito-core = { group = "org.mockito", name = "mockito-core", version.ref = "mockito" }
-
-# Code Generation
-javaPoet = { group = "com.squareup", name = "javapoet", version.ref = "javaPoet" }
-
-# Demo
-jNoise = { group = "com.github.Articdive.JNoise", name = "jnoise-pipeline", version.ref = "jNoise" }
-
-# JMH
-jmh-core = { group = "org.openjdk.jmh", name = "jmh-core", version.ref = "jmh" }
-jmh-annotationprocessor = { group = "org.openjdk.jmh", name = "jmh-generator-annprocess", version.ref = "jmh" }
-
-# JCStress
-jcstress-core = { group = "org.openjdk.jcstress", name = "jcstress-core", version.ref = "jcstress" }
[bundles]
-kotlin = ["kotlin-stdlib-jdk8", "kotlin-reflect"]
-flare = ["flare", "flare-fastutil"]
adventure = ["adventure-api", "adventure-serializer-gson", "adventure-serializer-legacy", "adventure-serializer-plain", "adventure-text-logger-slf4j"]
-logging = ["tinylog-api", "tinylog-impl", "tinylog-slf4j"]
-terminal = ["jline", "jline-jansi"]
From 317afe206f4f94c08b480ac09d4e88f9ec25b047 Mon Sep 17 00:00:00 2001
From: Combimagnetron
Date: Fri, 4 Aug 2023 23:13:32 +0200
Subject: [PATCH 2/6] feat(n/a): update ByteBuffer to the master branch
variant.
---
.../communication/serializer/ByteBuffer.java | 122 ++++++++++++++----
1 file changed, 94 insertions(+), 28 deletions(-)
diff --git a/api/src/main/java/me/combimagnetron/lagoon/communication/serializer/ByteBuffer.java b/api/src/main/java/me/combimagnetron/lagoon/communication/serializer/ByteBuffer.java
index c789e70..578bfe5 100644
--- a/api/src/main/java/me/combimagnetron/lagoon/communication/serializer/ByteBuffer.java
+++ b/api/src/main/java/me/combimagnetron/lagoon/communication/serializer/ByteBuffer.java
@@ -7,9 +7,16 @@
import net.kyori.adventure.text.serializer.gson.GsonComponentSerializer;
import java.io.*;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
import java.util.UUID;
+import java.util.function.BiConsumer;
+import java.util.function.Function;
-public abstract class ByteBuffer {
+public class ByteBuffer {
+ private static final int SEGMENT_BITS = 0x7F;
+ private static final int CONTINUE_BIT = 0x80;
private ByteArrayDataInput byteArrayDataInput;
private ByteArrayDataOutput byteArrayDataOutput;
@@ -21,69 +28,83 @@ public void read(byte[] bytes) {
byteArrayDataInput = ByteStreams.newDataInput(bytes);
}
- protected void writeString(String string) {
+ public void writeString(String string) {
checkNotNull();
byteArrayDataOutput.writeUTF(string);
}
- protected void writeUUID(UUID uuid) {
+ public void writeUUID(UUID uuid) {
checkNotNull();
byteArrayDataOutput.writeLong(uuid.getMostSignificantBits());
byteArrayDataOutput.writeLong(uuid.getLeastSignificantBits());
}
- protected void writeAdventureComponent(Component component) {
+ public void writeAdventureComponent(Component component) {
checkNotNull();
byteArrayDataOutput.writeUTF(GsonComponentSerializer.gson().serialize(component));
}
- protected void writeChar(char chr) {
+ public void writeChar(char chr) {
checkNotNull();
byteArrayDataOutput.writeChar(chr);
}
- protected void writeDouble(double dbl) {
+ public void writeDouble(double dbl) {
checkNotNull();
byteArrayDataOutput.writeDouble(dbl);
}
- protected void writeFloat(float flt) {
+ public void writeFloat(float flt) {
checkNotNull();
byteArrayDataOutput.writeFloat(flt);
}
- protected void writeLong(long lng) {
+ public void writeLong(long lng) {
checkNotNull();
byteArrayDataOutput.writeLong(lng);
}
- protected void writeInt(int i) {
+ public void writeInt(int i) {
checkNotNull();
byteArrayDataOutput.writeInt(i);
}
- protected void writeShort(short shrt) {
+ public void writeShort(short shrt) {
checkNotNull();
byteArrayDataOutput.writeShort(shrt);
}
- protected void writeByteArray(byte... bytes) {
+ public void writeByteArray(byte... bytes) {
checkNotNull();
byteArrayDataOutput.write(bytes.length);
byteArrayDataOutput.write(bytes);
}
- protected void writeByte(byte bte) {
+ public void writeByte(byte bte) {
checkNotNull();
byteArrayDataOutput.write(bte);
}
- protected void writeBoolean(boolean bool) {
+ public void writeBoolean(boolean bool) {
checkNotNull();
byteArrayDataOutput.writeBoolean(bool);
}
- protected void writeObject(Object object) {
+ public void writeVarInt(int value) {
+ while (true) {
+ if ((value & ~SEGMENT_BITS) == 0) {
+ writeByte((byte) value);
+ return;
+ }
+
+ writeByte((byte) ((value & SEGMENT_BITS) | CONTINUE_BIT));
+
+ // Note: >>> means that the sign bit is shifted with the rest of the number rather than being left alone
+ value >>>= 7;
+ }
+ }
+
+ public void writeObject(Object object) {
checkNotNull();
try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
ObjectOutputStream objectOutputStream = new ObjectOutputStream(byteArrayOutputStream)) {
@@ -96,60 +117,105 @@ protected void writeObject(Object object) {
}
}
- protected String readString() {
+ public String readString() {
return byteArrayDataInput.readUTF();
}
- protected UUID readUUID() {
+ public UUID readUUID() {
return new UUID(byteArrayDataInput.readLong(), byteArrayDataInput.readLong());
}
- protected Component readAdventureComponent() {
+ public Component readAdventureComponent() {
return GsonComponentSerializer.gson().deserialize(readString());
}
- protected char readChar() {
+ public char readChar() {
return byteArrayDataInput.readChar();
}
- protected double readDouble() {
+ public double readDouble() {
return byteArrayDataInput.readDouble();
}
- protected float readFloat() {
+ public float readFloat() {
return byteArrayDataInput.readFloat();
}
- protected long readLong() {
+ public long readLong() {
return byteArrayDataInput.readLong();
}
- protected int readInt() {
+ public int readInt() {
return byteArrayDataInput.readInt();
}
- protected short readShort() {
+ public short readShort() {
return byteArrayDataInput.readShort();
}
- protected byte[] readByteArray(byte... bytes) {
+ public byte[] readByteArray() {
int arraySize = readInt();
byte[] byteArray = new byte[arraySize];
for (int i = 0; i < arraySize; i++)
byteArray[i] = readByte();
+ return byteArray;
+ }
+ public byte[] readByteArray(int length) {
+ byte[] byteArray = new byte[length];
+ for (int i = 0; i < length; i++)
+ byteArray[i] = readByte();
return byteArray;
}
- protected byte readByte() {
+ public <T> void writeCollection(Collection<T> values, BiConsumer<ByteBuffer, T> consumer) {
+ if (values == null) {
+ writeByte((byte) 0);
+ return;
+ }
+ writeVarInt(values.size());
+ for (T value : values) {
+ consumer.accept(this, value);
+ }
+ }
+
+ public <T> Collection<T> readCollection(Function<ByteBuffer, T> function) {
+ final int size = readVarInt();
+ List<T> values = new ArrayList<>(size);
+ for (int i = 0; i < size; i++) {
+ values.add(function.apply(this));
+ }
+ return values;
+ }
+
+ public int readVarInt() {
+ int value = 0;
+ int position = 0;
+ byte currentByte;
+
+ while (true) {
+ currentByte = readByte();
+ value |= (currentByte & SEGMENT_BITS) << position;
+
+ if ((currentByte & CONTINUE_BIT) == 0) break;
+
+ position += 7;
+
+ if (position >= 32) throw new RuntimeException("VarInt is too big");
+ }
+
+ return value;
+ }
+
+ public byte readByte() {
return byteArrayDataInput.readByte();
}
- protected boolean readBoolean() {
+ public boolean readBoolean() {
return byteArrayDataInput.readBoolean();
}
- protected Object readObject() {
+ public Object readObject() {
byte[] bytes = readByteArray();
try (ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInputStream = new ObjectInputStream(byteArrayInputStream)) {
@@ -163,4 +229,4 @@ public byte[] toBytes() {
return byteArrayDataOutput.toByteArray();
}
-}
+}
\ No newline at end of file
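
A round-trip sketch of the new VarInt helpers on the updated ByteBuffer; 300 is an illustrative value that needs two bytes in the encoding:

    ByteBuffer out = new ByteBuffer();
    out.writeVarInt(300); // written as 0xAC 0x02

    ByteBuffer in = new ByteBuffer();
    in.read(out.toBytes());
    int value = in.readVarInt(); // 300
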
From 77cc4c8da863847af369206ed188a99c4c390a4e Mon Sep 17 00:00:00 2001
From: Combimagnetron
Date: Fri, 4 Aug 2023 23:14:04 +0200
Subject: [PATCH 3/6] refact(n/a): rename world system and remove unneeded
files.
---
.../{AnvilPolar.java => AnvilMeridian.java} | 46 ++++++++--------
.../{PolarChunk.java => MeridianChunk.java} | 6 +--
.../{PolarLoader.java => MeridianLoader.java} | 46 ++++++++--------
.../{PolarReader.java => MeridianReader.java} | 54 +++++++++----------
...PolarSection.java => MeridianSection.java} | 6 +--
.../{PolarWorld.java => MeridianWorld.java} | 16 +++---
.../{PolarWriter.java => MeridianWriter.java} | 22 ++++----
.../lagoon/world/PolarWorldAccess.java | 38 -------------
.../world/compat/ChunkSupplierShim.java | 30 -----------
9 files changed, 98 insertions(+), 166 deletions(-)
rename api/src/main/java/me/combimagnetron/lagoon/world/{AnvilPolar.java => AnvilMeridian.java} (88%)
rename api/src/main/java/me/combimagnetron/lagoon/world/{PolarChunk.java => MeridianChunk.java} (93%)
rename api/src/main/java/me/combimagnetron/lagoon/world/{PolarLoader.java => MeridianLoader.java} (89%)
rename api/src/main/java/me/combimagnetron/lagoon/world/{PolarReader.java => MeridianReader.java} (69%)
rename api/src/main/java/me/combimagnetron/lagoon/world/{PolarSection.java => MeridianSection.java} (97%)
rename api/src/main/java/me/combimagnetron/lagoon/world/{PolarWorld.java => MeridianWorld.java} (84%)
rename api/src/main/java/me/combimagnetron/lagoon/world/{PolarWriter.java => MeridianWriter.java} (86%)
delete mode 100644 api/src/main/java/me/combimagnetron/lagoon/world/PolarWorldAccess.java
delete mode 100644 api/src/main/java/me/combimagnetron/lagoon/world/compat/ChunkSupplierShim.java
diff --git a/api/src/main/java/me/combimagnetron/lagoon/world/AnvilPolar.java b/api/src/main/java/me/combimagnetron/lagoon/world/AnvilMeridian.java
similarity index 88%
rename from api/src/main/java/me/combimagnetron/lagoon/world/AnvilPolar.java
rename to api/src/main/java/me/combimagnetron/lagoon/world/AnvilMeridian.java
index 7766795..1cda9e0 100644
--- a/api/src/main/java/me/combimagnetron/lagoon/world/AnvilPolar.java
+++ b/api/src/main/java/me/combimagnetron/lagoon/world/AnvilMeridian.java
@@ -21,8 +21,8 @@
import java.util.List;
import java.util.Objects;
-public class AnvilPolar {
- private static final Logger logger = LoggerFactory.getLogger(AnvilPolar.class);
+public class AnvilMeridian {
+ private static final Logger logger = LoggerFactory.getLogger(AnvilMeridian.class);
private static final boolean FILE_RW_MODE = Boolean.getBoolean("polar.anvil_rw_mode");
public static final String FILE_RW_MODE_ERROR = """
@@ -43,7 +43,7 @@ public class AnvilPolar {
* @return The Polar world representing the given Anvil world
* @throws IOException If there was an error reading the anvil world
*/
- public static @NotNull PolarWorld anvilToPolar(@NotNull Path path) throws IOException {
+ public static @NotNull MeridianWorld anvilToPolar(@NotNull Path path) throws IOException {
return anvilToPolar(path, -4, 19, ChunkSelector.all());
}
@@ -58,7 +58,7 @@ public class AnvilPolar {
* @return The Polar world representing the given Anvil world
* @throws IOException If there was an error reading the anvil world
*/
- public static @NotNull PolarWorld anvilToPolar(@NotNull Path path, @NotNull ChunkSelector selector) throws IOException {
+ public static @NotNull MeridianWorld anvilToPolar(@NotNull Path path, @NotNull ChunkSelector selector) throws IOException {
return anvilToPolar(path, -4, 19, selector);
}
@@ -75,7 +75,7 @@ public class AnvilPolar {
* @return The Polar world representing the given Anvil world
* @throws IOException If there was an error reading the anvil world
*/
- public static @NotNull PolarWorld anvilToPolar(@NotNull Path path, int minSection, int maxSection) throws IOException {
+ public static @NotNull MeridianWorld anvilToPolar(@NotNull Path path, int minSection, int maxSection) throws IOException {
return anvilToPolar(path, minSection, maxSection, ChunkSelector.all());
}
@@ -93,8 +93,8 @@ public class AnvilPolar {
* @return The Polar world representing the given Anvil world
* @throws IOException If there was an error reading the anvil world
*/
- public static @NotNull PolarWorld anvilToPolar(@NotNull Path path, int minSection, int maxSection, @NotNull ChunkSelector selector) throws IOException {
- var chunks = new ArrayList<PolarChunk>();
+ public static @NotNull MeridianWorld anvilToPolar(@NotNull Path path, int minSection, int maxSection, @NotNull ChunkSelector selector) throws IOException {
+ var chunks = new ArrayList<MeridianChunk>();
try (var files = Files.walk(path.resolve("region"), 1)) {
for (var regionFile : files.toList()) {
if (!regionFile.getFileName().toString().endsWith(".mca")) continue;
@@ -116,16 +116,16 @@ public class AnvilPolar {
throw new IOException(e);
}
- return new PolarWorld(
- PolarWorld.LATEST_VERSION,
- PolarWorld.DEFAULT_COMPRESSION,
+ return new MeridianWorld(
+ MeridianWorld.LATEST_VERSION,
+ MeridianWorld.DEFAULT_COMPRESSION,
(byte) minSection, (byte) maxSection,
chunks
);
}
- private static @NotNull List<PolarChunk> readAnvilChunks(@NotNull RegionFile regionFile, int minSection, int maxSection, @NotNull ChunkSelector selector) throws AnvilException, IOException {
- var chunks = new ArrayList<PolarChunk>();
+ private static @NotNull List<MeridianChunk> readAnvilChunks(@NotNull RegionFile regionFile, int minSection, int maxSection, @NotNull ChunkSelector selector) throws AnvilException, IOException {
+ var chunks = new ArrayList<MeridianChunk>();
for (int x = 0; x < 32; x++) {
for (int z = 0; z < 32; z++) {
int chunkX = x + (regionFile.getRegionX() * 32);
@@ -138,7 +138,7 @@ public class AnvilPolar {
var chunkReader = new ChunkReader(chunkData);
- var sections = new PolarSection[maxSection - minSection + 1];
+ var sections = new MeridianSection[maxSection - minSection + 1];
for (var sectionData : chunkReader.getSections()) {
var sectionReader = new ChunkSectionReader(chunkReader.getMinecraftVersion(), sectionData);
@@ -165,9 +165,9 @@ public class AnvilPolar {
// Single block palette, no block data.
blockPalette = new String[]{readBlock(blockInfo.get(0))};
} else {
- blockData = new int[PolarSection.BLOCK_PALETTE_SIZE];
+ blockData = new int[MeridianSection.BLOCK_PALETTE_SIZE];
Long[] rawBlockData = Arrays.stream(sectionReader.getCompactedBlockStates().copyArray()).boxed().toArray(Long[]::new);
- var bitsPerEntry = rawBlockData.length * 64 / PolarSection.BLOCK_PALETTE_SIZE;
+ var bitsPerEntry = rawBlockData.length * 64 / MeridianSection.BLOCK_PALETTE_SIZE;
PaletteUtil.unpack(blockData, rawBlockData, bitsPerEntry);
// blockData = sectionReader.getUncompressedBlockStateIDs();
@@ -193,7 +193,7 @@ public class AnvilPolar {
} else {
// Full palette case, convert from 64 strings provided by anvil to a normal palette (split data + palette)
                var palette = new ArrayList<String>();
- biomeData = new int[PolarSection.BIOME_PALETTE_SIZE];
+ biomeData = new int[MeridianSection.BIOME_PALETTE_SIZE];
for (int i = 0; i < biomeData.length; i++) {
var biome = biomeInfo.getBiomes()[i];
var paletteId = palette.indexOf(biome);
@@ -217,7 +217,7 @@ public class AnvilPolar {
skyLight = sectionReader.getSkyLight().copyArray();
}
- sections[sectionReader.getY() - minSection] = new PolarSection(
+ sections[sectionReader.getY() - minSection] = new MeridianSection(
blockPalette, blockData,
biomePalette, biomeData,
blockLight, skyLight
@@ -226,16 +226,16 @@ public class AnvilPolar {
// Fill in the remaining sections with empty sections
for (int i = 0; i < sections.length; i++) {
if (sections[i] != null) continue;
- sections[i] = new PolarSection();
+ sections[i] = new MeridianSection();
}
- var blockEntities = new ArrayList<PolarChunk.BlockEntity>();
+ var blockEntities = new ArrayList<MeridianChunk.BlockEntity>();
for (var blockEntityCompound : chunkReader.getBlockEntities()) {
var blockEntity = convertBlockEntity(blockEntityCompound);
if (blockEntity != null) blockEntities.add(blockEntity);
}
- var heightmaps = new byte[PolarChunk.HEIGHTMAP_BYTE_SIZE][PolarChunk.HEIGHTMAPS.length];
+ var heightmaps = new byte[MeridianChunk.HEIGHTMAP_BYTE_SIZE][MeridianChunk.HEIGHTMAPS.length];
chunkData.getCompound("Heightmaps");
//todo: heightmaps
// MOTION_BLOCKING MOTION_BLOCKING_NO_LEAVES
@@ -244,7 +244,7 @@ public class AnvilPolar {
var userData = new byte[0];
- chunks.add(new PolarChunk(
+ chunks.add(new MeridianChunk(
chunkReader.getChunkX(),
chunkReader.getChunkZ(),
sections,
@@ -257,7 +257,7 @@ public class AnvilPolar {
return chunks;
}
- private static @Nullable PolarChunk.BlockEntity convertBlockEntity(@NotNull NBTCompound blockEntityCompound) {
+ private static @Nullable MeridianChunk.BlockEntity convertBlockEntity(@NotNull NBTCompound blockEntityCompound) {
final var x = blockEntityCompound.getInt("x");
final var y = blockEntityCompound.getInt("y");
final var z = blockEntityCompound.getInt("z");
@@ -280,7 +280,7 @@ public class AnvilPolar {
mutableCopy.remove("z");
mutableCopy.remove("keepPacked");
- return new PolarChunk.BlockEntity(x, y, z, blockEntityId, mutableCopy.toCompound());
+ return new MeridianChunk.BlockEntity(x, y, z, blockEntityId, mutableCopy.toCompound());
}
private static @NotNull String readBlock(@NotNull NBTCompound paletteEntry) {
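
For reference, a minimal conversion sketch, not part of the patch itself: it assumes the renamed anvilToPolar entry point and ChunkSelector.all() from the hunks above, plus MeridianWriter.write(...) as renamed later in this patch set. The paths are hypothetical.

    // Hypothetical paths; IOException handling omitted for brevity.
    Path anvilFolder = Path.of("worlds/lobby");
    MeridianWorld world = AnvilPolar.anvilToPolar(anvilFolder, ChunkSelector.all());
    Files.write(Path.of("worlds/lobby.meridian"), MeridianWriter.write(world));
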
diff --git a/api/src/main/java/me/combimagnetron/lagoon/world/PolarChunk.java b/api/src/main/java/me/combimagnetron/lagoon/world/MeridianChunk.java
similarity index 93%
rename from api/src/main/java/me/combimagnetron/lagoon/world/PolarChunk.java
rename to api/src/main/java/me/combimagnetron/lagoon/world/MeridianChunk.java
index 27e2dd9..76d4799 100644
--- a/api/src/main/java/me/combimagnetron/lagoon/world/PolarChunk.java
+++ b/api/src/main/java/me/combimagnetron/lagoon/world/MeridianChunk.java
@@ -1,19 +1,19 @@
package me.combimagnetron.lagoon.world;
+import org.bukkit.generator.ChunkGenerator;
import org.jetbrains.annotations.Nullable;
import org.jglrxavpok.hephaistos.nbt.NBTCompound;
import java.util.Collection;
-import java.util.List;
/**
* A Java type representing the latest version of the chunk format.
*/
-public record PolarChunk(
+public record MeridianChunk(
int x,
int z,
- PolarSection[] sections,
+ MeridianSection[] sections,
 Collection<BlockEntity> blockEntities,
byte[][] heightmaps,
byte[] userData
diff --git a/api/src/main/java/me/combimagnetron/lagoon/world/PolarLoader.java b/api/src/main/java/me/combimagnetron/lagoon/world/MeridianLoader.java
similarity index 89%
rename from api/src/main/java/me/combimagnetron/lagoon/world/PolarLoader.java
rename to api/src/main/java/me/combimagnetron/lagoon/world/MeridianLoader.java
index 1897542..37b67be 100644
--- a/api/src/main/java/me/combimagnetron/lagoon/world/PolarLoader.java
+++ b/api/src/main/java/me/combimagnetron/lagoon/world/MeridianLoader.java
@@ -26,11 +26,11 @@
import java.util.concurrent.locks.ReentrantReadWriteLock;
@SuppressWarnings("UnstableApiUsage")
-public class PolarLoader implements IChunkLoader {
+public class MeridianLoader implements IChunkLoader {
private static final BlockManager BLOCK_MANAGER = MinecraftServer.getBlockManager();
private static final BiomeManager BIOME_MANAGER = MinecraftServer.getBiomeManager();
private static final ExceptionManager EXCEPTION_HANDLER = MinecraftServer.getExceptionManager();
- private static final Logger logger = LoggerFactory.getLogger(PolarLoader.class);
+ private static final Logger logger = LoggerFactory.getLogger(MeridianLoader.class);
// Account for changes between main Minestom and minestom-ce.
private static final ChunkSupplierShim CHUNK_SUPPLIER = ChunkSupplierShim.select();
@@ -39,38 +39,38 @@ public class PolarLoader implements IChunkLoader {
private final Path savePath;
private final ReentrantReadWriteLock worldDataLock = new ReentrantReadWriteLock();
- private final PolarWorld worldData;
+ private final MeridianWorld worldData;
private PolarWorldAccess worldAccess = null;
private boolean parallel = false;
- public PolarLoader(@NotNull Path path) throws IOException {
+ public MeridianLoader(@NotNull Path path) throws IOException {
this.savePath = path;
if (Files.exists(path)) {
- this.worldData = PolarReader.read(Files.readAllBytes(path));
+ this.worldData = MeridianReader.read(Files.readAllBytes(path));
} else {
- this.worldData = new PolarWorld();
+ this.worldData = new MeridianWorld();
}
}
- public PolarLoader(@NotNull InputStream inputStream) throws IOException {
+ public MeridianLoader(@NotNull InputStream inputStream) throws IOException {
try (inputStream) {
- this.worldData = PolarReader.read(inputStream.readAllBytes());
+ this.worldData = MeridianReader.read(inputStream.readAllBytes());
this.savePath = null;
}
}
- public PolarLoader(@NotNull PolarWorld world) {
+ public MeridianLoader(@NotNull MeridianWorld world) {
this.worldData = world;
this.savePath = null;
}
- public @NotNull PolarWorld world() {
+ public @NotNull MeridianWorld world() {
return worldData;
}
@Contract("_ -> this")
- public @NotNull PolarLoader setWorldAccess(@NotNull PolarWorldAccess worldAccess) {
+ public @NotNull MeridianLoader setWorldAccess(@NotNull PolarWorldAccess worldAccess) {
this.worldAccess = worldAccess;
return this;
}
@@ -86,7 +86,7 @@ public PolarLoader(@NotNull PolarWorld world) {
* @return this
*/
@Contract("_ -> this")
- public @NotNull PolarLoader setParallel(boolean parallel) {
+ public @NotNull MeridianLoader setParallel(boolean parallel) {
this.parallel = parallel;
return this;
}
@@ -144,7 +144,7 @@ public void loadInstance(@NotNull Instance instance) {
return CompletableFuture.completedFuture(chunk);
}
- private void loadSection(@NotNull PolarSection sectionData, @NotNull Section section) {
+ private void loadSection(@NotNull MeridianSection sectionData, @NotNull Section section) {
// assumed that section is _not_ empty
// Blocks
@@ -207,7 +207,7 @@ private void loadSection(@NotNull PolarSection sectionData, @NotNull Section sec
section.setSkyLight(sectionData.skyLight());
}
- private void loadBlockEntity(@NotNull PolarChunk.BlockEntity blockEntity, @NotNull Chunk chunk) {
+ private void loadBlockEntity(@NotNull MeridianChunk.BlockEntity blockEntity, @NotNull Chunk chunk) {
// Fetch the block type, we can ignore Handler/NBT since we are about to replace it
var block = chunk.getBlock(blockEntity.x(), blockEntity.y(), blockEntity.z(), Block.Getter.Condition.TYPE);
@@ -248,7 +248,7 @@ public void unloadChunk(Chunk chunk) {
if (savePath != null) {
return CompletableFuture.runAsync(() -> {
try {
- Files.write(savePath, PolarWriter.write(worldData),
+ Files.write(savePath, MeridianWriter.write(worldData),
StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING);
} catch (IOException e) {
EXCEPTION_HANDLER.handleException(new RuntimeException("Failed to save world", e));
@@ -262,11 +262,11 @@ public void unloadChunk(Chunk chunk) {
 private void updateChunkData(@NotNull Short2ObjectMap<Block> blockCache, @NotNull Chunk chunk) {
var dimension = chunk.getInstance().getDimensionType();
- var blockEntities = new ArrayList<PolarChunk.BlockEntity>();
- var sections = new PolarSection[dimension.getHeight() / Chunk.CHUNK_SECTION_SIZE];
+ var blockEntities = new ArrayList<MeridianChunk.BlockEntity>();
+ var sections = new MeridianSection[dimension.getHeight() / Chunk.CHUNK_SECTION_SIZE];
assert sections.length == chunk.getSections().size(): "World height mismatch";
- var heightmaps = new byte[32][PolarChunk.HEIGHTMAPS.length];
+ var heightmaps = new byte[32][MeridianChunk.HEIGHTMAPS.length];
var userData = new byte[0];
@@ -282,7 +282,7 @@ private void updateChunkData(@NotNull Short2ObjectMap blockCache, @NotNu
// Short circuit empty palette
blockPalette.add("air");
} else {
- var localBlockData = new int[PolarSection.BLOCK_PALETTE_SIZE];
+ var localBlockData = new int[MeridianSection.BLOCK_PALETTE_SIZE];
section.blockPalette().getAll((x, sectionLocalY, z, blockStateId) -> {
final int blockIndex = x + sectionLocalY * 16 * 16 + z * 16;
@@ -309,7 +309,7 @@ private void updateChunkData(@NotNull Short2ObjectMap blockCache, @NotNu
var handlerId = block.handler() == null ? null : block.handler().getNamespaceId().asString();
if (handlerId != null || block.hasNbt()) {
- blockEntities.add(new PolarChunk.BlockEntity(
+ blockEntities.add(new MeridianChunk.BlockEntity(
x, y, z, handlerId, block.nbt()
));
}
@@ -319,7 +319,7 @@ private void updateChunkData(@NotNull Short2ObjectMap blockCache, @NotNu
}
 var biomePalette = new ArrayList<String>();
- var biomeData = new int[PolarSection.BIOME_PALETTE_SIZE];
+ var biomeData = new int[MeridianSection.BIOME_PALETTE_SIZE];
section.biomePalette().getAll((x, y, z, id) -> {
var biomeId = BIOME_MANAGER.getById(id).name().asString();
@@ -340,7 +340,7 @@ private void updateChunkData(@NotNull Short2ObjectMap blockCache, @NotNu
skyLight = null;
}
- sections[i] = new PolarSection(
+ sections[i] = new MeridianSection(
blockPalette.toArray(new String[0]), blockData,
biomePalette.toArray(new String[0]), biomeData,
blockLight, skyLight
@@ -358,7 +358,7 @@ private void updateChunkData(@NotNull Short2ObjectMap blockCache, @NotNu
worldData.updateChunkAt(
chunk.getChunkX(),
chunk.getChunkZ(),
- new PolarChunk(
+ new MeridianChunk(
chunk.getChunkX(),
chunk.getChunkZ(),
sections,
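
A rough wiring sketch for the renamed loader, assuming the stock Minestom InstanceManager/InstanceContainer API; the save path and the parallel flag are illustrative only.

    // Sketch only: create an instance container and attach the renamed loader.
    try {
        var loader = new MeridianLoader(Path.of("worlds/lobby.meridian")).setParallel(true);
        var container = MinecraftServer.getInstanceManager().createInstanceContainer();
        container.setChunkLoader(loader);
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
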
diff --git a/api/src/main/java/me/combimagnetron/lagoon/world/PolarReader.java b/api/src/main/java/me/combimagnetron/lagoon/world/MeridianReader.java
similarity index 69%
rename from api/src/main/java/me/combimagnetron/lagoon/world/PolarReader.java
rename to api/src/main/java/me/combimagnetron/lagoon/world/MeridianReader.java
index 9c066f4..4e60bee 100644
--- a/api/src/main/java/me/combimagnetron/lagoon/world/PolarReader.java
+++ b/api/src/main/java/me/combimagnetron/lagoon/world/MeridianReader.java
@@ -9,19 +9,19 @@
import java.io.IOException;
import java.io.InputStream;
-public class PolarReader {
- private PolarReader() {}
+public class MeridianReader {
+ private MeridianReader() {}
- public static @NotNull PolarWorld read(byte @NotNull [] data) {
+ public static @NotNull MeridianWorld read(byte @NotNull [] data) {
ByteBuffer buffer = new ByteBuffer();
buffer.read(data);
var magicNumber = buffer.readInt();
- assertThat(magicNumber == PolarWorld.MAGIC_NUMBER, "Invalid magic number");
+ assertThat(magicNumber == MeridianWorld.MAGIC_NUMBER, "Invalid magic number");
short version = buffer.readShort();
validateVersion(version);
- PolarWorld.CompressionType compression = PolarWorld.CompressionType.fromId(buffer.readByte());
+ MeridianWorld.CompressionType compression = MeridianWorld.CompressionType.fromId(buffer.readByte());
assertThat(compression != null, "Invalid compression type");
int compressedDataLength = buffer.readVarInt();
@@ -33,24 +33,24 @@ private PolarReader() {}
var chunks = buffer.readCollection(b -> readChunk(version, b, maxSection - minSection + 1));
- return new PolarWorld(version, compression, minSection, maxSection, chunks);
+ return new MeridianWorld(version, compression, minSection, maxSection, chunks);
}
- private static @NotNull PolarChunk readChunk(short version, ByteBuffer buffer, int sectionCount) {
+ private static @NotNull MeridianChunk readChunk(short version, ByteBuffer buffer, int sectionCount) {
int chunkX = buffer.readVarInt();
int chunkZ = buffer.readVarInt();
- PolarSection[] sections = new PolarSection[sectionCount];
+ MeridianSection[] sections = new MeridianSection[sectionCount];
for (int i = 0; i < sectionCount; i++) {
sections[i] = readSection(version, buffer);
}
var blockEntities = buffer.readCollection(b -> readBlockEntity(version, b));
- var heightmaps = new byte[PolarChunk.HEIGHTMAP_BYTE_SIZE][PolarChunk.HEIGHTMAPS.length];
+ var heightmaps = new byte[MeridianChunk.HEIGHTMAP_BYTE_SIZE][MeridianChunk.HEIGHTMAPS.length];
int heightmapMask = buffer.readInt();
- for (int i = 0; i < PolarChunk.HEIGHTMAPS.length; i++) {
- if ((heightmapMask & PolarChunk.HEIGHTMAPS[i]) == 0)
+ for (int i = 0; i < MeridianChunk.HEIGHTMAPS.length; i++) {
+ if ((heightmapMask & MeridianChunk.HEIGHTMAPS[i]) == 0)
continue;
heightmaps[i] = buffer.readByteArray(32);
@@ -58,10 +58,10 @@ private PolarReader() {}
// Objects
byte[] userData = new byte[0];
- if (version > PolarWorld.VERSION_USERDATA_OPT_BLOCK_ENT_NBT)
+ if (version > MeridianWorld.VERSION_USERDATA_OPT_BLOCK_ENT_NBT)
userData = buffer.readByteArray();
- return new PolarChunk(
+ return new MeridianChunk(
chunkX, chunkZ,
sections,
blockEntities,
@@ -70,33 +70,33 @@ private PolarReader() {}
);
}
- private static @NotNull PolarSection readSection(short version, @NotNull ByteBuffer buffer) {
+ private static @NotNull MeridianSection readSection(short version, @NotNull ByteBuffer buffer) {
// If section is empty exit immediately
- if (buffer.readBoolean()) return new PolarSection();
+ if (buffer.readBoolean()) return new MeridianSection();
var blockPalette = buffer.readCollection(ByteBuffer::readString).toArray(String[]::new);
int[] blockData = null;
if (blockPalette.length > 1) {
- blockData = new int[PolarSection.BLOCK_PALETTE_SIZE];
+ blockData = new int[MeridianSection.BLOCK_PALETTE_SIZE];
var rawBlockData = buffer.readCollection(ByteBuffer::readLong).toArray(Long[]::new);
- var bitsPerEntry = rawBlockData.length * 64 / PolarSection.BLOCK_PALETTE_SIZE;
+ var bitsPerEntry = rawBlockData.length * 64 / MeridianSection.BLOCK_PALETTE_SIZE;
PaletteUtil.unpack(blockData, rawBlockData, bitsPerEntry);
}
var biomePalette = buffer.readCollection(ByteBuffer::readString).toArray(String[]::new);
int[] biomeData = null;
if (biomePalette.length > 1) {
- biomeData = new int[PolarSection.BIOME_PALETTE_SIZE];
+ biomeData = new int[MeridianSection.BIOME_PALETTE_SIZE];
var rawBiomeData = buffer.readCollection(ByteBuffer::readLong).toArray(Long[]::new);
- var bitsPerEntry = rawBiomeData.length * 64 / PolarSection.BIOME_PALETTE_SIZE;
+ var bitsPerEntry = rawBiomeData.length * 64 / MeridianSection.BIOME_PALETTE_SIZE;
PaletteUtil.unpack(biomeData, rawBiomeData, bitsPerEntry);
}
byte[] blockLight = null, skyLight = null;
- if (version > PolarWorld.VERSION_UNIFIED_LIGHT) {
+ if (version > MeridianWorld.VERSION_UNIFIED_LIGHT) {
if (buffer.readBoolean())
blockLight = buffer.readByteArray(2048);
if (buffer.readBoolean())
@@ -106,18 +106,18 @@ private PolarReader() {}
skyLight = buffer.readByteArray(2048);
}
- return new PolarSection(blockPalette, blockData, biomePalette, biomeData, blockLight, skyLight);
+ return new MeridianSection(blockPalette, blockData, biomePalette, biomeData, blockLight, skyLight);
}
- private static @NotNull PolarChunk.BlockEntity readBlockEntity(int version, @NotNull ByteBuffer buffer) {
+ private static @NotNull MeridianChunk.BlockEntity readBlockEntity(int version, @NotNull ByteBuffer buffer) {
int posIndex = buffer.readInt();
var id = buffer.readBoolean() ? buffer.readString() : null;
NBTCompound nbt = null;
- if (version <= PolarWorld.VERSION_USERDATA_OPT_BLOCK_ENT_NBT || buffer.readBoolean())
+ if (version <= MeridianWorld.VERSION_USERDATA_OPT_BLOCK_ENT_NBT || buffer.readBoolean())
nbt = (NBTCompound) readNbt(buffer);
- return new PolarChunk.BlockEntity(
+ return new MeridianChunk.BlockEntity(
ChunkUtils.blockIndexToChunkPositionX(posIndex),
ChunkUtils.blockIndexToChunkPositionY(posIndex),
ChunkUtils.blockIndexToChunkPositionZ(posIndex),
@@ -149,11 +149,11 @@ public int available() {
private static void validateVersion(int version) {
var invalidVersionError = String.format("Unsupported Polar version. Up to %d is supported, found %d.",
- PolarWorld.LATEST_VERSION, version);
- assertThat(version <= PolarWorld.LATEST_VERSION, invalidVersionError);
+ MeridianWorld.LATEST_VERSION, version);
+ assertThat(version <= MeridianWorld.LATEST_VERSION, invalidVersionError);
}
- private static @NotNull ByteBuffer decompressBuffer(@NotNull ByteBuffer buffer, @NotNull PolarWorld.CompressionType compression, int length) {
+ private static @NotNull ByteBuffer decompressBuffer(@NotNull ByteBuffer buffer, @NotNull MeridianWorld.CompressionType compression, int length) {
return switch (compression) {
case NONE -> buffer;
case ZSTD -> {
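
The readSection hunks above derive bitsPerEntry from the length of the packed long array and hand the data to PaletteUtil.unpack. A minimal sketch of that unpack step, assuming entries are packed little-endian within each long and never cross a long boundary; the real PaletteUtil may differ.

    // Assumed packing: little-endian entries within each long, no entry spans two longs.
    static void unpack(int[] out, Long[] packed, int bitsPerEntry) {
        int entriesPerLong = 64 / bitsPerEntry;
        long mask = (1L << bitsPerEntry) - 1;
        for (int i = 0; i < out.length; i++) {
            long word = packed[i / entriesPerLong];
            out[i] = (int) ((word >>> ((i % entriesPerLong) * bitsPerEntry)) & mask);
        }
    }
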
diff --git a/api/src/main/java/me/combimagnetron/lagoon/world/PolarSection.java b/api/src/main/java/me/combimagnetron/lagoon/world/MeridianSection.java
similarity index 97%
rename from api/src/main/java/me/combimagnetron/lagoon/world/PolarSection.java
rename to api/src/main/java/me/combimagnetron/lagoon/world/MeridianSection.java
index 2b3422a..acd321e 100644
--- a/api/src/main/java/me/combimagnetron/lagoon/world/PolarSection.java
+++ b/api/src/main/java/me/combimagnetron/lagoon/world/MeridianSection.java
@@ -11,7 +11,7 @@
* This class should be considered immutable.
*/
@ApiStatus.Internal
-public class PolarSection {
+public class MeridianSection {
public static final int BLOCK_PALETTE_SIZE = 4096;
public static final int BIOME_PALETTE_SIZE = 64;
@@ -27,7 +27,7 @@ public class PolarSection {
private final byte @Nullable [] blockLight;
private final byte @Nullable [] skyLight;
- public PolarSection() {
+ public MeridianSection() {
this.empty = true;
this.blockPalette = new String[]{"minecraft:air"};
@@ -39,7 +39,7 @@ public PolarSection() {
this.skyLight = null;
}
- public PolarSection(
+ public MeridianSection(
String @NotNull [] blockPalette, int @Nullable [] blockData,
String @NotNull [] biomePalette, int @Nullable [] biomeData,
byte @Nullable [] blockLight, byte @Nullable [] skyLight
diff --git a/api/src/main/java/me/combimagnetron/lagoon/world/PolarWorld.java b/api/src/main/java/me/combimagnetron/lagoon/world/MeridianWorld.java
similarity index 84%
rename from api/src/main/java/me/combimagnetron/lagoon/world/PolarWorld.java
rename to api/src/main/java/me/combimagnetron/lagoon/world/MeridianWorld.java
index af8f6eb..4ddd6dc 100644
--- a/api/src/main/java/me/combimagnetron/lagoon/world/PolarWorld.java
+++ b/api/src/main/java/me/combimagnetron/lagoon/world/MeridianWorld.java
@@ -11,7 +11,7 @@
/**
* A Java type representing the latest version of the world format.
*/
-public class PolarWorld {
+public class MeridianWorld {
public static final int MAGIC_NUMBER = 0x506F6C72; // `Polr`
public static final short LATEST_VERSION = 3;
@@ -29,17 +29,17 @@ public class PolarWorld {
private final byte maxSection;
// Chunk data
- private final Long2ObjectMap<PolarChunk> chunks = new Long2ObjectOpenHashMap<>();
+ private final Long2ObjectMap<MeridianChunk> chunks = new Long2ObjectOpenHashMap<>();
- public PolarWorld() {
+ public MeridianWorld() {
this(LATEST_VERSION, DEFAULT_COMPRESSION, (byte) -4, (byte) 19, List.of());
}
- public PolarWorld(
+ public MeridianWorld(
short version,
@NotNull CompressionType compression,
byte minSection, byte maxSection,
- @NotNull Collection<PolarChunk> chunks
+ @NotNull Collection<MeridianChunk> chunks
) {
this.version = version;
this.compression = compression;
@@ -72,14 +72,14 @@ public byte maxSection() {
return maxSection;
}
- public @Nullable PolarChunk chunkAt(int x, int z) {
+ public @Nullable MeridianChunk chunkAt(int x, int z) {
return chunks.getOrDefault(ChunkUtils.getChunkIndex(x, z), null);
}
- public void updateChunkAt(int x, int z, @NotNull PolarChunk chunk) {
+ public void updateChunkAt(int x, int z, @NotNull MeridianChunk chunk) {
chunks.put(ChunkUtils.getChunkIndex(x, z), chunk);
}
- public @NotNull Collection<PolarChunk> chunks() {
+ public @NotNull Collection<MeridianChunk> chunks() {
return chunks.values();
}
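
chunkAt and updateChunkAt key the Long2ObjectMap with ChunkUtils.getChunkIndex(x, z). A sketch of the assumed key layout, x in the high 32 bits and z in the low 32 bits; the actual ChunkUtils implementation may pack the coordinates differently.

    // Assumed layout of the long chunk key: [ x : 32 bits | z : 32 bits ].
    static long getChunkIndex(int x, int z) {
        return ((long) x << 32) | (z & 0xFFFFFFFFL);
    }
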
diff --git a/api/src/main/java/me/combimagnetron/lagoon/world/PolarWriter.java b/api/src/main/java/me/combimagnetron/lagoon/world/MeridianWriter.java
similarity index 86%
rename from api/src/main/java/me/combimagnetron/lagoon/world/PolarWriter.java
rename to api/src/main/java/me/combimagnetron/lagoon/world/MeridianWriter.java
index e26ae9d..47ebc46 100644
--- a/api/src/main/java/me/combimagnetron/lagoon/world/PolarWriter.java
+++ b/api/src/main/java/me/combimagnetron/lagoon/world/MeridianWriter.java
@@ -11,18 +11,18 @@
import java.io.OutputStream;
import java.util.Arrays;
-public class PolarWriter {
- private PolarWriter() {}
+public class MeridianWriter {
+ private MeridianWriter() {}
- public static byte[] write(@NotNull PolarWorld world) {
+ public static byte[] write(@NotNull MeridianWorld world) {
// Write the compressed content first
var content = new ByteBuffer();
content.writeByte(world.minSection());
content.writeByte(world.maxSection());
- content.writeCollection(world.chunks(), PolarWriter::writeChunk);
+ content.writeCollection(world.chunks(), MeridianWriter::writeChunk);
ByteBuffer byteBuf = new ByteBuffer();
- byteBuf.writeInt(PolarWorld.MAGIC_NUMBER);
- byteBuf.writeShort(PolarWorld.LATEST_VERSION);
+ byteBuf.writeInt(MeridianWorld.MAGIC_NUMBER);
+ byteBuf.writeShort(MeridianWorld.LATEST_VERSION);
byteBuf.writeByte((byte) world.compression().ordinal());
switch (world.compression()) {
case NONE -> {
@@ -36,22 +36,22 @@ public static byte[] write(@NotNull PolarWorld world) {
return byteBuf.toBytes();
}
- private static void writeChunk(@NotNull ByteBuffer buffer, @NotNull PolarChunk chunk) {
+ private static void writeChunk(@NotNull ByteBuffer buffer, @NotNull MeridianChunk chunk) {
buffer.writeVarInt(chunk.x());
buffer.writeVarInt(chunk.z());
for (var section : chunk.sections()) {
writeSection(buffer, section);
}
- buffer.writeCollection(chunk.blockEntities(), PolarWriter::writeBlockEntity);
+ buffer.writeCollection(chunk.blockEntities(), MeridianWriter::writeBlockEntity);
//todo heightmaps
- buffer.writeInt(PolarChunk.HEIGHTMAP_NONE);
+ buffer.writeInt(MeridianChunk.HEIGHTMAP_NONE);
buffer.writeByteArray(chunk.userData());
}
- private static void writeSection(@NotNull ByteBuffer buffer, @NotNull PolarSection section) {
+ private static void writeSection(@NotNull ByteBuffer buffer, @NotNull MeridianSection section) {
buffer.writeBoolean(section.isEmpty());
if (section.isEmpty()) return;
@@ -84,7 +84,7 @@ private static void writeSection(@NotNull ByteBuffer buffer, @NotNull PolarSecti
buffer.writeByteArray(section.skyLight());
}
- private static void writeBlockEntity(@NotNull ByteBuffer buffer, @NotNull PolarChunk.BlockEntity blockEntity) {
+ private static void writeBlockEntity(@NotNull ByteBuffer buffer, @NotNull MeridianChunk.BlockEntity blockEntity) {
var index = ChunkUtils.getBlockIndex(blockEntity.x(), blockEntity.y(), blockEntity.z());
buffer.writeInt(index);
 buffer.writeBoolean(blockEntity.id() != null);
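
A quick round-trip sanity sketch using only the renamed MeridianWriter and MeridianReader from this patch set; comparing chunk counts is just a cheap consistency check, not a full equality test.

    byte[] bytes = MeridianWriter.write(world);
    MeridianWorld reread = MeridianReader.read(bytes);
    assert reread.chunks().size() == world.chunks().size();
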
diff --git a/api/src/main/java/me/combimagnetron/lagoon/world/PolarWorldAccess.java b/api/src/main/java/me/combimagnetron/lagoon/world/PolarWorldAccess.java
deleted file mode 100644
index 246a1fd..0000000
--- a/api/src/main/java/me/combimagnetron/lagoon/world/PolarWorldAccess.java
+++ /dev/null
@@ -1,38 +0,0 @@
-package me.combimagnetron.lagoon.world;
-
-import net.minestom.server.instance.Chunk;
-import net.minestom.server.network.NetworkBuffer;
-import org.jetbrains.annotations.NotNull;
-import org.jetbrains.annotations.Nullable;
-
-/**
- * Provides access to user world data for a {@link PolarLoader} to get and set user
- * specific world data such as objects, as well as provides some relevant callbacks.
- *
- * Usage of world access is completely optional; dependent features will not add
- * overhead to the format if unused.
- */
-@SuppressWarnings("UnstableApiUsage")
-public interface PolarWorldAccess {
-
- /**
- * Called when a chunk is created, just before it is added to the world.
- *
- * Can be used to initialize the chunk based on saved user data in the world.
- *
- * @param chunk The Minestom chunk being created
- * @param userData The saved user data, or null if none is present
- */
- default void loadChunkData(@NotNull Chunk chunk, @Nullable NetworkBuffer userData) {}
-
- /**
- * Called when a chunk is being saved.
- *
- * Can be used to save user data in the chunk by writing it to the buffer.
- *
- * @param chunk The Minestom chunk being saved
- * @param userData A buffer to write user data to save
- */
- default void saveChunkData(@NotNull Chunk chunk, @NotNull NetworkBuffer userData) {}
-
-}
diff --git a/api/src/main/java/me/combimagnetron/lagoon/world/compat/ChunkSupplierShim.java b/api/src/main/java/me/combimagnetron/lagoon/world/compat/ChunkSupplierShim.java
deleted file mode 100644
index 684627b..0000000
--- a/api/src/main/java/me/combimagnetron/lagoon/world/compat/ChunkSupplierShim.java
+++ /dev/null
@@ -1,30 +0,0 @@
-package me.combimagnetron.lagoon.world.compat;
-
-import net.minestom.server.instance.Chunk;
-import net.minestom.server.instance.DynamicChunk;
-import net.minestom.server.instance.Instance;
-import org.jetbrains.annotations.ApiStatus;
-import org.jetbrains.annotations.NotNull;
-
-/**
- * A shim for {@link net.minestom.server.utils.chunk.ChunkSupplier} to allow for
- * compatibility with main Minestom which does not have the lighting PR (which
- * adds {@link net.minestom.server.utils.chunk.ChunkSupplier}).
- */
-@ApiStatus.Internal
-@FunctionalInterface
-public interface ChunkSupplierShim {
-
- static @NotNull ChunkSupplierShim select() {
- try {
- // If this function is present we have the lighting branch and should use that chunk supplier
- Instance.class.getDeclaredMethod("getChunkSupplier");
- return (instance, cx, cz) -> instance.getChunkSupplier().createChunk(instance, cx, cz);
- } catch (NoSuchMethodException e) {
- // Otherwise we should use the default chunk supplier
- return DynamicChunk::new;
- }
- }
-
- @NotNull Chunk createChunk(@NotNull Instance instance, int chunkX, int chunkZ);
-}
From a3216c967c5ef3ede8f597d6c78b61e1f5e41dfe Mon Sep 17 00:00:00 2001
From: Combimagnetron
Date: Fri, 4 Aug 2023 23:14:22 +0200
Subject: [PATCH 4/6] feat(n/a): implement GameLevel world to load meridian
worlds
---
.../lagoon/world/GameLevel.java | 179 ++++++++++++++++++
1 file changed, 179 insertions(+)
create mode 100644 paper-v1_19_R3/src/main/java/me/combimagnetron/lagoon/world/GameLevel.java
diff --git a/paper-v1_19_R3/src/main/java/me/combimagnetron/lagoon/world/GameLevel.java b/paper-v1_19_R3/src/main/java/me/combimagnetron/lagoon/world/GameLevel.java
new file mode 100644
index 0000000..c06e0a2
--- /dev/null
+++ b/paper-v1_19_R3/src/main/java/me/combimagnetron/lagoon/world/GameLevel.java
@@ -0,0 +1,179 @@
+package me.combimagnetron.lagoon.world;
+
+import com.mojang.serialization.Lifecycle;
+import me.combimagnetron.lagoon.data.Identifier;
+import net.minecraft.server.MinecraftServer;
+import net.minecraft.server.level.ServerLevel;
+import net.minecraft.server.level.progress.ChunkProgressListener;
+import net.minecraft.util.datafix.DataFixers;
+import net.minecraft.world.Difficulty;
+import net.minecraft.world.flag.FeatureFlagSet;
+import net.minecraft.world.level.*;
+import net.minecraft.world.level.chunk.ChunkStatus;
+import net.minecraft.world.level.dimension.DimensionType;
+import net.minecraft.world.level.dimension.LevelStem;
+import net.minecraft.world.level.levelgen.WorldOptions;
+import net.minecraft.world.level.storage.LevelStorageSource;
+import net.minecraft.world.level.storage.PrimaryLevelData;
+import org.apache.commons.io.FileUtils;
+import org.bukkit.Bukkit;
+import org.bukkit.World;
+import org.bukkit.block.Biome;
+import org.bukkit.block.data.BlockData;
+import org.bukkit.generator.BiomeProvider;
+import org.bukkit.generator.WorldInfo;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.*;
+
+public class GameLevel {
+ private final ServerLevel serverLevel;
+ private static String name;
+
+ public GameLevel(Identifier identifier, MeridianWorld meridianWorld) {
+ this.serverLevel = new ServerLevel(
+ MinecraftServer.getServer(),
+ MinecraftServer.getServer().executor,
+ LevelAccess.levelStorageAccess(),
+ LevelData.primaryLevelData(),
+ Level.END,
+ LevelStem.END,
+ new ChunkProgressListenerDummy(),
+ false,
+ 0L,
+ List.of(),
+ false,
+ World.Environment.THE_END,
+ ChunkGeneratorImpl.chunkGenerator(meridianWorld),
+ BiomeProviderImpl.biomeProvider()
+ );
+ name = "comet_" + identifier.string() + "_" + UUID.randomUUID().toString().substring(0, 6);
+ }
+
+ static final class LevelAccess {
+ public static LevelStorageSource.LevelStorageAccess levelStorageAccess() {
+ try {
+ final Path worldFolder;
+ worldFolder = Files.createTempDirectory(name).toAbsolutePath();
+ FileUtils.forceDeleteOnExit(worldFolder.toFile());
+ LevelStorageSource levelStorageSource = new LevelStorageSource(worldFolder, worldFolder, DataFixers.getDataFixer());
+ return levelStorageSource.new LevelStorageAccess(name, LevelStem.END);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ }
+
+ static final class LevelData {
+
+ public static PrimaryLevelData primaryLevelData() {
+ return new PrimaryLevelData(
+ new LevelSettings(name,
+ GameType.SURVIVAL,
+ false,
+ Difficulty.NORMAL,
+ true,
+ new GameRules(),
+ new WorldDataConfiguration(
+ new DataPackConfig(List.of(), List.of()),
+ FeatureFlagSet.of()
+ )
+ ),
+ new WorldOptions(0L, false, false),
+ PrimaryLevelData.SpecialWorldProperty.NONE,
+ Lifecycle.stable()
+ );
+ }
+
+ }
+
+ static final class ChunkGeneratorImpl extends org.bukkit.generator.ChunkGenerator {
+ private final MeridianWorld meridianWorld;
+
+ private ChunkGeneratorImpl(MeridianWorld world) {
+ this.meridianWorld = world;
+ }
+
+ public static ChunkGeneratorImpl chunkGenerator(MeridianWorld meridianWorld) {
+ return new ChunkGeneratorImpl(meridianWorld);
+ }
+
+
+ @Override
+ public void generateNoise(WorldInfo worldInfo, Random random, int chunkX, int chunkZ, ChunkData chunkData) {
+ MeridianChunk meridianChunk = meridianWorld.chunkAt(chunkX, chunkZ);
+ Map<Integer, Set<BlockData>> blockData = new HashMap<>();
+ for (MeridianSection section : meridianChunk.sections()) {
+ int y = -64;
+ Set<BlockData> blockDataSet = new LinkedHashSet<>();
+ for (String string : section.blockPalette()) {
+ blockDataSet.add(Bukkit.createBlockData(string));
+ }
+ blockData.put(y, blockDataSet);
+ }
+ for(int y = chunkData.getMinHeight(); y <= chunkData.getMaxHeight(); y++) {
+ Collection<BlockData> blockDataCollection = blockData.get(y);
+ for(int x = 0; x < 16; x++) {
+ for(int z = 0; z < 16; z++) {
+ chunkData.setBlock(x, y, z, blockDataCollection.iterator().next());
+ }
+ }
+ }
+ }
+
+ }
+
+ static final class BiomeProviderImpl extends BiomeProvider {
+
+ private BiomeProviderImpl() {
+ }
+
+ public static BiomeProviderImpl biomeProvider() {
+ return new BiomeProviderImpl();
+ }
+
+ @Override
+ public @NotNull Biome getBiome(@NotNull WorldInfo worldInfo, int x, int y, int z) {
+ return Biome.END_BARRENS;
+ }
+
+ @Override
+ public @NotNull List<Biome> getBiomes(@NotNull WorldInfo worldInfo) {
+ return List.of(Biome.END_BARRENS);
+ }
+ }
+
+ static final class ChunkProgressListenerDummy implements ChunkProgressListener {
+
+ @Override
+ public void updateSpawnPos(ChunkPos spawnPos) {
+
+ }
+
+ @Override
+ public void onStatusChange(ChunkPos pos, @Nullable ChunkStatus status) {
+
+ }
+
+ @Override
+ public void start() {
+
+ }
+
+ @Override
+ public void stop() {
+
+ }
+
+ @Override
+ public void setChunkRadius(int radius) {
+
+ }
+ }
+
+
+}
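
A hypothetical construction sketch for the new GameLevel, assuming a world converted through the renamed Anvil API from the previous patch; the identifier and path are made up, and IOException handling is omitted.

    MeridianWorld world = AnvilPolar.anvilToPolar(Path.of("worlds/arena"));
    GameLevel level = new GameLevel(Identifier.of("comet", "arena"), world);
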
From 42595d7dbbdc5db4ae33ce03b576d3393d6d34e8 Mon Sep 17 00:00:00 2001
From: Combimagnetron
Date: Mon, 21 Aug 2023 10:34:06 +0200
Subject: [PATCH 5/6] feat(n/a): update instance blueprint api
---
...BoundRequestInstanceBlueprintsMessage.java | 30 +++++++++++-
.../lagoon/util/VersionCollection.java | 8 ++++
.../java/me/combimagnetron/pilot/Pilot.java | 10 ++++
.../ServiceRequestBlueprintListener.java | 47 ++++++++++++++++++-
4 files changed, 92 insertions(+), 3 deletions(-)
diff --git a/api/src/main/java/me/combimagnetron/lagoon/communication/message/impl/servicebound/ServiceBoundRequestInstanceBlueprintsMessage.java b/api/src/main/java/me/combimagnetron/lagoon/communication/message/impl/servicebound/ServiceBoundRequestInstanceBlueprintsMessage.java
index ee65a8b..7bf2cdf 100644
--- a/api/src/main/java/me/combimagnetron/lagoon/communication/message/impl/servicebound/ServiceBoundRequestInstanceBlueprintsMessage.java
+++ b/api/src/main/java/me/combimagnetron/lagoon/communication/message/impl/servicebound/ServiceBoundRequestInstanceBlueprintsMessage.java
@@ -11,16 +11,34 @@
public class ServiceBoundRequestInstanceBlueprintsMessage extends ServiceBoundMessage {
private final Identifier identifier;
private final String version;
+ private final Type type;
+
public ServiceBoundRequestInstanceBlueprintsMessage(Identifier identifier) {
super(2, null, null);
this.identifier = identifier;
- this.version = "main";
+ this.version = "latest";
+ this.type = Type.RELEASE;
+ }
+
+ public ServiceBoundRequestInstanceBlueprintsMessage(Identifier identifier, Type type) {
+ super(2, null, null);
+ this.identifier = identifier;
+ this.version = "latest";
+ this.type = type;
}
public ServiceBoundRequestInstanceBlueprintsMessage(Identifier identifier, String version) {
super(2, null, null);
this.identifier = identifier;
this.version = version;
+ this.type = Type.RELEASE;
+ }
+
+ public ServiceBoundRequestInstanceBlueprintsMessage(Identifier identifier, String version, Type type) {
+ super(2, null, null);
+ this.identifier = identifier;
+ this.version = version;
+ this.type = type;
}
public ServiceBoundRequestInstanceBlueprintsMessage(byte[] bytes) {
@@ -28,6 +46,7 @@ public ServiceBoundRequestInstanceBlueprintsMessage(byte[] bytes) {
final String[] id = readString().split(":");
this.identifier = Identifier.of(id[0], id[1]);
this.version = readString();
+ this.type = Type.valueOf(readString());
}
@Override
@@ -39,6 +58,7 @@ public ServiceBoundRequestInstanceBlueprintsMessage(byte[] bytes) {
public void write() {
writeString(identifier.string());
writeString(version);
+ writeString(type.name());
}
public Identifier identifier() {
@@ -49,6 +69,14 @@ public String version() {
return this.version;
}
+ public Type type() {
+ return this.type;
+ }
+
+ public enum Type {
+ DEV, STAGING, RELEASE
+ }
+
public static class Response extends InstanceBoundMessage {
 private final VersionCollection<InstanceBlueprint> versionCollection;
diff --git a/api/src/main/java/me/combimagnetron/lagoon/util/VersionCollection.java b/api/src/main/java/me/combimagnetron/lagoon/util/VersionCollection.java
index e887656..2bbe6ed 100644
--- a/api/src/main/java/me/combimagnetron/lagoon/util/VersionCollection.java
+++ b/api/src/main/java/me/combimagnetron/lagoon/util/VersionCollection.java
@@ -2,9 +2,17 @@
import java.util.ArrayList;
import java.util.HashSet;
+import java.util.List;
 public class VersionCollection<E> extends ArrayList<E> {
+ @SafeVarargs
+ public static VersionCollection of(E... es) {
+ VersionCollection versionCollection = new VersionCollection<>();
+ versionCollection.addAll(List.of(es));
+ return versionCollection;
+ }
+
public E newest() {
return get(0);
}
diff --git a/pilot/src/main/java/me/combimagnetron/pilot/Pilot.java b/pilot/src/main/java/me/combimagnetron/pilot/Pilot.java
index 5a10b9e..1fb8c72 100644
--- a/pilot/src/main/java/me/combimagnetron/pilot/Pilot.java
+++ b/pilot/src/main/java/me/combimagnetron/pilot/Pilot.java
@@ -28,6 +28,7 @@ public class Pilot implements Service {
private final MessageChannel channel;
private final ApiClient k8sApiClient;
private final CoreV1Api k8sApi;
+ private static Pilot pilot;
static {
try {
@@ -44,6 +45,7 @@ public Pilot(MessageClient client) throws IOException, ApiException {
this.k8sApiClient = Config.defaultClient();
this.k8sApi = new CoreV1Api(k8sApiClient);
V1Deployment deployment = new V1Deployment();
+ pilot = this;
}
@Override
@@ -66,8 +68,16 @@ public StringStringParameter config() {
return new StringStringParameter();
}
+ public MessageClient messageClient() {
+ return this.client;
+ }
+
public static GitHub gitHub() {
return GIT_HUB;
}
+ public static Pilot pilot() {
+ return pilot;
+ }
+
}
diff --git a/pilot/src/main/java/me/combimagnetron/pilot/listener/ServiceRequestBlueprintListener.java b/pilot/src/main/java/me/combimagnetron/pilot/listener/ServiceRequestBlueprintListener.java
index b5af0d9..4235795 100644
--- a/pilot/src/main/java/me/combimagnetron/pilot/listener/ServiceRequestBlueprintListener.java
+++ b/pilot/src/main/java/me/combimagnetron/pilot/listener/ServiceRequestBlueprintListener.java
@@ -2,13 +2,19 @@
import me.combimagnetron.lagoon.communication.MessageHandler;
import me.combimagnetron.lagoon.communication.MessageListener;
+import me.combimagnetron.lagoon.communication.message.MessageChannel;
import me.combimagnetron.lagoon.communication.message.impl.servicebound.ServiceBoundRequestInstanceBlueprintsMessage;
import me.combimagnetron.lagoon.data.Identifier;
import me.combimagnetron.lagoon.instance.InstanceBlueprint;
+import me.combimagnetron.lagoon.util.VersionCollection;
import me.combimagnetron.pilot.Pilot;
+import org.kohsuke.github.GHBranch;
import org.kohsuke.github.GHRepository;
import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.stream.IntStream;
@MessageHandler(filter = ServiceBoundRequestInstanceBlueprintsMessage.class, channel = "service:pilot")
public class ServiceRequestBlueprintListener implements MessageListener {
@@ -20,14 +26,51 @@ public void send(ServiceBoundRequestInstanceBlueprintsMessage message) {
@Override
public void receive(ServiceBoundRequestInstanceBlueprintsMessage message) {
final Identifier identifier = message.identifier();
- final String version = message.version();
+ final String type = message.type().name().toLowerCase();
+ String version = message.version();
GHRepository repository;
try {
repository = Pilot.gitHub().getRepository(identifier.namespace().string() + "/" + identifier.key().string());
} catch (IOException e) {
throw new RuntimeException(e);
}
- InstanceBlueprint blueprint = new InstanceBlueprint(InstanceBlueprint.Info.info(null, null, null, null));
+ if (version.equals("latest")) {
+ try {
+ version = latestVersion(repository, type);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ String versionPrefix = "release(" + version + "): ";
+ GHBranch branch;
+ try {
+ branch = repository.getBranches().entrySet().stream().filter(entry -> entry.getKey().startsWith(versionPrefix)).findAny().orElse(null).getValue();
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ InstanceBlueprint blueprint;
+ try {
+ blueprint = new InstanceBlueprint(InstanceBlueprint.Info.info(Identifier.of(repository.getOwnerName(), repository.getName()), branch.getName(), branch.getOwner().getCreatedAt(), ""));
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ VersionCollection<InstanceBlueprint> blueprintVersionCollection = VersionCollection.of(blueprint);
+ MessageChannel messageChannel = Pilot.pilot().messageClient().channel(Identifier.of("service", "pilot")).async();
+ ServiceBoundRequestInstanceBlueprintsMessage.Response response = new ServiceBoundRequestInstanceBlueprintsMessage.Response(blueprintVersionCollection);
+ messageChannel.send(response).async();
+ }
+
+ private String latestVersion(GHRepository ghRepository, String type) throws IOException {
+ Map<Integer, GHBranch> intMap = new HashMap<>();
+ ghRepository.getBranches().forEach((key, value) -> {
+ int intKey = Integer.parseInt(extractVersion(key, type));
+ intMap.put(intKey, value);
+ });
+ return extractVersion(intMap.get(intMap.keySet().stream().flatMapToInt(IntStream::of).max().orElse(0)).getName(), type);
+ }
+
+ private String extractVersion(String string, String type) {
+ return string.split(":")[0].replace(type + "(", "").replace(")", "").replace(".", "");
}
@Override
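
A worked example of the branch-name parsing introduced above, using a hypothetical branch name; with type "release", extractVersion reduces the name to a digits-only string that latestVersion can compare numerically.

    String branch = "release(1.2.3): lobby rework";  // hypothetical branch name
    String version = branch.split(":")[0]            // -> "release(1.2.3)"
            .replace("release(", "")
            .replace(")", "")
            .replace(".", "");                       // -> "123"
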
From 8272b3a223b86035a312d9ea9b1235bd601341ca Mon Sep 17 00:00:00 2001
From: Combimagnetron
Date: Mon, 21 Aug 2023 10:34:13 +0200
Subject: [PATCH 6/6] fix(n/a): gamelevel
---
.../lagoon/world/GameLevel.java | 77 ++++++++++++++++++-
1 file changed, 75 insertions(+), 2 deletions(-)
diff --git a/paper-v1_19_R3/src/main/java/me/combimagnetron/lagoon/world/GameLevel.java b/paper-v1_19_R3/src/main/java/me/combimagnetron/lagoon/world/GameLevel.java
index c06e0a2..fbe4d5f 100644
--- a/paper-v1_19_R3/src/main/java/me/combimagnetron/lagoon/world/GameLevel.java
+++ b/paper-v1_19_R3/src/main/java/me/combimagnetron/lagoon/world/GameLevel.java
@@ -1,18 +1,37 @@
package me.combimagnetron.lagoon.world;
+import com.mojang.serialization.Codec;
import com.mojang.serialization.Lifecycle;
import me.combimagnetron.lagoon.data.Identifier;
+import net.minecraft.core.BlockPos;
+import net.minecraft.core.Holder;
+import net.minecraft.core.registries.Registries;
+import net.minecraft.resources.ResourceLocation;
import net.minecraft.server.MinecraftServer;
import net.minecraft.server.level.ServerLevel;
+import net.minecraft.server.level.WorldGenRegion;
import net.minecraft.server.level.progress.ChunkProgressListener;
+import net.minecraft.tags.TagKey;
+import net.minecraft.util.RandomSource;
import net.minecraft.util.datafix.DataFixers;
+import net.minecraft.util.valueproviders.IntProvider;
+import net.minecraft.util.valueproviders.IntProviderType;
import net.minecraft.world.Difficulty;
import net.minecraft.world.flag.FeatureFlagSet;
import net.minecraft.world.level.*;
+import net.minecraft.world.level.biome.BiomeManager;
+import net.minecraft.world.level.biome.BiomeSource;
+import net.minecraft.world.level.biome.Climate;
+import net.minecraft.world.level.chunk.ChunkAccess;
+import net.minecraft.world.level.chunk.ChunkGenerator;
import net.minecraft.world.level.chunk.ChunkStatus;
import net.minecraft.world.level.dimension.DimensionType;
import net.minecraft.world.level.dimension.LevelStem;
+import net.minecraft.world.level.levelgen.GenerationStep;
+import net.minecraft.world.level.levelgen.Heightmap;
+import net.minecraft.world.level.levelgen.RandomState;
import net.minecraft.world.level.levelgen.WorldOptions;
+import net.minecraft.world.level.levelgen.blending.Blender;
import net.minecraft.world.level.storage.LevelStorageSource;
import net.minecraft.world.level.storage.PrimaryLevelData;
import org.apache.commons.io.FileUtils;
@@ -22,6 +41,7 @@
import org.bukkit.block.data.BlockData;
import org.bukkit.generator.BiomeProvider;
import org.bukkit.generator.WorldInfo;
+import org.checkerframework.checker.units.qual.C;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
@@ -29,6 +49,10 @@
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.Executor;
+import java.util.function.IntSupplier;
+import java.util.stream.Stream;
public class GameLevel {
private final ServerLevel serverLevel;
@@ -41,8 +65,8 @@ public GameLevel(Identifier identifier, MeridianWorld meridianWorld) {
LevelAccess.levelStorageAccess(),
LevelData.primaryLevelData(),
Level.END,
- LevelStem.END,
- new ChunkProgressListenerDummy(),
+ LevelDimensionStem.levelStem(),
+ ChunkProgressListenerDummy.chunkProgressListenerDummy(),
false,
0L,
List.of(),
@@ -91,6 +115,51 @@ public static PrimaryLevelData primaryLevelData() {
}
+ static final class LevelDimensionStem {
+
+ public static LevelStem levelStem() {
+ IntProvider intProvider = new IntProvider() {
+ @Override
+ public int sample(RandomSource random) {
+ return 15;
+ }
+
+ @Override
+ public int getMinValue() {
+ return 15;
+ }
+
+ @Override
+ public int getMaxValue() {
+ return 15;
+ }
+
+ @Override
+ public @NotNull IntProviderType<?> getType() {
+ return IntProviderType.CONSTANT;
+ }
+ };
+ DimensionType dimensionType = new DimensionType(
+ OptionalLong.of(0L),
+ false,
+ false,
+ false,
+ true,
+ 1.0,
+ true,
+ false,
+ -64,
+ 255,
+ 64,
+ TagKey.create(Registries.BLOCK, ResourceLocation.of("air", ' ')),
+ ResourceLocation.of("", ' '),
+ 0.20f,
+ new DimensionType.MonsterSettings(true, false, intProvider, 15)
+ );
+ return new LevelStem(Holder.direct(dimensionType), null);
+ }
+ }
+
static final class ChunkGeneratorImpl extends org.bukkit.generator.ChunkGenerator {
private final MeridianWorld meridianWorld;
@@ -149,6 +218,10 @@ public static BiomeProviderImpl biomeProvider() {
static final class ChunkProgressListenerDummy implements ChunkProgressListener {
+ public static ChunkProgressListenerDummy chunkProgressListenerDummy() {
+ return new ChunkProgressListenerDummy();
+ }
+
@Override
public void updateSpawnPos(ChunkPos spawnPos) {