Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
import com.infernalsuite.asp.api.world.SlimeChunk;
import com.infernalsuite.asp.api.world.SlimeChunkSection;
import com.infernalsuite.asp.api.world.SlimeWorld;
import com.infernalsuite.asp.util.Util;
import net.kyori.adventure.nbt.CompoundBinaryTag;
import it.unimi.dsi.fastutil.longs.Long2ObjectMap;
import it.unimi.dsi.fastutil.longs.Long2ObjectOpenHashMap;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import ca.spottedleaf.moonrise.patches.chunk_system.level.entity.ChunkEntitySlices;
import ca.spottedleaf.moonrise.patches.chunk_system.level.poi.PoiChunk;
import com.infernalsuite.asp.Converter;
import com.infernalsuite.asp.Util;
import com.infernalsuite.asp.util.Util;
import com.infernalsuite.asp.api.exceptions.WorldAlreadyExistsException;
import com.infernalsuite.asp.api.loaders.SlimeLoader;
import com.infernalsuite.asp.api.world.properties.SlimeProperties;
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
package com.infernalsuite.asp.serialization.anvil;

import com.infernalsuite.asp.Util;
import com.infernalsuite.asp.util.Util;
import com.infernalsuite.asp.api.exceptions.InvalidWorldException;
import com.infernalsuite.asp.api.utils.NibbleArray;
import com.infernalsuite.asp.api.world.SlimeChunk;
Expand Down
Original file line number Diff line number Diff line change
@@ -1,13 +1,15 @@
package com.infernalsuite.asp.serialization.slime;

import com.github.luben.zstd.Zstd;
import com.github.luben.zstd.ZstdOutputStream;
import com.infernalsuite.asp.api.utils.SlimeFormat;
import com.infernalsuite.asp.api.world.SlimeChunk;
import com.infernalsuite.asp.api.world.SlimeChunkSection;
import com.infernalsuite.asp.api.world.SlimeWorld;
import com.infernalsuite.asp.api.world.properties.SlimeProperties;
import com.infernalsuite.asp.api.world.properties.SlimePropertyMap;
import com.infernalsuite.asp.serialization.slime.reader.impl.v13.v13AdditionalWorldData;
import com.infernalsuite.asp.util.CountingOutputStream;
import com.infernalsuite.asp.util.ThrowingConsumer;
import net.kyori.adventure.nbt.BinaryTag;
import net.kyori.adventure.nbt.BinaryTagIO;
import net.kyori.adventure.nbt.BinaryTagTypes;
Expand All @@ -16,9 +18,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.*;
import java.util.*;

public class SlimeSerializer {
Expand Down Expand Up @@ -60,20 +60,13 @@ public static byte[] serialize(SlimeWorld world) {
outStream.writeByte(v13AdditionalWorldData.fromSet(additionalWorldData));

// Chunks
byte[] chunkData = serializeChunks(world, world.getChunkStorage(), additionalWorldData);
byte[] compressedChunkData = Zstd.compress(chunkData);

outStream.writeInt(compressedChunkData.length);
outStream.writeInt(chunkData.length);
outStream.write(compressedChunkData);

// Extra Tag
byte[] extra = serializeCompoundTag(CompoundBinaryTag.builder().put(extraData).build());
byte[] compressedExtra = Zstd.compress(extra);
writeCompressed(outStream, value -> serializeChunks(value, world, world.getChunkStorage(), additionalWorldData));

outStream.writeInt(compressedExtra.length);
outStream.writeInt(extra.length);
outStream.write(compressedExtra);
writeCompressed(outStream, value -> {
//Avoid a buffered output stream by casting to DataOutput. Buffered Output Streams make the memory usage explode
BinaryTagIO.writer().write(CompoundBinaryTag.builder().put(extraData).build(), (DataOutput) new DataOutputStream(value));
});

} catch (Exception e) {
throw new RuntimeException(e);
Expand All @@ -82,9 +75,7 @@ public static byte[] serialize(SlimeWorld world) {
return outByteStream.toByteArray();
}

static byte[] serializeChunks(SlimeWorld world, Collection<SlimeChunk> chunks, EnumSet<v13AdditionalWorldData> data) throws IOException {
ByteArrayOutputStream outByteStream = new ByteArrayOutputStream(16384);
DataOutputStream outStream = new DataOutputStream(outByteStream);
static void serializeChunks(DataOutputStream outStream, SlimeWorld world, Collection<SlimeChunk> chunks, EnumSet<v13AdditionalWorldData> data) throws IOException {

// Prune chunks
List<SlimeChunk> chunksToSave = chunks.stream()
Expand Down Expand Up @@ -175,15 +166,33 @@ static byte[] serializeChunks(SlimeWorld world, Collection<SlimeChunk> chunks, E

// Extra Tag
if (chunk.getExtraData() == null) {
LOGGER.warn("Chunk at " + chunk.getX() + ", " + chunk.getZ() + " from world " + world.getName() + " has no extra data! When deserialized, this chunk will have an empty extra data tag!");
LOGGER.warn("Chunk at {}, {} from world {} has no extra data! When deserialized, this chunk will have an empty extra data tag!", chunk.getX(), chunk.getZ(), world.getName());
}
byte[] extra = serializeCompoundTag(CompoundBinaryTag.from(chunk.getExtraData()));

outStream.writeInt(extra.length);
outStream.write(extra);
}
}

return outByteStream.toByteArray();
private static void writeCompressed(DataOutputStream out, ThrowingConsumer<DataOutputStream> writer) throws Exception {
ByteArrayOutputStream compressedOut = new ByteArrayOutputStream();
ZstdOutputStream zstd = new ZstdOutputStream(compressedOut);
DataOutputStream dataOut = new DataOutputStream(zstd);

CountingOutputStream counting = new CountingOutputStream(dataOut);

// write uncompressed data into zstd stream
writer.accept(new DataOutputStream(counting));

dataOut.flush();
zstd.close();

byte[] compressed = compressedOut.toByteArray();

out.writeInt(compressed.length);
out.writeInt((int) counting.getCount());
out.write(compressed);
Comment on lines +183 to +195
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can't we use dataOut.size() instead of the entire CountingOutputStream thing?

}

private static CompoundBinaryTag wrap(String key, ListBinaryTag list) {
Expand All @@ -197,7 +206,8 @@ protected static byte[] serializeCompoundTag(CompoundBinaryTag tag) throws IOExc
if (tag == null || tag.isEmpty()) return new byte[0];

ByteArrayOutputStream outByteStream = new ByteArrayOutputStream();
BinaryTagIO.writer().write(tag, outByteStream);
//Avoid a buffered output stream by casting to DataOutput. Buffered Output Streams make the memory usage explode
BinaryTagIO.writer().write(tag, (DataOutput) new DataOutputStream(outByteStream));

return outByteStream.toByteArray();
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,15 +1,14 @@
package com.infernalsuite.asp.serialization.slime.reader.impl.v10;

import com.github.luben.zstd.Zstd;
import com.infernalsuite.asp.Util;
import com.infernalsuite.asp.util.Util;
import com.infernalsuite.asp.api.exceptions.CorruptedWorldException;
import com.infernalsuite.asp.api.loaders.SlimeLoader;
import com.infernalsuite.asp.serialization.slime.reader.VersionedByteSlimeWorldReader;
import com.infernalsuite.asp.api.utils.NibbleArray;
import com.infernalsuite.asp.api.world.SlimeChunk;
import com.infernalsuite.asp.api.world.SlimeChunkSection;
import com.infernalsuite.asp.api.world.SlimeWorld;
import com.infernalsuite.asp.api.world.properties.SlimeProperties;
import com.infernalsuite.asp.api.world.properties.SlimePropertyMap;

import com.infernalsuite.asp.skeleton.SlimeChunkSectionSkeleton;
Expand Down
Original file line number Diff line number Diff line change
@@ -1,23 +1,26 @@
package com.infernalsuite.asp.serialization.slime.reader.impl.v11;

import com.github.luben.zstd.Zstd;
import com.infernalsuite.asp.Util;
import com.github.luben.zstd.ZstdInputStream;
import com.infernalsuite.asp.util.Util;
import com.infernalsuite.asp.api.exceptions.CorruptedWorldException;
import com.infernalsuite.asp.api.exceptions.NewerFormatException;
import com.infernalsuite.asp.api.loaders.SlimeLoader;
import com.infernalsuite.asp.api.utils.NibbleArray;
import com.infernalsuite.asp.api.world.SlimeChunk;
import com.infernalsuite.asp.api.world.SlimeChunkSection;
import com.infernalsuite.asp.api.world.SlimeWorld;
import com.infernalsuite.asp.api.world.properties.SlimeProperties;
import com.infernalsuite.asp.api.world.properties.SlimePropertyMap;
import com.infernalsuite.asp.util.LimitedInputStream;
import com.infernalsuite.asp.skeleton.SlimeChunkSectionSkeleton;
import com.infernalsuite.asp.skeleton.SlimeChunkSkeleton;
import it.unimi.dsi.fastutil.longs.Long2ObjectMap;
import it.unimi.dsi.fastutil.longs.Long2ObjectOpenHashMap;
import net.kyori.adventure.nbt.*;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.ByteArrayInputStream;
import java.io.BufferedInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.ArrayList;
Expand All @@ -35,11 +38,10 @@ public class v11SlimeWorldDeSerializer implements com.infernalsuite.asp.serializ
public SlimeWorld deserializeWorld(byte version, @Nullable SlimeLoader loader, String worldName, DataInputStream dataStream, SlimePropertyMap propertyMap, boolean readOnly) throws IOException, CorruptedWorldException, NewerFormatException {
int worldVersion = dataStream.readInt();

byte[] chunkBytes = readCompressed(dataStream);
DataInputStream chunkBytes = openCompressedStream(dataStream);
Long2ObjectMap<SlimeChunk> chunks = readChunks(propertyMap, chunkBytes);

byte[] extraTagBytes = readCompressed(dataStream);
CompoundBinaryTag extraTag = readCompound(extraTagBytes);
CompoundBinaryTag extraTag = readCompressedCompound(dataStream);

SlimePropertyMap worldPropertyMap = propertyMap;
CompoundBinaryTag propertiesMap = extraTag.get("properties") != null
Expand All @@ -57,12 +59,13 @@ public SlimeWorld deserializeWorld(byte version, @Nullable SlimeLoader loader, S
ConcurrentMap<String, BinaryTag> extraData = new ConcurrentHashMap<>();
extraTag.forEach(entry -> extraData.put(entry.getKey(), entry.getValue()));

chunkBytes.close();
dataStream.close();
return new com.infernalsuite.asp.skeleton.SkeletonSlimeWorld(worldName, loader, readOnly, chunks, extraData, worldPropertyMap, worldVersion);
}

private static Long2ObjectMap<SlimeChunk> readChunks(SlimePropertyMap slimePropertyMap, byte[] chunkBytes) throws IOException {
private static Long2ObjectMap<SlimeChunk> readChunks(SlimePropertyMap slimePropertyMap, DataInputStream chunkData) throws IOException {
Long2ObjectMap<SlimeChunk> chunkMap = new Long2ObjectOpenHashMap<>();
DataInputStream chunkData = new DataInputStream(new ByteArrayInputStream(chunkBytes));

int chunks = chunkData.readInt();
for (int i = 0; i < chunks; i++) {
Expand Down Expand Up @@ -97,33 +100,20 @@ private static Long2ObjectMap<SlimeChunk> readChunks(SlimePropertyMap slimePrope
}

// Block Data
byte[] blockStateData = new byte[chunkData.readInt()];
chunkData.read(blockStateData);
CompoundBinaryTag blockStateTag = readCompound(blockStateData);
CompoundBinaryTag blockStateTag = readLimitedCompound(chunkData);

// Biome Data
byte[] biomeData = new byte[chunkData.readInt()];
chunkData.read(biomeData);
CompoundBinaryTag biomeTag = readCompound(biomeData);
CompoundBinaryTag biomeTag = readLimitedCompound(chunkData);

chunkSections[sectionId] = new com.infernalsuite.asp.skeleton.SlimeChunkSectionSkeleton(blockStateTag, biomeTag, blockLightArray, skyLightArray);
chunkSections[sectionId] = new SlimeChunkSectionSkeleton(blockStateTag, biomeTag, blockLightArray, skyLightArray);
}

// HeightMaps
byte[] heightMapData = new byte[chunkData.readInt()];
chunkData.read(heightMapData);
CompoundBinaryTag heightMaps = readCompound(heightMapData);
CompoundBinaryTag heightMaps = readLimitedCompound(chunkData);

// Tile Entities

int compressedTileEntitiesLength = chunkData.readInt();
int decompressedTileEntitiesLength = chunkData.readInt();
byte[] compressedTileEntitiesData = new byte[compressedTileEntitiesLength];
byte[] decompressedTileEntitiesData = new byte[decompressedTileEntitiesLength];
chunkData.read(compressedTileEntitiesData);
Zstd.decompress(decompressedTileEntitiesData, compressedTileEntitiesData);

CompoundBinaryTag tileEntitiesCompound = readCompound(decompressedTileEntitiesData);
CompoundBinaryTag tileEntitiesCompound = readCompressedCompound(chunkData);

ListBinaryTag tileEntitiesTag = tileEntitiesCompound.getList("tileEntities", BinaryTagTypes.COMPOUND);
List<CompoundBinaryTag> serializedTileEntities = new ArrayList<>(tileEntitiesTag.size());
Expand All @@ -133,14 +123,7 @@ private static Long2ObjectMap<SlimeChunk> readChunks(SlimePropertyMap slimePrope

// Entities

int compressedEntitiesLength = chunkData.readInt();
int decompressedEntitiesLength = chunkData.readInt();
byte[] compressedEntitiesData = new byte[compressedEntitiesLength];
byte[] decompressedEntitiesData = new byte[decompressedEntitiesLength];
chunkData.read(compressedEntitiesData);
Zstd.decompress(decompressedEntitiesData, compressedEntitiesData);

CompoundBinaryTag entitiesCompound = readCompound(decompressedEntitiesData);
CompoundBinaryTag entitiesCompound = readCompressedCompound(chunkData);
ListBinaryTag entitiesTag = entitiesCompound.getList("entities", BinaryTagTypes.COMPOUND);
List<CompoundBinaryTag> serializedEntities = new ArrayList<>(entitiesTag.size());
for (BinaryTag binaryTag : entitiesTag) {
Expand All @@ -153,19 +136,50 @@ private static Long2ObjectMap<SlimeChunk> readChunks(SlimePropertyMap slimePrope
return chunkMap;
}

private static byte[] readCompressed(DataInputStream stream) throws IOException {
private static DataInputStream openCompressedStream(DataInputStream stream) throws IOException {
int compressedLength = stream.readInt();
int decompressedLength = stream.readInt();
byte[] compressedData = new byte[compressedLength];
byte[] decompressedData = new byte[decompressedLength];
stream.read(compressedData);
Zstd.decompress(decompressedData, compressedData);
return decompressedData;
stream.readInt(); //Decompressed length, legacy

LimitedInputStream limitedInputStream = new LimitedInputStream(stream, compressedLength);
ZstdInputStream inputStream = new ZstdInputStream(limitedInputStream);
return new DataInputStream(new BufferedInputStream(inputStream));
}

/**
 * Reads one length-prefixed, uncompressed NBT compound from {@code stream}.
 *
 * @param stream source positioned at a [length][nbt bytes] section
 * @return the parsed compound, or the empty compound when the stored length is zero
 * @throws IOException if the length or the tag cannot be read
 */
private static @NotNull CompoundBinaryTag readLimitedCompound(DataInputStream stream) throws IOException {
    int payloadLength = stream.readInt();
    if (payloadLength == 0) {
        return CompoundBinaryTag.empty();
    }

    LimitedInputStream limited = new LimitedInputStream(stream, payloadLength);

    //Avoid a buffered input stream by casting to DataInput. Buffered Input Streams make the memory
    //usage explode (e.g. with buffered streams here 1,3gb; with a data input directly: 300mb)
    CompoundBinaryTag compound = BinaryTagIO.unlimitedReader().read((DataInput) new DataInputStream(limited));

    //binary tag reading does not guarantee that the buffer is fully read. If we don't do this,
    //we might error out later
    limited.drainRemaining();

    return compound;
}

private static CompoundBinaryTag readCompound(byte[] tagBytes) throws IOException {
if (tagBytes.length == 0) return CompoundBinaryTag.empty();
private static @NotNull CompoundBinaryTag readCompressedCompound(DataInputStream stream) throws IOException {
int compressedLength = stream.readInt();
int decompressedLength = stream.readInt();

if(decompressedLength == 0) return CompoundBinaryTag.empty();

return BinaryTagIO.unlimitedReader().read(new ByteArrayInputStream(tagBytes));
LimitedInputStream limitedInputStream = new LimitedInputStream(stream, compressedLength);
try(ZstdInputStream zstd = new ZstdInputStream(limitedInputStream)) {

//Avoid a buffered input stream by casting to DataInput. Buffered Input Streams make the memory
//usage explode (e.g. with buffered streams here 1,3gb; with a data input directly: 300mb)
CompoundBinaryTag tag = BinaryTagIO.unlimitedReader().read((DataInput) new DataInputStream(zstd));

//binary tag reading does not guarantee that the buffer is fully read. If we don't do this,
//we might error out later
byte[] buffer = new byte[512];
while (zstd.read(buffer) != -1) {}

return tag;
}
}
}
Loading