Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,157 @@
/*
* This file is part of ViaRewind - https://github.com/ViaVersion/ViaRewind
* Copyright (C) 2018-2026 ViaVersion and contributors
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.viaversion.viarewind.api.compression;

import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;

import java.util.zip.DataFormatException;
import java.util.zip.Deflater;
import java.util.zip.Inflater;

/**
* A ThreadLocal-based compression provider that either uses Velocity's native compression
* or falls back to Java's built-in compression (Deflater/Inflater).
*/
public final class ThreadLocalCompressionProvider {

    /** Whether Velocity's native compression classes are present on the classpath. */
    private static final boolean VELOCITY_NATIVES_AVAILABLE;

    static {
        boolean velocityAvailable = false;
        try {
            Class.forName("com.velocitypowered.natives.compression.VelocityCompressor");
            velocityAvailable = true;
        } catch (final ClassNotFoundException ignored) {
            // Velocity natives not on the classpath; fall back to java.util.zip below.
        }
        VELOCITY_NATIVES_AVAILABLE = velocityAvailable;
    }

    // ThreadLocal for Java's Deflater (used when Velocity natives are not available).
    // Instances are reused per thread and reset after each use.
    private static final ThreadLocal<Deflater> JAVA_DEFLATER = ThreadLocal.withInitial(Deflater::new);

    // ThreadLocal for Java's Inflater (used when Velocity natives are not available).
    private static final ThreadLocal<Inflater> JAVA_INFLATER = ThreadLocal.withInitial(Inflater::new);

    private ThreadLocalCompressionProvider() {
        // Static utility class; not instantiable.
    }

    /**
     * Compresses data from the source buffer into the destination buffer.
     *
     * @param source the source buffer containing uncompressed data
     * @param destination the destination buffer to write compressed data to
     * @throws DataFormatException if compression fails
     */
    public static void deflate(final ByteBuf source, final ByteBuf destination) throws DataFormatException {
        if (VELOCITY_NATIVES_AVAILABLE) {
            deflateVelocity(source, destination);
        } else {
            deflateJava(source, destination);
        }
    }

    /**
     * Decompresses data from the source buffer into the destination buffer.
     *
     * @param source the source buffer containing compressed data
     * @param destination the destination buffer to write decompressed data to
     * @param expectedSize the expected size of the decompressed data
     * @throws DataFormatException if decompression fails or the input is truncated/corrupt
     */
    public static void inflate(final ByteBuf source, final ByteBuf destination, final int expectedSize) throws DataFormatException {
        if (VELOCITY_NATIVES_AVAILABLE) {
            inflateVelocity(source, destination, expectedSize);
        } else {
            inflateJava(source, destination, expectedSize);
        }
    }

    // java.util.zip fallback implementation

    private static void deflateJava(final ByteBuf source, final ByteBuf destination) throws DataFormatException {
        // Deflater only accepts byte[] input, so copy direct buffers onto the heap.
        // For heap buffers, retain so the unconditional release in finally is balanced.
        ByteBuf temp = source;
        if (!source.hasArray()) {
            temp = ByteBufAllocator.DEFAULT.heapBuffer().writeBytes(source);
        } else {
            source.retain();
        }
        final ByteBuf output = ByteBufAllocator.DEFAULT.heapBuffer();
        final Deflater deflater = JAVA_DEFLATER.get();
        try {
            deflater.setInput(temp.array(), temp.arrayOffset() + temp.readerIndex(), temp.readableBytes());
            deflater.finish();

            // deflate() may fill the output in several passes; grow the heap buffer
            // in 4 KiB steps until the stream is fully flushed. array() is re-read
            // each iteration because ensureWritable may reallocate the backing array.
            while (!deflater.finished()) {
                output.ensureWritable(4096);
                final int written = deflater.deflate(output.array(), output.arrayOffset() + output.writerIndex(), output.writableBytes());
                output.writerIndex(output.writerIndex() + written);
            }
            destination.writeBytes(output);
        } finally {
            output.release();
            temp.release();
            // Reset so the per-thread instance is clean for the next caller.
            deflater.reset();
        }
    }

    private static void inflateJava(final ByteBuf source, final ByteBuf destination, final int expectedSize) throws DataFormatException {
        // Inflater only accepts byte[] input; same heap-copy/retain dance as deflateJava.
        ByteBuf temp = source;
        if (!source.hasArray()) {
            temp = ByteBufAllocator.DEFAULT.heapBuffer().writeBytes(source);
        } else {
            source.retain();
        }
        final ByteBuf output = ByteBufAllocator.DEFAULT.heapBuffer(expectedSize, expectedSize);
        final Inflater inflater = JAVA_INFLATER.get();
        try {
            inflater.setInput(temp.array(), temp.arrayOffset() + temp.readerIndex(), temp.readableBytes());

            // Inflater.inflate() may return fewer bytes than requested in a single
            // call, so loop until the stream ends or the expected size is reached.
            // (The original single call could silently truncate the output.)
            int written = 0;
            while (!inflater.finished() && written < expectedSize) {
                final int n = inflater.inflate(output.array(), output.arrayOffset() + written, expectedSize - written);
                if (n == 0 && (inflater.needsInput() || inflater.needsDictionary())) {
                    // No progress possible: the input is truncated or corrupt.
                    throw new DataFormatException(
                        "Inflater stalled after " + written + " of " + expectedSize + " expected bytes");
                }
                written += n;
            }
            output.writerIndex(written);
            destination.writeBytes(output);
        } finally {
            output.release();
            temp.release();
            // Reset so the per-thread instance is clean for the next caller.
            inflater.reset();
        }
    }

    // Velocity native implementation (dispatched only when the classes exist,
    // so the VelocityHolder class is never loaded on the fallback path).

    private static void deflateVelocity(final ByteBuf source, final ByteBuf destination) throws DataFormatException {
        VelocityHolder.deflate(source, destination);
    }

    private static void inflateVelocity(final ByteBuf source, final ByteBuf destination, final int expectedSize) throws DataFormatException {
        VelocityHolder.inflate(source, destination, expectedSize);
    }

    /**
     * Lazily-loaded holder so the Velocity classes are only touched when
     * {@link #VELOCITY_NATIVES_AVAILABLE} is true.
     */
    private static final class VelocityHolder {

        // Level -1 selects the library's default compression level.
        private static final ThreadLocal<com.velocitypowered.natives.compression.VelocityCompressor> COMPRESSOR =
            ThreadLocal.withInitial(() -> com.velocitypowered.natives.util.Natives.compress.get().create(-1));

        static void deflate(final ByteBuf source, final ByteBuf destination) throws DataFormatException {
            COMPRESSOR.get().deflate(source, destination);
        }

        static void inflate(final ByteBuf source, final ByteBuf destination, final int expectedSize) throws DataFormatException {
            COMPRESSOR.get().inflate(source, destination, expectedSize);
        }
    }
}

Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,13 @@
*/
package com.viaversion.viarewind.api.type.chunk;

import com.viaversion.viarewind.api.compression.ThreadLocalCompressionProvider;
import com.viaversion.viaversion.api.minecraft.chunks.Chunk;
import com.viaversion.viaversion.api.type.Type;
import io.netty.buffer.ByteBuf;

import java.util.zip.Deflater;
import java.util.zip.DataFormatException;


public class BulkChunkType1_7_6 extends Type<Chunk[]> {

Expand Down Expand Up @@ -63,41 +65,48 @@ public void write(ByteBuf buffer, Chunk[] chunks) {
);
}

final byte[] data = new byte[totalSize];
int offset = 0;
final ByteBuf uncompressed = buffer.alloc().buffer(totalSize);

for (int i = 0; i < chunkCount; i++) {
Chunk chunk = chunks[i];
boolean biomes = chunk.isFullChunk() && chunk.getBiomeData() != null;
try {
for (int i = 0; i < chunkCount; i++) {
Chunk chunk = chunks[i];
boolean biomes = chunk.isFullChunk() && chunk.getBiomeData() != null;

ChunkType1_7_6.serialize(
chunk,
uncompressed,
addBitMasks[i],
anySkyLight,
biomes
);
}

offset = ChunkType1_7_6.serialize(
chunk,
data,
offset,
addBitMasks[i],
anySkyLight,
biomes
);
}
buffer.writeShort(chunkCount);

buffer.writeShort(chunkCount);
// Reserve 4 bytes for the compressed size
final int sizeIndex = buffer.writerIndex();
buffer.writeInt(0); // Placeholder for compressed size

final Deflater deflater = new Deflater();
byte[] compressedData;
int compressedSize;
try {
deflater.setInput(data, 0, data.length);
deflater.finish();
compressedData = new byte[data.length];
compressedSize = deflater.deflate(compressedData);
buffer.writeBoolean(anySkyLight);

// Write compressed data directly to output buffer
final int compressedStart = buffer.writerIndex();
try {
ThreadLocalCompressionProvider.deflate(uncompressed, buffer);
} catch (DataFormatException e) {
throw new RuntimeException("Failed to compress bulk chunk data", e);
}
final int compressedSize = buffer.writerIndex() - compressedStart;

// Go back and write the compressed size
final int endIndex = buffer.writerIndex();
buffer.writerIndex(sizeIndex);
buffer.writeInt(compressedSize);
buffer.writerIndex(endIndex);
} finally {
deflater.end();
uncompressed.release();
}

buffer.writeInt(compressedSize);
buffer.writeBoolean(anySkyLight);
buffer.writeBytes(compressedData, 0, compressedSize);

for (int i = 0; i < chunkCount; i++) {
Chunk chunk = chunks[i];
buffer.writeInt(chunk.getX());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,13 +17,15 @@
*/
package com.viaversion.viarewind.api.type.chunk;

import com.viaversion.viarewind.api.compression.ThreadLocalCompressionProvider;
import com.viaversion.viaversion.api.minecraft.chunks.Chunk;
import com.viaversion.viaversion.api.minecraft.chunks.ChunkSection;
import com.viaversion.viaversion.api.minecraft.chunks.DataPalette;
import com.viaversion.viaversion.api.minecraft.chunks.PaletteType;
import com.viaversion.viaversion.api.type.Type;
import io.netty.buffer.ByteBuf;
import java.util.zip.Deflater;

import java.util.zip.DataFormatException;

import static com.viaversion.viaversion.api.minecraft.chunks.ChunkSection.SIZE;
import static com.viaversion.viaversion.api.minecraft.chunks.ChunkSectionLight.LIGHT_LENGTH;
Expand All @@ -49,33 +51,41 @@ public void write(ByteBuf buffer, Chunk chunk) {
final boolean biomes = chunk.isFullChunk() && chunk.getBiomeData() != null;

final int size = calcSize(bitmask, addBitmask, hasSkyLight, biomes);
final byte[] data = new byte[size];

serialize(chunk, data, 0, addBitmask, hasSkyLight, biomes);
final ByteBuf uncompressed = buffer.alloc().buffer(size);

buffer.writeInt(chunk.getX());
buffer.writeInt(chunk.getZ());
buffer.writeBoolean(chunk.isFullChunk());
buffer.writeShort(bitmask);
buffer.writeShort(addBitmask);

final Deflater deflater = new Deflater();
byte[] compressedData;
int compressedSize;
try {
deflater.setInput(data, 0, data.length);
deflater.finish();
compressedData = new byte[data.length];
compressedSize = deflater.deflate(compressedData);
serialize(chunk, uncompressed, addBitmask, hasSkyLight, biomes);

buffer.writeInt(chunk.getX());
buffer.writeInt(chunk.getZ());
buffer.writeBoolean(chunk.isFullChunk());
buffer.writeShort(bitmask);
buffer.writeShort(addBitmask);

// Reserve 4 bytes for the compressed size
final int sizeIndex = buffer.writerIndex();
buffer.writeInt(0); // Placeholder for compressed size

// Write compressed data directly to output buffer
final int compressedStart = buffer.writerIndex();
try {
ThreadLocalCompressionProvider.deflate(uncompressed, buffer);
} catch (DataFormatException e) {
throw new RuntimeException("Failed to compress chunk data", e);
}
final int compressedSize = buffer.writerIndex() - compressedStart;

// Go back and write the compressed size
final int endIndex = buffer.writerIndex();
buffer.writerIndex(sizeIndex);
buffer.writeInt(compressedSize);
buffer.writerIndex(endIndex);
} finally {
deflater.end();
uncompressed.release();
}

buffer.writeInt(compressedSize);
buffer.writeBytes(compressedData, 0, compressedSize);
}

public static int serialize(Chunk chunk, byte[] output, int offset, int addBitmask, boolean writeSkyLight, boolean biomes) {
public static void serialize(Chunk chunk, ByteBuf output, int addBitmask, boolean writeSkyLight, boolean biomes) {
final ChunkSection[] sections = chunk.getSections();
final int bitmask = chunk.getBitmask();

Expand All @@ -85,7 +95,7 @@ public static int serialize(Chunk chunk, byte[] output, int offset, int addBitma
final DataPalette palette = section.palette(PaletteType.BLOCKS);
for (int j = 0; j < SIZE; j++) {
final int block = palette.idAt(j);
output[offset++] = (byte) ((block >> 4) & 0xFF);
output.writeByte((block >> 4) & 0xFF);
}
}
}
Expand All @@ -97,16 +107,15 @@ public static int serialize(Chunk chunk, byte[] output, int offset, int addBitma
for (int j = 0; j < ChunkSection.SIZE; j += 2) {
final int meta1 = palette.idAt(j) & 0xF;
final int meta2 = palette.idAt(j + 1) & 0xF;
output[offset++] = (byte) (meta1 | (meta2 << 4));
output.writeByte(meta1 | (meta2 << 4));
}
}
}

for (int i = 0; i < 16; i++) {
if ((bitmask & (1 << i)) != 0) {
final byte[] blockLight = sections[i].getLight().getBlockLight();
System.arraycopy(blockLight, 0, output, offset, LIGHT_LENGTH);
offset += LIGHT_LENGTH;
output.writeBytes(blockLight);
}
}

Expand All @@ -115,9 +124,11 @@ public static int serialize(Chunk chunk, byte[] output, int offset, int addBitma
if ((bitmask & (1 << i)) != 0) {
if (sections[i].getLight().hasSkyLight()) {
final byte[] skyLight = sections[i].getLight().getSkyLight();
System.arraycopy(skyLight, 0, output, offset, LIGHT_LENGTH);
output.writeBytes(skyLight);
} else {
// Write empty skylight data
output.writeZero(LIGHT_LENGTH);
}
offset += LIGHT_LENGTH;
}
}
}
Expand All @@ -130,7 +141,7 @@ public static int serialize(Chunk chunk, byte[] output, int offset, int addBitma
for (int j = 0; j < SIZE; j += 2) {
final int add1 = (palette.idAt(j) >> 12) & 0xF;
final int add2 = (palette.idAt(j + 1) >> 12) & 0xF;
output[offset++] = (byte) (add1 | (add2 << 4));
output.writeByte(add1 | (add2 << 4));
}
}
}
Expand All @@ -139,11 +150,9 @@ public static int serialize(Chunk chunk, byte[] output, int offset, int addBitma
if (biomes && chunk.getBiomeData() != null) {
final int[] biomeData = chunk.getBiomeData();
for (int biome : biomeData) {
output[offset++] = (byte) biome;
output.writeByte(biome);
}
}

return offset;
}

public static int calcSize(int bitmask, int addBitmask, boolean hasSkyLight, boolean biomes) {
Expand Down
Loading