Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Upgrade leveldb-mcpe bindings to 0.0.10-SNAPSHOT and optimize chunk (de)serialization #28

Open
wants to merge 6 commits into
base: bleeding
Choose a base branch
from
Open
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Next commit
upgrade leveldb-mcpe bindings to 0.0.10-SNAPSHOT and optimize chunk (de)serialization
  • Loading branch information
DaMatrix committed Jul 31, 2020
commit 9d517813480cab502fdbd89e3b4ab3d7461abcb4
2 changes: 1 addition & 1 deletion pom.xml
Original file line number Diff line number Diff line change
@@ -272,7 +272,7 @@
<dependency>
<groupId>net.daporkchop</groupId>
<artifactId>leveldb-mcpe-jni</artifactId>
<version>0.0.7-SNAPSHOT</version>
<version>0.0.10-SNAPSHOT</version>
<exclusions>
<exclusion>
<groupId>com.google.guava</groupId>
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
package org.cloudburstmc.server.level.provider.leveldb.serializer;

import net.daporkchop.ldbjni.direct.DirectDB;
import net.daporkchop.ldbjni.direct.DirectWriteBatch;
import org.cloudburstmc.server.level.chunk.Chunk;
import org.cloudburstmc.server.level.chunk.ChunkBuilder;
import org.iq80.leveldb.DB;
import org.iq80.leveldb.WriteBatch;

/**
 * Strategy interface for reading and writing a chunk to a LevelDB-backed world store.
 * <p>
 * Uses the direct LevelDB bindings ({@code DirectDB}/{@code DirectWriteBatch}) so that
 * implementations can exchange chunk data as Netty {@code ByteBuf}s (see the
 * {@code getZeroCopy} usage in implementations) instead of copying through
 * intermediate {@code byte[]} arrays.
 */
interface ChunkSerializer {

    /**
     * Writes the given chunk's key/value entries into the write batch.
     *
     * @param db    the write batch that receives the chunk's entries
     * @param chunk the chunk to serialize
     */
    void serialize(DirectWriteBatch db, Chunk chunk);

    /**
     * Reads a chunk's data from the database into the given builder.
     *
     * @param db           the database to read from
     * @param chunkBuilder accumulates the deserialized chunk state
     */
    void deserialize(DirectDB db, ChunkBuilder chunkBuilder);
}
Original file line number Diff line number Diff line change
@@ -4,25 +4,26 @@
import io.netty.buffer.Unpooled;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
import net.daporkchop.ldbjni.direct.DirectDB;
import net.daporkchop.ldbjni.direct.DirectWriteBatch;
import org.cloudburstmc.server.level.chunk.Chunk;
import org.cloudburstmc.server.level.chunk.ChunkBuilder;
import org.cloudburstmc.server.level.provider.leveldb.LevelDBKey;
import org.cloudburstmc.server.utils.ChunkException;
import org.iq80.leveldb.DB;
import org.iq80.leveldb.WriteBatch;

@NoArgsConstructor(access = AccessLevel.PROTECTED)
class ChunkSerializerV1 implements ChunkSerializer {

static final ChunkSerializer INSTANCE = new ChunkSerializerV1();

@Override
public void serialize(WriteBatch db, Chunk chunk) {
public void serialize(DirectWriteBatch db, Chunk chunk) {
throw new UnsupportedOperationException();
}

@Override
public void deserialize(DB db, ChunkBuilder chunkBuilder) {
public void deserialize(DirectDB db, ChunkBuilder chunkBuilder) {
this.deserializeExtraData(db, chunkBuilder);

this.deserializeTerrain(db, chunkBuilder);
Original file line number Diff line number Diff line change
@@ -7,23 +7,23 @@
import it.unimi.dsi.fastutil.ints.Int2ShortOpenHashMap;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
import net.daporkchop.ldbjni.direct.DirectDB;
import net.daporkchop.ldbjni.direct.DirectWriteBatch;
import org.cloudburstmc.server.level.chunk.BlockStorage;
import org.cloudburstmc.server.level.chunk.Chunk;
import org.cloudburstmc.server.level.chunk.ChunkBuilder;
import org.cloudburstmc.server.level.chunk.ChunkSection;
import org.cloudburstmc.server.level.provider.leveldb.LevelDBKey;
import org.cloudburstmc.server.registry.BlockRegistry;
import org.cloudburstmc.server.utils.ChunkException;
import org.iq80.leveldb.DB;
import org.iq80.leveldb.WriteBatch;

@NoArgsConstructor(access = AccessLevel.PROTECTED)
class ChunkSerializerV3 extends ChunkSerializerV1 {

static ChunkSerializer INSTANCE = new ChunkSerializerV3();

@Override
public void serialize(WriteBatch db, Chunk chunk) {
public void serialize(DirectWriteBatch db, Chunk chunk) {
// Write chunk sections
for (int ySection = 0; ySection < Chunk.SECTION_COUNT; ySection++) {
ChunkSection section = chunk.getSection(ySection);
@@ -36,18 +36,17 @@ public void serialize(WriteBatch db, Chunk chunk) {
buffer.writeByte(ChunkSection.CHUNK_SECTION_VERSION);
ChunkSectionSerializers.serialize(buffer, section.getBlockStorageArray(), ChunkSection.CHUNK_SECTION_VERSION);

byte[] payload = new byte[buffer.readableBytes()];
buffer.readBytes(payload);
db.put(Unpooled.wrappedBuffer(LevelDBKey.SUBCHUNK_PREFIX.getKey(chunk.getX(), chunk.getZ(), ySection)), buffer);

db.put(LevelDBKey.SUBCHUNK_PREFIX.getKey(chunk.getX(), chunk.getZ(), ySection), payload);
buffer.clear(); //reset indices to prevent the buffer from constantly growing
} finally {
buffer.release();
}
}
}

@Override
public void deserialize(DB db, ChunkBuilder chunkBuilder) {
public void deserialize(DirectDB db, ChunkBuilder chunkBuilder) {
int chunkX = chunkBuilder.getX();
int chunkZ = chunkBuilder.getZ();

@@ -70,40 +69,43 @@ public void deserialize(DB db, ChunkBuilder chunkBuilder) {
ChunkSection[] sections = new ChunkSection[Chunk.SECTION_COUNT];

for (int ySection = 0; ySection < Chunk.SECTION_COUNT; ySection++) {
byte[] sectionData = db.get(LevelDBKey.SUBCHUNK_PREFIX.getKey(chunkX, chunkZ, ySection));
if (sectionData == null) {
continue;
}
ByteBuf buf = Unpooled.wrappedBuffer(sectionData);
if (!buf.isReadable()) {
throw new ChunkException("Empty sub-chunk " + ySection);
ByteBuf buf = db.getZeroCopy(Unpooled.wrappedBuffer(LevelDBKey.SUBCHUNK_PREFIX.getKey(chunkX, chunkZ, ySection)));
if (buf == null) {
continue; //entry doesn't exist, skip
}
try {
if (!buf.isReadable()) {
throw new ChunkException("Empty sub-chunk " + ySection);
}

int subChunkVersion = buf.readUnsignedByte();
if (subChunkVersion < ChunkSection.CHUNK_SECTION_VERSION) {
chunkBuilder.dirty();
}
BlockStorage[] blockStorage = ChunkSectionSerializers.deserialize(buf, chunkBuilder, subChunkVersion);

if (blockStorage[1] == null) {
blockStorage[1] = new BlockStorage();
if (extraDataMap != null) {
for (int x = 0; x < 16; x++) {
for (int z = 0; z < 16; z++) {
for (int y = ySection * 16, lim = y + 16; y < lim; y++) {
int key = Chunk.blockKey(x, y, z);
if (extraDataMap.containsKey(key)) {
short value = extraDataMap.get(Chunk.blockKey(x, y, z));
int blockId = value & 0xff;
int blockData = (value >> 8) & 0xf;
blockStorage[1].setBlock(ChunkSection.blockIndex(x, y, z), BlockRegistry.get().getBlock(blockId, blockData));
int subChunkVersion = buf.readUnsignedByte();
if (subChunkVersion < ChunkSection.CHUNK_SECTION_VERSION) {
chunkBuilder.dirty();
}
BlockStorage[] blockStorage = ChunkSectionSerializers.deserialize(buf, chunkBuilder, subChunkVersion);

if (blockStorage[1] == null) {
blockStorage[1] = new BlockStorage();
if (extraDataMap != null) {
for (int x = 0; x < 16; x++) {
for (int z = 0; z < 16; z++) {
for (int y = ySection * 16, lim = y + 16; y < lim; y++) {
int key = Chunk.blockKey(x, y, z);
if (extraDataMap.containsKey(key)) {
short value = extraDataMap.get(Chunk.blockKey(x, y, z));
int blockId = value & 0xff;
int blockData = (value >> 8) & 0xf;
blockStorage[1].setBlock(ChunkSection.blockIndex(x, y, z), BlockRegistry.get().getBlock(blockId, blockData));
}
}
}
}
}
}
sections[ySection] = new ChunkSection(blockStorage);
} finally {
buf.release(); //release buffer to avoid memory leak
}
sections[ySection] = new ChunkSection(blockStorage);
}

chunkBuilder.sections(sections);
Original file line number Diff line number Diff line change
@@ -2,10 +2,10 @@

import io.netty.util.collection.IntObjectHashMap;
import io.netty.util.collection.IntObjectMap;
import net.daporkchop.ldbjni.direct.DirectDB;
import net.daporkchop.ldbjni.direct.DirectWriteBatch;
import org.cloudburstmc.server.level.chunk.Chunk;
import org.cloudburstmc.server.level.chunk.ChunkBuilder;
import org.iq80.leveldb.DB;
import org.iq80.leveldb.WriteBatch;

public class ChunkSerializers {

@@ -36,11 +36,11 @@ private static ChunkSerializer getChunkSerializer(int version) {
return chunkSerializer;
}

public static void serializeChunk(WriteBatch db, Chunk chunk, int version) {
public static void serializeChunk(DirectWriteBatch db, Chunk chunk, int version) {
getChunkSerializer(version).serialize(db, chunk);
}

public static void deserializeChunk(DB db, ChunkBuilder chunkBuilder, int version) {
public static void deserializeChunk(DirectDB db, ChunkBuilder chunkBuilder, int version) {
getChunkSerializer(version).deserialize(db, chunkBuilder);
}
}