Remove obsolete chunk merging

This commit is contained in:
Camotoy 2021-06-23 16:48:56 -04:00
parent 3220190904
commit 71fe2621fc
No known key found for this signature in database
GPG key ID: 7EEFB66FE798081F
2 changed files with 9 additions and 27 deletions

View file

@@ -46,27 +46,12 @@ public class ChunkCache {
chunks = cache ? new Long2ObjectOpenHashMap<>() : null;
}
public Column addToCache(Column chunk) {
public void addToCache(Column chunk) {
if (!cache) {
return chunk;
return;
}
long chunkPosition = MathUtils.chunkPositionToLong(chunk.getX(), chunk.getZ());
Column existingChunk;
if (chunk.getBiomeData() == null // Only consider merging columns if the new chunk isn't a full chunk
&& (existingChunk = chunks.getOrDefault(chunkPosition, null)) != null) { // Column is already present in cache, we can merge with existing
boolean changed = false;
for (int i = 0; i < chunk.getChunks().length; i++) { // The chunks member is final, so chunk.getChunks() will probably be inlined and then completely optimized away
if (chunk.getChunks()[i] != null) {
existingChunk.getChunks()[i] = chunk.getChunks()[i];
changed = true;
}
}
return changed ? existingChunk : null;
} else {
chunks.put(chunkPosition, chunk);
return chunk;
}
chunks.put(MathUtils.chunkPositionToLong(chunk.getX(), chunk.getZ()), chunk);
}
public Column getChunk(int chunkX, int chunkZ) {

View file

@@ -52,15 +52,12 @@ public class JavaChunkDataTranslator extends PacketTranslator<ServerChunkDataPac
ChunkUtils.updateChunkPosition(session, session.getPlayerEntity().getPosition().toInt());
}
// Merge received column with cache on network thread
Column mergedColumn = session.getChunkCache().addToCache(packet.getColumn());
if (mergedColumn == null) { // There were no changes?!?
return;
}
session.getChunkCache().addToCache(packet.getColumn());
Column column = packet.getColumn();
GeyserConnector.getInstance().getGeneralThreadPool().execute(() -> {
try {
ChunkUtils.ChunkData chunkData = ChunkUtils.translateToBedrock(session, mergedColumn);
ChunkUtils.ChunkData chunkData = ChunkUtils.translateToBedrock(session, column);
ChunkSection[] sections = chunkData.getSections();
// Find highest section
@@ -90,7 +87,7 @@ public class JavaChunkDataTranslator extends PacketTranslator<ServerChunkDataPac
(section != null ? section : session.getBlockTranslator().getEmptyChunkSection()).writeToNetwork(byteBuf);
}
byteBuf.writeBytes(BiomeTranslator.toBedrockBiome(mergedColumn.getBiomeData())); // Biomes - 256 bytes
byteBuf.writeBytes(BiomeTranslator.toBedrockBiome(column.getBiomeData())); // Biomes - 256 bytes
byteBuf.writeByte(0); // Border blocks - Edu edition only
VarInts.writeUnsignedInt(byteBuf, 0); // extra data length, 0 for now
@@ -109,8 +106,8 @@ public class JavaChunkDataTranslator extends PacketTranslator<ServerChunkDataPac
LevelChunkPacket levelChunkPacket = new LevelChunkPacket();
levelChunkPacket.setSubChunksLength(sectionCount);
levelChunkPacket.setCachingEnabled(false);
levelChunkPacket.setChunkX(mergedColumn.getX());
levelChunkPacket.setChunkZ(mergedColumn.getZ());
levelChunkPacket.setChunkX(column.getX());
levelChunkPacket.setChunkZ(column.getZ());
levelChunkPacket.setData(payload);
session.sendUpstreamPacket(levelChunkPacket);
} catch (Exception ex) {