Mirror of https://github.com/GeyserMC/Geyser.git, synced 2024-08-14 23:57:35 +00:00
TEMPORARY fix for chunk memory leaks
parent abba88112a
commit b66088e434
2 changed files with 9 additions and 3 deletions
@@ -105,6 +105,7 @@ public class JavaLevelChunkWithLightTranslator extends PacketTranslator<Clientbo
         int maxBedrockSectionY = (bedrockDimension.height() >> 4) - 1;

         int sectionCount;
+        byte[] payload;
         ByteBuf byteBuf = null;
         GeyserChunkSection[] sections = new GeyserChunkSection[javaChunks.length - (yOffset + (bedrockDimension.minY() >> 4))];

@@ -347,7 +348,8 @@ public class JavaLevelChunkWithLightTranslator extends PacketTranslator<Clientbo
             for (NbtMap blockEntity : bedrockBlockEntities) {
                 nbtStream.writeTag(blockEntity);
             }
-            byteBuf.retain();
+            payload = new byte[byteBuf.readableBytes()];
+            byteBuf.readBytes(payload);
         } catch (IOException e) {
             session.getGeyser().getLogger().error("IO error while encoding chunk", e);
             return;
@@ -362,7 +364,7 @@ public class JavaLevelChunkWithLightTranslator extends PacketTranslator<Clientbo
         levelChunkPacket.setCachingEnabled(false);
         levelChunkPacket.setChunkX(packet.getX());
         levelChunkPacket.setChunkZ(packet.getZ());
-        levelChunkPacket.setData(byteBuf);
+        levelChunkPacket.setData(Unpooled.wrappedBuffer(payload));
         session.sendUpstreamPacket(levelChunkPacket);

         if (!lecterns.isEmpty()) {
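Both JavaLevelChunkWithLightTranslator hunks above follow the same copy-and-wrap pattern: instead of retaining the pooled ByteBuf and handing it to the packet (byteBuf.retain(); levelChunkPacket.setData(byteBuf)), the readable bytes are copied into a plain byte[] payload and the packet receives Unpooled.wrappedBuffer(payload). Below is a minimal, self-contained sketch of that pattern, assuming only Netty's buffer API; the class name, buffer contents, and sizes are placeholders, not Geyser code.

import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.Unpooled;

public class CopyAndWrapSketch {
    public static void main(String[] args) {
        // ByteBufAllocator.DEFAULT is typically a pooled allocator, so this
        // buffer must be released exactly once or the pooled memory leaks.
        ByteBuf byteBuf = ByteBufAllocator.DEFAULT.buffer(64);
        byte[] payload;
        try {
            byteBuf.writeByte(0); // stand-in for the encoded chunk data

            // Copy the readable bytes out of the pooled buffer...
            payload = new byte[byteBuf.readableBytes()];
            byteBuf.readBytes(payload);
        } finally {
            // ...so it can be released here unconditionally, no matter what
            // happens to the outgoing packet later.
            byteBuf.release();
        }

        // The packet data becomes an unpooled wrapper around the copy.
        // Unpooled.wrappedBuffer does not copy again; it only wraps payload,
        // and the wrapper's lifetime is handled by the garbage collector.
        ByteBuf packetData = Unpooled.wrappedBuffer(payload);
        System.out.println("readable bytes: " + packetData.readableBytes());
    }
}

Because the packet only ever sees the unpooled wrapper, the pooled buffer's reference count no longer depends on whoever consumes the packet downstream, which appears to be the leak this commit works around.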
@@ -175,6 +175,7 @@ public class ChunkUtils {
         BedrockDimension bedrockDimension = session.getChunkCache().getBedrockDimension();
         int bedrockSubChunkCount = bedrockDimension.height() >> 4;

+        byte[] payload;
         // Allocate output buffer
         ByteBuf byteBuf = ByteBufAllocator.DEFAULT.buffer(ChunkUtils.EMPTY_BIOME_DATA.length * bedrockSubChunkCount + 1); // Consists only of biome data and border blocks
         try {
@@ -185,11 +186,14 @@ public class ChunkUtils {

             byteBuf.writeByte(0); // Border blocks - Edu edition only

+            payload = new byte[byteBuf.readableBytes()];
+            byteBuf.readBytes(payload);
+
             LevelChunkPacket data = new LevelChunkPacket();
             data.setChunkX(chunkX);
             data.setChunkZ(chunkZ);
             data.setSubChunksLength(0);
-            data.setData(byteBuf.retain());
+            data.setData(Unpooled.wrappedBuffer(payload));
             data.setCachingEnabled(false);
             session.sendUpstreamPacket(data);
         } finally {
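ChunkUtils applies the same pattern to the empty-chunk path: the biome-only buffer is copied into payload, the LevelChunkPacket gets Unpooled.wrappedBuffer(payload) instead of byteBuf.retain(), and the pooled buffer stays eligible for release in the finally block that follows (its body is outside this hunk). The cost is one extra heap copy per chunk, which is presumably why the commit is labelled TEMPORARY rather than a final fix.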