Fix biomes crashing and more (huge thanks to @dktapps)

This commit is contained in:
Camotoy 2021-07-14 21:14:53 -04:00
parent c7d4130a44
commit feb64e08df
No known key found for this signature in database
GPG key ID: 7EEFB66FE798081F
6 changed files with 85 additions and 19 deletions

View file

@@ -130,7 +130,7 @@
<dependency>
<groupId>com.github.GeyserMC</groupId>
<artifactId>MCProtocolLib</artifactId>
- <version>6e318f5</version>
+ <version>e427237</version>
<scope>compile</scope>
<exclusions>
<exclusion>

View file

@@ -41,7 +41,6 @@ public class UpstreamSession {
private boolean initialized = false;
public void sendPacket(@NonNull BedrockPacket packet) {
- System.out.println(packet);
if (!isClosed()) {
session.sendPacket(packet);
}

View file

@@ -74,19 +74,19 @@ public class ChunkCache {
return;
}
- if (y < minY || (y >> 4) > column.getChunks().length - 1) {
+ if (y < minY || ((y - minY) >> 4) > column.getChunks().length - 1) {
// Y likely goes above or below the height limit of this world
return;
}
- Chunk chunk = column.getChunks()[(y >> 4) - getChunkMinY()];
+ Chunk chunk = column.getChunks()[(y - minY) >> 4];
if (chunk == null) {
if (block != BlockStateValues.JAVA_AIR_ID) {
// A previously empty chunk, which is no longer empty as a block has been added to it
chunk = new Chunk();
// Fixes the chunk assuming that all blocks are the `block` variable we are updating. /shrug
chunk.getPalette().stateToId(BlockStateValues.JAVA_AIR_ID);
- column.getChunks()[(y >> 4) - getChunkMinY()] = chunk;
+ column.getChunks()[(y - minY) >> 4] = chunk;
} else {
// Nothing to update
return;
@@ -106,12 +106,12 @@ public class ChunkCache {
return BlockStateValues.JAVA_AIR_ID;
}
- if (y < minY || (y >> 4) > column.getChunks().length - 1) {
+ if (y < minY || ((y - minY) >> 4) > column.getChunks().length - 1) {
// Y likely goes above or below the height limit of this world
return BlockStateValues.JAVA_AIR_ID;
}
- Chunk chunk = column.getChunks()[(y >> 4) - getChunkMinY()];
+ Chunk chunk = column.getChunks()[(y - minY) >> 4];
if (chunk != null) {
return chunk.get(x & 0xF, y & 0xF, z & 0xF);
}
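
A note on the ChunkCache fix above: on 1.17 extended-height worlds minY can be negative, and the old bounds check compared the raw y >> 4 against the section array length, so a Y value past the build limit could slip through the check while the old index expression then ran off the end of the array. The following standalone sketch is not Geyser code; the minY and section-count values, and the assumption that getChunkMinY() equals minY >> 4, are illustrative only.

// Standalone sketch: why the bounds check and the array index must both use (y - minY)
// once minY can be negative (1.17 extended height).
public class SectionIndexSketch {
    public static void main(String[] args) {
        int minY = -64;        // assumed world minimum for an extended-height world
        int sectionCount = 24; // a 384-block-tall world has 24 chunk sections

        int y = 320; // a Y value just above the build limit (blocks run -64..319)

        // Old logic: the check uses the raw section number (y >> 4), which passes here...
        boolean oldCheckRejects = (y >> 4) > sectionCount - 1;          // false
        // ...but the index it would then use, (y >> 4) - getChunkMinY(), is one past the end.
        int oldIndex = (y >> 4) - (minY >> 4);                          // 24 -> ArrayIndexOutOfBoundsException

        // New logic: normalize against minY first, so the check and the index agree.
        boolean newCheckRejects = ((y - minY) >> 4) > sectionCount - 1; // true -> the method returns early
        int newIndex = (y - minY) >> 4;                                 // 24, but never used as an index

        System.out.println("old: rejects=" + oldCheckRejects + " index=" + oldIndex);
        System.out.println("new: rejects=" + newCheckRejects + " index=" + newIndex);
    }
}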

View file

@@ -37,14 +37,16 @@ import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.ByteBufOutputStream;
import org.geysermc.connector.GeyserConnector;
import org.geysermc.connector.network.session.GeyserSession;
- import org.geysermc.connector.utils.BiomeUtils;
import org.geysermc.connector.network.translators.PacketTranslator;
import org.geysermc.connector.network.translators.Translator;
import org.geysermc.connector.network.translators.world.chunk.ChunkSection;
+ import org.geysermc.connector.utils.BiomeUtils;
+ import org.geysermc.connector.utils.ChunkUtils;
@Translator(packet = ServerChunkDataPacket.class)
public class JavaChunkDataTranslator extends PacketTranslator<ServerChunkDataPacket> {
+ // Caves and Cliffs supports 3D biomes by implementing a palette system very similar to the one used for blocks
+ private static final boolean NEW_BIOME_WRITE = GeyserConnector.getInstance().getConfig().isExtendedWorldHeight();
@Override
public void translate(ServerChunkDataPacket packet, GeyserSession session) {
@@ -79,7 +81,7 @@ public class JavaChunkDataTranslator extends PacketTranslator<ServerChunkDataPacket> {
ChunkSection section = sections[i];
size += (section != null ? section : session.getBlockMappings().getEmptyChunkSection()).estimateNetworkSize();
}
- size += 256; // Biomes
+ size += 256; // Biomes pre-1.18
size += 1; // Border blocks
size += 1; // Extra data length (always 0)
size += chunkData.getBlockEntities().length * 64; // Conservative estimate of 64 bytes per tile entity
@@ -93,7 +95,18 @@ public class JavaChunkDataTranslator extends PacketTranslator<ServerChunkDataPacket> {
(section != null ? section : session.getBlockMappings().getEmptyChunkSection()).writeToNetwork(byteBuf);
}
- byteBuf.writeBytes(BiomeUtils.toBedrockBiome(column.getBiomeData())); // Biomes - 256 bytes
+ if (NEW_BIOME_WRITE) {
+ for (int i = 0; i < sectionCount; i++) {
+ BiomeUtils.toNewBedrockBiome(column.getBiomeData(), i).writeToNetwork(byteBuf);
+ }
+ int remainingEmptyBiomes = 32 - sectionCount;
+ for (int i = 0; i < remainingEmptyBiomes; i++) {
+ byteBuf.writeBytes(ChunkUtils.EMPTY_BIOME_DATA);
+ }
+ } else {
+ byteBuf.writeBytes(BiomeUtils.toBedrockBiome(column.getBiomeData())); // Biomes - 256 bytes
+ }
byteBuf.writeByte(0); // Border blocks - Edu edition only
VarInts.writeUnsignedInt(byteBuf, 0); // extra data length, 0 for now
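
To make the new biome path concrete: instead of one flat 256-byte biome array per column, each sub-chunk now gets its own palettized biome storage, and the remainder is padded out to 32 sections with the pre-serialized ChunkUtils.EMPTY_BIOME_DATA. The sketch below mirrors only that padding shape using plain byte arrays; the 32-section count is taken from the diff, while the class, method, and byte contents are placeholders rather than the real Bedrock encoding.

import java.io.ByteArrayOutputStream;
import java.io.IOException;

// Illustrative sketch only, not the Geyser/Bedrock API.
public class BiomePaddingSketch {
    private static final int TOTAL_BIOME_SECTIONS = 32;       // count used by the translator above
    private static final byte[] EMPTY_SECTION = {0x01, 0x00}; // placeholder bytes, not the real encoding

    static byte[] buildBiomePayload(byte[][] serializedSections) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        for (byte[] section : serializedSections) {
            out.write(section);                                // one palette per real sub-chunk
        }
        for (int i = serializedSections.length; i < TOTAL_BIOME_SECTIONS; i++) {
            out.write(EMPTY_SECTION);                          // pad the rest, mirroring EMPTY_BIOME_DATA
        }
        return out.toByteArray();
    }
}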

View file

@@ -25,6 +25,8 @@
package org.geysermc.connector.utils;
+ import org.geysermc.connector.network.translators.world.chunk.BlockStorage;
import java.util.Arrays;
// Based off of ProtocolSupport's LegacyBiomeData.java:
@@ -40,7 +42,7 @@ public class BiomeUtils {
for (int y = 0; y < 16; y += 4) {
for (int z = 0; z < 16; z += 4) {
for (int x = 0; x < 16; x += 4) {
- byte biomeId = biomeID(biomeData, x, y, z);
+ byte biomeId = (byte) biomeID(biomeData, x, y, z);
int offset = ((z + (y / 4)) << 4) | x;
Arrays.fill(bedrockData, offset, offset + 4, biomeId);
}
@@ -49,7 +51,25 @@ public class BiomeUtils {
return bedrockData;
}
- private static byte biomeID(int[] biomeData, int x, int y, int z) {
+ public static BlockStorage toNewBedrockBiome(int[] biomeData, int ySection) {
+ BlockStorage storage = new BlockStorage(0);
+ int blockY = ySection << 4;
+ int i = 0;
+ // Iterate over biomes like a chunk, grab the biome from Java, and add it to Bedrock's biome palette
+ // Might be possible to optimize this by iterating over Java's biome data sections directly? Unsure.
+ for (int x = 0; x < 16; x++) {
+ for (int z = 0; z < 16; z++) {
+ for (int y = blockY; y < (blockY + 16); y++) {
+ int biomeId = biomeID(biomeData, x, y, z);
+ storage.setFullBlock(i, biomeId);
+ i++;
+ }
+ }
+ }
+ return storage;
+ }
+ private static int biomeID(int[] biomeData, int x, int y, int z) {
int biomeId = biomeData[((y >> 2) & 63) << 4 | ((z >> 2) & 3) << 2 | ((x >> 2) & 3)];
if (biomeId == 0) {
biomeId = 42; // Ocean
@@ -58,6 +78,6 @@
} else if (biomeId >= 170) { // Nether biomes. Dunno why it's like this :microjang:
biomeId += 8;
}
- return (byte) biomeId;
+ return biomeId;
}
}
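
For background on the biomeID() lookup above: since Java 1.15, a chunk column stores biomes at 4x4x4-block resolution, and the packed expression in biomeID() turns block coordinates into an index into that cell array. The standalone sketch below exercises just the index math; the coordinate values are illustrative, and the 64-cell vertical mask matches the formula shown in the diff.

// Standalone sketch of the Java biome cell index used by biomeID() above.
public class BiomeIndexSketch {
    static int javaBiomeIndex(int x, int y, int z) {
        // Quantize to 4-block cells, then pack as (yCell << 4) | (zCell << 2) | xCell,
        // exactly as in biomeID().
        return ((y >> 2) & 63) << 4 | ((z >> 2) & 3) << 2 | ((x >> 2) & 3);
    }

    public static void main(String[] args) {
        // Blocks in the same 4x4x4 cell share one biome entry.
        System.out.println(javaBiomeIndex(0, 0, 0)); // 0
        System.out.println(javaBiomeIndex(3, 3, 3)); // 0 (same cell)
        System.out.println(javaBiomeIndex(4, 0, 0)); // 1 (next cell along X)
        System.out.println(javaBiomeIndex(0, 4, 0)); // 16 (next cell along Y)
    }
}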

View file

@@ -41,6 +41,8 @@ import com.nukkitx.nbt.NbtMap;
import com.nukkitx.protocol.bedrock.packet.LevelChunkPacket;
import com.nukkitx.protocol.bedrock.packet.NetworkChunkPublisherUpdatePacket;
import com.nukkitx.protocol.bedrock.packet.UpdateBlockPacket;
+ import io.netty.buffer.ByteBuf;
+ import io.netty.buffer.Unpooled;
import it.unimi.dsi.fastutil.ints.IntArrayList;
import it.unimi.dsi.fastutil.ints.IntList;
import lombok.Data;
@@ -71,11 +73,41 @@ public class ChunkUtils {
/**
* The minimum height Bedrock Edition will accept.
*/
- private static final int MINIMUM_ACCEPTED_HEIGHT = GeyserConnector.getInstance().getConfig().isExtendedWorldHeight() ? -64 : 0;
+ private static final int MINIMUM_ACCEPTED_HEIGHT = 0;
+ private static final int MINIMUM_ACCEPTED_HEIGHT_OVERWORLD = GeyserConnector.getInstance().getConfig().isExtendedWorldHeight() ? -64 : MINIMUM_ACCEPTED_HEIGHT;
/**
* The maximum chunk height Bedrock Edition will accept, from the lowest point to the highest.
*/
- private static final int MAXIMUM_ACCEPTED_HEIGHT = GeyserConnector.getInstance().getConfig().isExtendedWorldHeight() ? 380 : 256;
+ private static final int MAXIMUM_ACCEPTED_HEIGHT = 256;
+ private static final int MAXIMUM_ACCEPTED_HEIGHT_OVERWORLD = GeyserConnector.getInstance().getConfig().isExtendedWorldHeight() ? 384 : MAXIMUM_ACCEPTED_HEIGHT;
+ private static final byte[] EMPTY_CHUNK_DATA;
+ public static final byte[] EMPTY_BIOME_DATA;
+ static {
+ ByteBuf byteBuf = Unpooled.buffer();
+ try {
+ BlockStorage blockStorage = new BlockStorage(0);
+ blockStorage.writeToNetwork(byteBuf);
+ EMPTY_BIOME_DATA = new byte[byteBuf.readableBytes()];
+ byteBuf.readBytes(EMPTY_BIOME_DATA);
+ } finally {
+ byteBuf.release();
+ }
+ byteBuf = Unpooled.buffer();
+ try {
+ for (int i = 0; i < 32; i++) {
+ byteBuf.writeBytes(EMPTY_BIOME_DATA);
+ }
+ EMPTY_CHUNK_DATA = new byte[byteBuf.readableBytes()];
+ byteBuf.readBytes(EMPTY_CHUNK_DATA);
+ } finally {
+ byteBuf.release();
+ }
+ }
private static int indexYZXtoXZY(int yzx) {
return (yzx >> 8) | (yzx & 0x0F0) | ((yzx & 0x00F) << 8);
@@ -91,8 +123,10 @@ public class ChunkUtils {
BitSet waterloggedPaletteIds = new BitSet();
BitSet pistonOrFlowerPaletteIds = new BitSet();
+ boolean overworld = session.getDimension().equals(DimensionUtils.OVERWORLD);
for (int sectionY = 0; sectionY < javaSections.length; sectionY++) {
- if (yOffset < MINIMUM_ACCEPTED_HEIGHT && sectionY < -yOffset) {
+ if (yOffset < ((overworld ? MINIMUM_ACCEPTED_HEIGHT_OVERWORLD : MINIMUM_ACCEPTED_HEIGHT) >> 4) && sectionY < -yOffset) {
// Ignore this chunk since it goes below the accepted height limit
continue;
}
@@ -128,7 +162,7 @@
));
}
}
- sections[sectionY + yOffset] = section;
+ sections[sectionY + (yOffset - ((overworld ? MINIMUM_ACCEPTED_HEIGHT_OVERWORLD : MINIMUM_ACCEPTED_HEIGHT) >> 4))] = section;
continue;
}
@@ -201,7 +235,7 @@
layers = new BlockStorage[]{ layer0, new BlockStorage(BitArrayVersion.V1.createArray(BlockStorage.SIZE, layer1Data), layer1Palette) };
}
- sections[sectionY + yOffset] = new ChunkSection(layers);
+ sections[sectionY + (yOffset - ((overworld ? MINIMUM_ACCEPTED_HEIGHT_OVERWORLD : MINIMUM_ACCEPTED_HEIGHT) >> 4))] = new ChunkSection(layers);
}
CompoundTag[] blockEntities = column.getTileEntities();
@@ -383,7 +417,7 @@
data.setChunkX(chunkX + x);
data.setChunkZ(chunkZ + z);
data.setSubChunksLength(0);
- data.setData(new byte[0]);
+ data.setData(EMPTY_CHUNK_DATA);
data.setCachingEnabled(false);
session.sendUpstreamPacket(data);
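
Finally, on the ChunkUtils section placement: with extended world height enabled, the Bedrock overworld section array starts at Y -64, so a Java column's sections are shifted by the difference between its yOffset and Bedrock's minimum section. The standalone sketch below only reproduces that offset arithmetic; the yOffset values (in section units) and the -64 minimum are assumptions for illustration, not Geyser code.

// Standalone sketch of the section offset applied in ChunkUtils above.
public class SectionOffsetSketch {
    public static void main(String[] args) {
        int bedrockMinY = -64;                    // MINIMUM_ACCEPTED_HEIGHT_OVERWORLD with extended height on
        int bedrockMinSection = bedrockMinY >> 4; // -4

        // A Java 1.17 world that also starts at -64: its lowest section lands at Bedrock index 0.
        int javaYOffset = -4; // the column's lowest section, covering blocks -64..-49
        int javaSectionY = 0;
        System.out.println(javaSectionY + (javaYOffset - bedrockMinSection)); // 0

        // A vanilla-height world starting at Y 0: its lowest section lands at Bedrock index 4,
        // i.e. blocks 0..15 sit 64 blocks above the bottom of the Bedrock array.
        javaYOffset = 0;
        System.out.println(javaSectionY + (javaYOffset - bedrockMinSection)); // 4
    }
}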