Mirror of https://github.com/PaperMC/Paper.git, synced 2024-12-28 23:38:25 +01:00
Apply more patches

commit 3cf764645b (parent 7ce0d944b6)
3 changed files with 58 additions and 58 deletions
@@ -9,38 +9,6 @@ we instead drop the current regionfile header and recalculate -
hoping that at least then we don't swap chunks, and maybe recover
them all.
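The recovery strategy described above hinges on the fact that a regionfile holds a fixed 32x32 block of chunks, so a chunk's absolute coordinates determine exactly which regionfile should contain it; the recalculation code further down uses this to reject chunk data found in the wrong file. A minimal sketch of that containment math, using hypothetical helper names that are not part of the patch:

    // Sketch only: a regionfile covers a 32x32 block of chunks, so the owning
    // region coordinate is the chunk coordinate shifted right by 5 (floor-divided by 32).
    final class RegionMath {
        static int regionCoord(final int chunkCoord) {
            return chunkCoord >> 5;
        }

        // Does chunk (chunkX, chunkZ) belong in regionfile (regionX, regionZ)?
        static boolean inRegion(final int chunkX, final int chunkZ, final int regionX, final int regionZ) {
            return regionCoord(chunkX) == regionX && regionCoord(chunkZ) == regionZ;
        }
    }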

diff --git a/src/main/java/net/minecraft/world/level/chunk/storage/ChunkSerializer.java b/src/main/java/net/minecraft/world/level/chunk/storage/ChunkSerializer.java
index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644
--- a/src/main/java/net/minecraft/world/level/chunk/storage/ChunkSerializer.java
+++ b/src/main/java/net/minecraft/world/level/chunk/storage/ChunkSerializer.java
@@ -0,0 +0,0 @@ import net.minecraft.world.ticks.ProtoChunkTicks;
import org.slf4j.Logger;

public class ChunkSerializer {
+ // Paper start - Attempt to recalculate regionfile header if it is corrupt
+ // TODO: Check on update
+ public static long getLastWorldSaveTime(CompoundTag chunkData) {
+ final int dataVersion = ChunkStorage.getVersion(chunkData);
+ if (dataVersion < 2842) { // Level tag is removed after this version
+ final CompoundTag levelData = chunkData.getCompound("Level");
+ return levelData.getLong("LastUpdate");
+ } else {
+ return chunkData.getLong("LastUpdate");
+ }
+ }
+ // Paper end - Attempt to recalculate regionfile header if it is corrupt
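This helper is how the header-recalculation pass arbitrates between two candidate copies of the same chunk found in a damaged regionfile: the copy with the larger LastUpdate value is kept (see the comparisons further down). A rough sketch of that rule, assuming a hypothetical pickNewer helper that is not in the patch:

    // Sketch only: keep whichever chunk copy was saved more recently,
    // using the LastUpdate ordering provided by getLastWorldSaveTime above.
    static CompoundTag pickNewer(final CompoundTag a, final CompoundTag b) {
        if (a == null) return b;
        if (b == null) return a;
        return ChunkSerializer.getLastWorldSaveTime(a) >= ChunkSerializer.getLastWorldSaveTime(b) ? a : b;
    }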

public static final Codec<PalettedContainer<BlockState>> BLOCK_STATE_CODEC = PalettedContainer.codecRW(Block.BLOCK_STATE_REGISTRY, BlockState.CODEC, PalettedContainer.Strategy.SECTION_STATES, Blocks.AIR.defaultBlockState(), null); // Paper - Anti-Xray - Add preset block states
private static final Logger LOGGER = LogUtils.getLogger();
@@ -0,0 +0,0 @@ public class ChunkSerializer {
nbttagcompound.putInt("xPos", chunkcoordintpair.x);
nbttagcompound.putInt("yPos", chunk.getMinSection());
nbttagcompound.putInt("zPos", chunkcoordintpair.z);
- nbttagcompound.putLong("LastUpdate", asyncsavedata != null ? asyncsavedata.worldTime() : world.getGameTime()); // Paper - async chunk saving
+ nbttagcompound.putLong("LastUpdate", asyncsavedata != null ? asyncsavedata.worldTime() : world.getGameTime()); // Paper - async chunk saving // Paper - diff on change
nbttagcompound.putLong("InhabitedTime", chunk.getInhabitedTime());
nbttagcompound.putString("Status", BuiltInRegistries.CHUNK_STATUS.getKey(chunk.getPersistedStatus()).toString());
BlendingData blendingdata = chunk.getBlendingData();
diff --git a/src/main/java/net/minecraft/world/level/chunk/storage/RegionBitmap.java b/src/main/java/net/minecraft/world/level/chunk/storage/RegionBitmap.java
index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644
--- a/src/main/java/net/minecraft/world/level/chunk/storage/RegionBitmap.java
@@ -77,7 +45,7 @@ diff --git a/src/main/java/net/minecraft/world/level/chunk/storage/RegionFile.ja
index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644
--- a/src/main/java/net/minecraft/world/level/chunk/storage/RegionFile.java
+++ b/src/main/java/net/minecraft/world/level/chunk/storage/RegionFile.java
@@ -0,0 +0,0 @@ public class RegionFile implements AutoCloseable {
@@ -0,0 +0,0 @@ public class RegionFile implements AutoCloseable, ca.spottedleaf.moonrise.patche
private final IntBuffer timestamps;
@VisibleForTesting
protected final RegionBitmap usedSectors;
@@ -191,7 +159,7 @@ index 0000000000000000000000000000000000000000..00000000000000000000000000000000
+ continue;
+ }
+
+ ChunkPos chunkPos = ChunkSerializer.getChunkCoordinate(compound);
+ ChunkPos chunkPos = SerializableChunkData.getChunkCoordinate(compound);
+ if (!inSameRegionfile(ourLowerLeftPosition, chunkPos)) {
+ LOGGER.error("Ignoring absolute chunk " + chunkPos + " in regionfile as it is not contained in the bounds of the regionfile '" + this.path.toAbsolutePath() + "'. It should be in regionfile (" + (chunkPos.x >> 5) + "," + (chunkPos.z >> 5) + ")");
+ continue;
@@ -200,7 +168,7 @@ index 0000000000000000000000000000000000000000..00000000000000000000000000000000
+
+ CompoundTag otherCompound = compounds[location];
+
+ if (otherCompound != null && ChunkSerializer.getLastWorldSaveTime(otherCompound) > ChunkSerializer.getLastWorldSaveTime(compound)) {
+ if (otherCompound != null && SerializableChunkData.getLastWorldSaveTime(otherCompound) > SerializableChunkData.getLastWorldSaveTime(compound)) {
+ continue; // don't overwrite newer data.
+ }
+
@@ -210,7 +178,7 @@ index 0000000000000000000000000000000000000000..00000000000000000000000000000000
+ if (Files.exists(aikarOversizedFile)) {
+ try {
+ CompoundTag aikarOversizedCompound = this.getOversizedData(chunkPos.x, chunkPos.z);
+ if (ChunkSerializer.getLastWorldSaveTime(compound) == ChunkSerializer.getLastWorldSaveTime(aikarOversizedCompound)) {
+ if (SerializableChunkData.getLastWorldSaveTime(compound) == SerializableChunkData.getLastWorldSaveTime(aikarOversizedCompound)) {
+ // best we got for an id. hope it's good enough
+ isAikarOversized = true;
+ }
@@ -288,12 +256,12 @@ index 0000000000000000000000000000000000000000..00000000000000000000000000000000
+ continue;
+ }
+
+ if (!ChunkSerializer.getChunkCoordinate(compound).equals(oversizedCoords)) {
+ LOGGER.error("Can't use oversized chunk stored in " + regionFile.toAbsolutePath() + ", got absolute chunkpos: " + ChunkSerializer.getChunkCoordinate(compound) + ", expected " + oversizedCoords);
+ if (!SerializableChunkData.getChunkCoordinate(compound).equals(oversizedCoords)) {
+ LOGGER.error("Can't use oversized chunk stored in " + regionFile.toAbsolutePath() + ", got absolute chunkpos: " + SerializableChunkData.getChunkCoordinate(compound) + ", expected " + oversizedCoords);
+ continue;
+ }
+
+ if (compounds[location] == null || ChunkSerializer.getLastWorldSaveTime(compound) > ChunkSerializer.getLastWorldSaveTime(compounds[location])) {
+ if (compounds[location] == null || SerializableChunkData.getLastWorldSaveTime(compound) > SerializableChunkData.getLastWorldSaveTime(compounds[location])) {
+ oversized[location] = true;
+ oversizedCompressionTypes[location] = compression;
+ }
@@ -430,9 +398,9 @@ index 0000000000000000000000000000000000000000..00000000000000000000000000000000
+ final boolean canRecalcHeader; // final forces compile fail on new constructor
+ // Paper end - Attempt to recalculate regionfile header if it is corrupt

public RegionFile(RegionStorageInfo storageKey, Path directory, Path path, boolean dsync) throws IOException {
this(storageKey, directory, path, RegionFileVersion.getCompressionFormat(), dsync); // Paper - Configurable region compression format
@@ -0,0 +0,0 @@ public class RegionFile implements AutoCloseable {
// Paper start - rewrite chunk system
@Override
@@ -0,0 +0,0 @@ public class RegionFile implements AutoCloseable, ca.spottedleaf.moonrise.patche
throw new IllegalArgumentException("Expected directory, got " + String.valueOf(directory.toAbsolutePath()));
} else {
this.externalFileDir = directory;
@@ -440,7 +408,7 @@ index 0000000000000000000000000000000000000000..00000000000000000000000000000000
this.offsets = this.header.asIntBuffer();
((java.nio.Buffer) this.offsets).limit(1024); // CraftBukkit - decompile error
((java.nio.Buffer) this.header).position(4096); // CraftBukkit - decompile error
@@ -0,0 +0,0 @@ public class RegionFile implements AutoCloseable {
@@ -0,0 +0,0 @@ public class RegionFile implements AutoCloseable, ca.spottedleaf.moonrise.patche
RegionFile.LOGGER.warn("Region file {} has truncated header: {}", path, i);
}

@@ -462,7 +430,7 @@ index 0000000000000000000000000000000000000000..00000000000000000000000000000000
// Spigot start
if (j1 == 255) {
// We're maxed out, so we need to read the proper length from the section
@@ -0,0 +0,0 @@ public class RegionFile implements AutoCloseable {
@@ -0,0 +0,0 @@ public class RegionFile implements AutoCloseable, ca.spottedleaf.moonrise.patche
j1 = (realLen.getInt(0) + 4) / 4096 + 1;
}
// Spigot end
@@ -533,7 +501,7 @@ index 0000000000000000000000000000000000000000..00000000000000000000000000000000
}

}
@@ -0,0 +0,0 @@ public class RegionFile implements AutoCloseable {
@@ -0,0 +0,0 @@ public class RegionFile implements AutoCloseable, ca.spottedleaf.moonrise.patche
}

private Path getExternalChunkPath(ChunkPos chunkPos) {
@@ -571,7 +539,7 @@ index 0000000000000000000000000000000000000000..00000000000000000000000000000000
@Nullable
public synchronized DataInputStream getChunkDataInputStream(ChunkPos pos) throws IOException {
int i = this.getOffset(pos);
@@ -0,0 +0,0 @@ public class RegionFile implements AutoCloseable {
@@ -0,0 +0,0 @@ public class RegionFile implements AutoCloseable, ca.spottedleaf.moonrise.patche
((java.nio.Buffer) bytebuffer).flip(); // CraftBukkit - decompile error
if (bytebuffer.remaining() < 5) {
RegionFile.LOGGER.error("Chunk {} header is truncated: expected {} but read {}", new Object[]{pos, l, bytebuffer.remaining()});
@@ -583,7 +551,7 @@ index 0000000000000000000000000000000000000000..00000000000000000000000000000000
return null;
} else {
int i1 = bytebuffer.getInt();
@@ -0,0 +0,0 @@ public class RegionFile implements AutoCloseable {
@@ -0,0 +0,0 @@ public class RegionFile implements AutoCloseable, ca.spottedleaf.moonrise.patche

if (i1 == 0) {
RegionFile.LOGGER.warn("Chunk {} is allocated, but stream is missing", pos);
@@ -595,7 +563,7 @@ index 0000000000000000000000000000000000000000..00000000000000000000000000000000
return null;
} else {
int j1 = i1 - 1;
@@ -0,0 +0,0 @@ public class RegionFile implements AutoCloseable {
@@ -0,0 +0,0 @@ public class RegionFile implements AutoCloseable, ca.spottedleaf.moonrise.patche
if (RegionFile.isExternalStreamChunk(b0)) {
if (j1 != 0) {
RegionFile.LOGGER.warn("Chunk has both internal and external streams");
@@ -643,7 +611,7 @@ index 0000000000000000000000000000000000000000..00000000000000000000000000000000
}
}
}
@@ -0,0 +0,0 @@ public class RegionFile implements AutoCloseable {
@@ -0,0 +0,0 @@ public class RegionFile implements AutoCloseable, ca.spottedleaf.moonrise.patche
}

private ByteBuffer createExternalStub() {
@@ -665,7 +633,7 @@ index 0000000000000000000000000000000000000000..00000000000000000000000000000000
--- a/src/main/java/net/minecraft/world/level/chunk/storage/RegionFileStorage.java
+++ b/src/main/java/net/minecraft/world/level/chunk/storage/RegionFileStorage.java
@@ -0,0 +0,0 @@ public class RegionFileStorage implements AutoCloseable, ca.spottedleaf.moonrise
return ret;
}
}
// Paper end - rewrite chunk system
+ // Paper start - recalculate region file headers
@@ -706,14 +674,14 @@ index 0000000000000000000000000000000000000000..00000000000000000000000000000000
+ this.isChunkData = isChunkDataFolder(this.folder); // Paper - recalculate region file headers
}

public RegionFile getRegionFile(ChunkPos chunkcoordintpair, boolean existingOnly) throws IOException { // CraftBukkit // Paper - public
// Paper start - rewrite chunk system
@@ -0,0 +0,0 @@ public class RegionFileStorage implements AutoCloseable, ca.spottedleaf.moonrise
try {
if (datainputstream != null) {
nbttagcompound = NbtIo.read((DataInput) datainputstream);
+ // Paper start - recover from corrupt regionfile header
+ if (this.isChunkData) {
+ ChunkPos chunkPos = ChunkSerializer.getChunkCoordinate(nbttagcompound);
+ ChunkPos chunkPos = SerializableChunkData.getChunkCoordinate(nbttagcompound);
+ if (!chunkPos.equals(pos)) {
+ net.minecraft.server.MinecraftServer.LOGGER.error("Attempting to read chunk data at " + pos + " but got chunk data for " + chunkPos + " instead! Attempting regionfile recalculation for regionfile " + regionfile.getPath().toAbsolutePath());
+ if (regionfile.recalculateHeader()) {
@@ -740,3 +708,35 @@ index 0000000000000000000000000000000000000000..00000000000000000000000000000000
private static final Object2ObjectMap<String, RegionFileVersion> VERSIONS_BY_NAME = new Object2ObjectOpenHashMap<>();
public static final RegionFileVersion VERSION_GZIP = register(
new RegionFileVersion(
diff --git a/src/main/java/net/minecraft/world/level/chunk/storage/SerializableChunkData.java b/src/main/java/net/minecraft/world/level/chunk/storage/SerializableChunkData.java
index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644
--- a/src/main/java/net/minecraft/world/level/chunk/storage/SerializableChunkData.java
+++ b/src/main/java/net/minecraft/world/level/chunk/storage/SerializableChunkData.java
@@ -0,0 +0,0 @@ public record SerializableChunkData(Registry<Biome> biomeRegistry, ChunkPos chun
}
}
// Paper end - guard against serializing mismatching coordinates
+ // Paper start - Attempt to recalculate regionfile header if it is corrupt
+ // TODO: Check on update
+ public static long getLastWorldSaveTime(final CompoundTag chunkData) {
+ final int dataVersion = ChunkStorage.getVersion(chunkData);
+ if (dataVersion < 2842) { // Level tag is removed after this version
+ final CompoundTag levelData = chunkData.getCompound("Level");
+ return levelData.getLong("LastUpdate");
+ } else {
+ return chunkData.getLong("LastUpdate");
+ }
+ }
+ // Paper end - Attempt to recalculate regionfile header if it is corrupt

// Paper start - Do not let the server load chunks from newer versions
private static final int CURRENT_DATA_VERSION = net.minecraft.SharedConstants.getCurrentVersion().getDataVersion().getVersion();
@@ -0,0 +0,0 @@ public record SerializableChunkData(Registry<Biome> biomeRegistry, ChunkPos chun
nbttagcompound.putInt("xPos", this.chunkPos.x);
nbttagcompound.putInt("yPos", this.minSectionY);
nbttagcompound.putInt("zPos", this.chunkPos.z);
- nbttagcompound.putLong("LastUpdate", this.lastUpdateTime);
+ nbttagcompound.putLong("LastUpdate", this.lastUpdateTime); // Paper - Diff on change
nbttagcompound.putLong("InhabitedTime", this.inhabitedTime);
nbttagcompound.putString("Status", BuiltInRegistries.CHUNK_STATUS.getKey(this.chunkStatus).toString());
DataResult<Tag> dataresult; // CraftBukkit - decompile error
@@ -306,13 +306,13 @@ index 0000000000000000000000000000000000000000..00000000000000000000000000000000

++this.tickCount;
@@ -0,0 +0,0 @@ public abstract class MinecraftServer extends ReentrantBlockableEventLoop<TickTa
long endTime = System.nanoTime();
long remaining = (TICK_TIME - (endTime - lastTick)) - catchupTime;
new com.destroystokyo.paper.event.server.ServerTickEndEvent(this.tickCount, ((double)(endTime - lastTick) / 1000000D), remaining).callEvent();
+ this.server.spark.tickEnd(((double)(endTime - lastTick) / 1000000D)); // Paper - spark
// Paper end - Server Tick Events
this.profiler.push("tallying");
long j = Util.getNanos() - i;
+ this.server.spark.tickEnd(((double)(endTime - lastTick) / 1000000D)); // Paper - spark
gameprofilerfiller.push("tallying");
long k = Util.getNanos() - i;
int l = this.tickCount % 100;
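The value passed to spark's tickEnd above is simply the elapsed tick time, converted from the System.nanoTime() interval into fractional milliseconds. A one-line illustration (names are illustrative, not from the patch):

    // Sketch only: nanoseconds -> fractional milliseconds, as passed to the tick-end hooks.
    static double tickDurationMillis(final long tickStartNanos, final long tickEndNanos) {
        return (tickEndNanos - tickStartNanos) / 1000000.0D;
    }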
diff --git a/src/main/java/net/minecraft/server/dedicated/DedicatedServer.java b/src/main/java/net/minecraft/server/dedicated/DedicatedServer.java
index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644
--- a/src/main/java/net/minecraft/server/dedicated/DedicatedServer.java
@@ -17,7 +17,7 @@ index 0000000000000000000000000000000000000000..00000000000000000000000000000000

final ListTag entitiesTag = new ListTag();
+ final java.util.Map<net.minecraft.world.entity.EntityType<?>, Integer> savedEntityCounts = new java.util.HashMap<>(); // Paper - Entity load/save limit per chunk
for (final Entity entity : entities) {
for (final Entity entity : PlatformHooks.get().modifySavedEntities(world, chunkPos.x, chunkPos.z, entities)) {
+ // Paper start - Entity load/save limit per chunk
+ final EntityType<?> entityType = entity.getType();
+ final int saveLimit = world.paperConfig().chunks.entityPerChunkSaveLimit.getOrDefault(entityType, -1);
@@ -42,7 +42,7 @@ index 0000000000000000000000000000000000000000..00000000000000000000000000000000
+ final java.util.Map<EntityType<?>, Integer> loadedEntityCounts = new java.util.HashMap<>(); // Paper - Entity load/save limit per chunk
public boolean tryAdvance(Consumer<? super Entity> consumer) {
return spliterator.tryAdvance((nbtbase) -> {
EntityType.loadEntityRecursive((CompoundTag) nbtbase, world, (entity) -> {
EntityType.loadEntityRecursive((CompoundTag) nbtbase, world, reason, (entity) -> {
+ // Paper start - Entity load/save limit per chunk
+ final EntityType<?> entityType = entity.getType();
+ final int saveLimit = world.paperConfig().chunks.entityPerChunkSaveLimit.getOrDefault(entityType, -1);
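Both hunks above fetch a per-EntityType limit from entityPerChunkSaveLimit (with -1 as the unlimited default); the portion of the patch not shown here counts entities of each type as they are saved or loaded and skips any beyond that cap. A rough standalone sketch of that bookkeeping, with a generic type parameter standing in for the real EntityType and config lookup:

    // Sketch only: generic per-type counter mirroring the limit logic referenced above.
    final class PerTypeLimiter<T> {
        private final java.util.Map<T, Integer> counts = new java.util.HashMap<>();

        // limit < 0 means "no limit", matching the -1 default used by the config lookup.
        boolean tryAccept(final T type, final int limit) {
            if (limit < 0) {
                return true;
            }
            final int seen = this.counts.getOrDefault(type, 0);
            if (seen >= limit) {
                return false; // over the cap: the caller should skip this entity
            }
            this.counts.put(type, seen + 1);
            return true;
        }
    }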