Mirror of https://github.com/PaperMC/Paper.git, synced 2025-01-01 17:01:56 +01:00
Discard out of bounds chunks during regionfile header recalc
The logic cannot even determine which local chunk such data should map to, and out-of-bounds chunks can only occur from external modifications to the regionfile. If regionfile header recalculation cannot occur, do not attempt to retry the read. Fixes https://github.com/PaperMC/Paper/issues/6718
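For reference, the bounds check added below relies on the standard 32x32-chunk layout of a regionfile: a chunk's regionfile is (chunkX >> 5, chunkZ >> 5), its slot in the header is (chunkX & 31) | ((chunkZ & 31) << 5), and two chunks share a regionfile exactly when their coordinates agree after masking with ~31. A chunk whose absolute coordinates fall outside the file's 32x32 area therefore has no valid header slot, which is why it is discarded. A minimal standalone sketch of that arithmetic (plain Java, not Paper's RegionFile/RegionFileStorage classes):

    public final class RegionCoords {
        // 32 * 32 chunks per regionfile; region coordinate = chunk coordinate >> 5.
        static int regionX(int chunkX) { return chunkX >> 5; }
        static int regionZ(int chunkZ) { return chunkZ >> 5; }

        // Index of the chunk inside the regionfile header (0..1023).
        static int headerSlot(int chunkX, int chunkZ) {
            return (chunkX & 31) | ((chunkZ & 31) << 5);
        }

        // Same check the patch adds as inSameRegionfile(): equal region origins.
        static boolean inSameRegionfile(int x1, int z1, int x2, int z2) {
            return (x1 & ~31) == (x2 & ~31) && (z1 & ~31) == (z2 & ~31);
        }

        public static void main(String[] args) {
            // Chunk (-1, 0) belongs to region (-1, 0) and occupies header slot 31.
            System.out.println(regionX(-1) + " " + headerSlot(-1, 0)); // -1 31
            // A chunk from region (2, 3) does not belong to region (0, 0)'s file.
            System.out.println(inSameRegionfile(0, 0, 70, 100));       // false
        }
    }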
This commit is contained in:
parent c932582273
commit 0981bf8e38

1 changed file with 73 additions and 63 deletions
@@ -171,10 +171,19 @@ index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000
 +        }
 +    }
 +
++    private static boolean inSameRegionfile(ChunkPos first, ChunkPos second) {
++        return (first.x & ~31) == (second.x & ~31) && (first.z & ~31) == (second.z & ~31);
++    }
++
 +    // note: only call for CHUNK regionfiles
-+    void recalculateHeader() throws IOException {
++    boolean recalculateHeader() throws IOException {
 +        if (!this.canRecalcHeader) {
-+            return;
++            return false;
++        }
++        ChunkPos ourLowerLeftPosition = RegionFileStorage.getRegionFileCoordinates(this.regionFile);
++        if (ourLowerLeftPosition == null) {
++            LOGGER.fatal("Unable to get chunk location of regionfile " + this.regionFile.getAbsolutePath() + ", cannot recover header");
++            return false;
 +        }
 +        synchronized (this) {
 +            LOGGER.warn("Corrupt regionfile header detected! Attempting to re-calculate header offsets for regionfile " + this.regionFile.getAbsolutePath(), new Throwable());
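The hunk above turns recalculation into a query: callers should only retry a read when the header was actually rewritten, which the remaining hunks apply at every call site. A minimal sketch of that contract, with hypothetical stand-in names (HeaderRecoveringReader, readFromHeader) rather than Paper's actual RegionFile members:

    import java.io.DataInputStream;
    import java.io.IOException;

    // Sketch of the retry contract: recalculateHeader() returns true only when
    // the header was successfully rewritten, so the read is retried only then.
    abstract class HeaderRecoveringReader {
        boolean canRecalcHeader = true;

        abstract DataInputStream readFromHeader(long pos) throws IOException; // hypothetical read using the current header
        abstract boolean recalculateHeader() throws IOException;              // true only if the header was rewritten

        DataInputStream getChunkDataInputStream(long pos) throws IOException {
            DataInputStream in = readFromHeader(pos);
            if (in == null && this.canRecalcHeader && this.recalculateHeader()) {
                return this.getChunkDataInputStream(pos); // retry only after a successful recalculation
            }
            return in; // may be null: recalculation impossible or failed, so do not retry
        }
    }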
@@ -200,6 +209,10 @@ index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000
 +                }
 +
 +                ChunkPos chunkPos = ChunkSerializer.getChunkCoordinate(compound);
++                if (!inSameRegionfile(ourLowerLeftPosition, chunkPos)) {
++                    LOGGER.error("Ignoring absolute chunk " + chunkPos + " in regionfile as it is not contained in the bounds of the regionfile '" + this.regionFile.getAbsolutePath() + "'. It should be in regionfile (" + (chunkPos.x >> 5) + "," + (chunkPos.z >> 5) + ")");
++                    continue;
++                }
 +                int location = (chunkPos.x & 31) | ((chunkPos.z & 31) << 5);
 +
 +                CompoundTag otherCompound = compounds[location];
@@ -244,11 +257,6 @@ index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000
 +            RegionFileVersion[] oversizedCompressionTypes = new RegionFileVersion[32 * 32];
 +
 +            if (regionFiles != null) {
-+                ChunkPos ourLowerLeftPosition = RegionFileStorage.getRegionFileCoordinates(this.regionFile);
-+
-+                if (ourLowerLeftPosition == null) {
-+                    LOGGER.fatal("Unable to get chunk location of regionfile " + this.regionFile.getAbsolutePath() + ", cannot recover oversized chunks");
-+                } else {
 +                int lowerXBound = ourLowerLeftPosition.x; // inclusive
 +                int lowerZBound = ourLowerLeftPosition.z; // inclusive
 +                int upperXBound = lowerXBound + 32 - 1; // inclusive
@@ -297,13 +305,17 @@ index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000
 +                        continue;
 +                    }
 +
++                    if (!ChunkSerializer.getChunkCoordinate(compound).equals(oversizedCoords)) {
++                        LOGGER.error("Can't use oversized chunk stored in " + regionFile.getAbsolutePath() + ", got absolute chunkpos: " + ChunkSerializer.getChunkCoordinate(compound) + ", expected " + oversizedCoords);
++                        continue;
++                    }
++
 +                    if (compounds[location] == null || ChunkSerializer.getLastWorldSaveTime(compound) > ChunkSerializer.getLastWorldSaveTime(compounds[location])) {
 +                        oversized[location] = true;
 +                        oversizedCompressionTypes[location] = compression;
 +                    }
 +                }
 +            }
-+                }
 +
 +            // now we need to calculate a new offset header
 +
@@ -428,6 +440,8 @@ index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000
 +                LOGGER.fatal("Failed to write new header to disk for regionfile " + this.regionFile.getAbsolutePath(), ex);
 +            }
 +        }
++
++        return true;
 +    }
 +
 +    final boolean canRecalcHeader; // final forces compile fail on new constructor
@@ -590,8 +604,7 @@ index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000
              if (bytebuffer.remaining() < 5) {
                  RegionFile.LOGGER.error("Chunk {} header is truncated: expected {} but read {}", pos, l, bytebuffer.remaining());
 +                // Paper start - recalculate header on regionfile corruption
-+                if (this.canRecalcHeader) {
-+                    this.recalculateHeader();
++                if (this.canRecalcHeader && this.recalculateHeader()) {
 +                    return this.getChunkDataInputStream(pos);
 +                }
 +                // Paper end - recalculate header on regionfile corruption
@@ -603,8 +616,7 @@ index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000
                  if (i1 == 0) {
                      RegionFile.LOGGER.warn("Chunk {} is allocated, but stream is missing", pos);
 +                    // Paper start - recalculate header on regionfile corruption
-+                    if (this.canRecalcHeader) {
-+                        this.recalculateHeader();
++                    if (this.canRecalcHeader && this.recalculateHeader()) {
 +                        return this.getChunkDataInputStream(pos);
 +                    }
 +                    // Paper end - recalculate header on regionfile corruption
@@ -616,8 +628,7 @@ index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000
                      if (j1 != 0) {
                          RegionFile.LOGGER.warn("Chunk has both internal and external streams");
 +                        // Paper start - recalculate header on regionfile corruption
-+                        if (this.canRecalcHeader) {
-+                            this.recalculateHeader();
++                        if (this.canRecalcHeader && this.recalculateHeader()) {
 +                            return this.getChunkDataInputStream(pos);
 +                        }
 +                        // Paper end - recalculate header on regionfile corruption
@@ -626,8 +637,7 @@ index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000
 -                        return this.createExternalChunkInputStream(pos, RegionFile.getExternalChunkVersion(b0));
 +                        // Paper start - recalculate header on regionfile corruption
 +                        final DataInputStream ret = this.createExternalChunkInputStream(pos, RegionFile.getExternalChunkVersion(b0));
-+                        if (ret == null && this.canRecalcHeader) {
-+                            this.recalculateHeader();
++                        if (ret == null && this.canRecalcHeader && this.recalculateHeader()) {
 +                            return this.getChunkDataInputStream(pos);
 +                        }
 +                        return ret;
@@ -635,8 +645,7 @@ index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000
                  } else if (j1 > bytebuffer.remaining()) {
                      RegionFile.LOGGER.error("Chunk {} stream is truncated: expected {} but read {}", pos, j1, bytebuffer.remaining());
 +                    // Paper start - recalculate header on regionfile corruption
-+                    if (this.canRecalcHeader) {
-+                        this.recalculateHeader();
++                    if (this.canRecalcHeader && this.recalculateHeader()) {
 +                        return this.getChunkDataInputStream(pos);
 +                    }
 +                    // Paper end - recalculate header on regionfile corruption
@@ -644,8 +653,7 @@ index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000
                  } else if (j1 < 0) {
                      RegionFile.LOGGER.error("Declared size {} of chunk {} is negative", i1, pos);
 +                    // Paper start - recalculate header on regionfile corruption
-+                    if (this.canRecalcHeader) {
-+                        this.recalculateHeader();
++                    if (this.canRecalcHeader && this.recalculateHeader()) {
 +                        return this.getChunkDataInputStream(pos);
 +                    }
 +                    // Paper end - recalculate header on regionfile corruption
@@ -654,8 +662,7 @@ index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000
 -                    return this.createChunkInputStream(pos, b0, RegionFile.createStream(bytebuffer, j1));
 +                    // Paper start - recalculate header on regionfile corruption
 +                    final DataInputStream ret = this.createChunkInputStream(pos, b0, RegionFile.createStream(bytebuffer, j1));
-+                    if (ret == null && this.canRecalcHeader) {
-+                        this.recalculateHeader();
++                    if (ret == null && this.canRecalcHeader && this.recalculateHeader()) {
 +                        return this.getChunkDataInputStream(pos);
 +                    }
 +                    return ret;
@@ -735,10 +742,13 @@ index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000
 +                ChunkPos chunkPos = ChunkSerializer.getChunkCoordinate(nbttagcompound);
 +                if (!chunkPos.equals(pos)) {
 +                    MinecraftServer.LOGGER.error("Attempting to read chunk data at " + pos.toString() + " but got chunk data for " + chunkPos.toString() + " instead! Attempting regionfile recalculation for regionfile " + regionfile.regionFile.getAbsolutePath());
-+                    regionfile.recalculateHeader();
++                    if (regionfile.recalculateHeader()) {
 +                        regionfile.fileLock.lock(); // otherwise we will unlock twice and only lock once.
 +                        return this.read(pos, regionfile);
 +                    }
++                    MinecraftServer.LOGGER.fatal("Can't recalculate regionfile header, regenerating chunk " + pos.toString() + " for " + regionfile.regionFile.getAbsolutePath());
++                    return null;
++                }
 +            }
 +            // Paper end - recover from corrupt regionfile header
              break label43;
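This last hunk changes the read path so the recursive retry happens only when recalculateHeader() reports success; the extra fileLock.lock() exists because both the current call and the recursive call unlock once each. A compact sketch of that lock balancing, assuming (as the in-code comment implies) that the caller acquires the lock before read() and read() releases it in a finally block; class and method names here are illustrative, not Paper's:

    import java.util.concurrent.locks.ReentrantLock;

    // Sketch of why the retry path re-locks before recursing: the outer call and
    // the recursive call each run the finally { unlock(); } once, so without an
    // extra lock() the lock would be released twice but acquired only once.
    class LockBalanceSketch {
        final ReentrantLock fileLock = new ReentrantLock();

        String read(int pos, boolean corrupt) {
            try {
                if (corrupt && recoverHeader()) {  // hypothetical recovery hook
                    fileLock.lock();               // balance the unlock the retry will perform
                    return read(pos, false);       // retry; both finally blocks unlock once each
                }
                return "data@" + pos;
            } finally {
                fileLock.unlock();
            }
        }

        boolean recoverHeader() { return true; }

        public static void main(String[] args) {
            LockBalanceSketch s = new LockBalanceSketch();
            s.fileLock.lock();                         // caller locks before read()
            System.out.println(s.read(3, true));       // data@3
            System.out.println(s.fileLock.isLocked()); // false: lock count is balanced
        }
    }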