Apply more patches
parent 98af0e0d3b
commit 20507b45cf
3 changed files with 83 additions and 83 deletions
@@ -1,81 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Jason Penilla <11360596+jpenilla@users.noreply.github.com>
Date: Wed, 18 Nov 2020 20:52:25 -0800
Subject: [PATCH] Entity load/save limit per chunk

Adds a config option to limit the number of entities saved and loaded
to a chunk. The default values of -1 disable the limit. Although
defaults are only included for certain entities, this allows setting
limits for any entity type.
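The limiting logic this diff adds is a simple per-type counter over the chunk's entity list. A minimal standalone sketch of that pattern, using plain String stand-ins rather than Paper's EntityType and config API:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // Sketch: count entities per type and skip any entity once its type
    // has reached the configured limit; -1 disables the limit.
    final class SaveLimitSketch {
        static List<String> applyLimit(List<String> entityTypes, Map<String, Integer> limits) {
            Map<String, Integer> saved = new HashMap<>();
            List<String> out = new ArrayList<>();
            for (String type : entityTypes) {
                int limit = limits.getOrDefault(type, -1); // -1 = unlimited
                if (limit > -1) {
                    if (saved.getOrDefault(type, 0) >= limit) {
                        continue; // over the cap: drop, like the patched save loop
                    }
                    saved.merge(type, 1, Integer::sum);
                }
                out.add(type);
            }
            return out;
        }

        public static void main(String[] args) {
            // three arrows with a limit of two: only two survive
            System.out.println(applyLimit(
                List.of("arrow", "arrow", "arrow", "zombie"),
                Map.of("arrow", 2))); // [arrow, arrow, zombie]
        }
    }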
diff --git a/src/main/java/ca/spottedleaf/moonrise/patches/chunk_system/level/entity/ChunkEntitySlices.java b/src/main/java/ca/spottedleaf/moonrise/patches/chunk_system/level/entity/ChunkEntitySlices.java
index 997b05167c19472acb98edac32d4548cc65efa8e..5c7f2471a0b15ac2e714527296ad2aa7291999eb 100644
--- a/src/main/java/ca/spottedleaf/moonrise/patches/chunk_system/level/entity/ChunkEntitySlices.java
+++ b/src/main/java/ca/spottedleaf/moonrise/patches/chunk_system/level/entity/ChunkEntitySlices.java
@@ -100,7 +100,18 @@ public final class ChunkEntitySlices {
}

final ListTag entitiesTag = new ListTag();
+ final java.util.Map<net.minecraft.world.entity.EntityType<?>, Integer> savedEntityCounts = new java.util.HashMap<>(); // Paper - Entity load/save limit per chunk
for (final Entity entity : entities) {
+ // Paper start - Entity load/save limit per chunk
+ final EntityType<?> entityType = entity.getType();
+ final int saveLimit = world.paperConfig().chunks.entityPerChunkSaveLimit.getOrDefault(entityType, -1);
+ if (saveLimit > -1) {
+ if (savedEntityCounts.getOrDefault(entityType, 0) >= saveLimit) {
+ continue;
+ }
+ savedEntityCounts.merge(entityType, 1, Integer::sum);
+ }
+ // Paper end - Entity load/save limit per chunk
CompoundTag compoundTag = new CompoundTag();
if (entity.save(compoundTag)) {
entitiesTag.add(compoundTag);
diff --git a/src/main/java/net/minecraft/world/entity/EntityType.java b/src/main/java/net/minecraft/world/entity/EntityType.java
index 842b0cec0397d7ae5166617627340ffac0e35db1..cb61462d4691a055a4b25f7b953609d8a154fdfe 100644
--- a/src/main/java/net/minecraft/world/entity/EntityType.java
+++ b/src/main/java/net/minecraft/world/entity/EntityType.java
@@ -649,9 +649,20 @@ public class EntityType<T extends Entity> implements FeatureElement, EntityTypeT
final Spliterator<? extends Tag> spliterator = entityNbtList.spliterator();

return StreamSupport.stream(new Spliterator<Entity>() {
+ final java.util.Map<EntityType<?>, Integer> loadedEntityCounts = new java.util.HashMap<>(); // Paper - Entity load/save limit per chunk
public boolean tryAdvance(Consumer<? super Entity> consumer) {
return spliterator.tryAdvance((nbtbase) -> {
EntityType.loadEntityRecursive((CompoundTag) nbtbase, world, (entity) -> {
+ // Paper start - Entity load/save limit per chunk
+ final EntityType<?> entityType = entity.getType();
+ final int saveLimit = world.paperConfig().chunks.entityPerChunkSaveLimit.getOrDefault(entityType, -1);
+ if (saveLimit > -1) {
+ if (this.loadedEntityCounts.getOrDefault(entityType, 0) >= saveLimit) {
+ return null;
+ }
+ this.loadedEntityCounts.merge(entityType, 1, Integer::sum);
+ }
+ // Paper end - Entity load/save limit per chunk
consumer.accept(entity);
return entity;
});
diff --git a/src/main/java/net/minecraft/world/level/chunk/storage/EntityStorage.java b/src/main/java/net/minecraft/world/level/chunk/storage/EntityStorage.java
index 503ac0374e0c9f9993ad37bb8bd8cf1570d3615a..d4a505ef4af9ded02aeb1a817bcbe5b1a912a5b3 100644
--- a/src/main/java/net/minecraft/world/level/chunk/storage/EntityStorage.java
+++ b/src/main/java/net/minecraft/world/level/chunk/storage/EntityStorage.java
@@ -92,7 +92,18 @@ public class EntityStorage implements EntityPersistentStorage<Entity> {
}
} else {
ListTag listTag = new ListTag();
+ final java.util.Map<net.minecraft.world.entity.EntityType<?>, Integer> savedEntityCounts = new java.util.HashMap<>(); // Paper - Entity load/save limit per chunk
dataList.getEntities().forEach(entity -> {
+ // Paper start - Entity load/save limit per chunk
+ final EntityType<?> entityType = entity.getType();
+ final int saveLimit = this.level.paperConfig().chunks.entityPerChunkSaveLimit.getOrDefault(entityType, -1);
+ if (saveLimit > -1) {
+ if (savedEntityCounts.getOrDefault(entityType, 0) >= saveLimit) {
+ return;
+ }
+ savedEntityCounts.merge(entityType, 1, Integer::sum);
+ }
+ // Paper end - Entity load/save limit per chunk
CompoundTag compoundTagx = new CompoundTag();
if (entity.save(compoundTagx)) {
listTag.add(compoundTagx);
@@ -1,742 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Spottedleaf <Spottedleaf@users.noreply.github.com>
Date: Sun, 2 Feb 2020 02:25:10 -0800
Subject: [PATCH] Attempt to recalculate regionfile header if it is corrupt

Instead of trying to relocate the chunk, which seems to never be
the correct choice (we end up duplicating or swapping chunks),
we instead drop the current regionfile header and recalculate -
hoping that at least then we don't swap chunks, and maybe recover
them all.
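Background for the recovery code below: a region file starts with two 4096-byte header sectors, and each of the 1024 chunk slots packs a sector offset and a sector count into one int, exactly the offset << 8 | count packing the recalculation writes. A small illustrative sketch of decoding such an entry (names are mine, not the Mojang API):

    // Sketch: decode one region-header location entry, matching the packing
    // written below: offset << 8 | (sectorCount > 255 ? 255 : sectorCount).
    final class HeaderEntrySketch {
        static int sectorOffset(int entry) { return entry >>> 8; } // in 4096-byte sectors
        static int sectorCount(int entry)  { return entry & 255; } // 255 = forge-style oversized

        public static void main(String[] args) {
            int entry = (3 << 8) | 2; // chunk data starts at sector 3, spans 2 sectors
            System.out.println(sectorOffset(entry) + " " + sectorCount(entry)); // 3 2
            System.out.println("byte offset in file: " + (long) sectorOffset(entry) * 4096L);
        }
    }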
diff --git a/src/main/java/net/minecraft/world/level/chunk/storage/ChunkSerializer.java b/src/main/java/net/minecraft/world/level/chunk/storage/ChunkSerializer.java
index 711541a9b12b823c1da779ed6027a53e9078a733..4bd048387651250135f963303c78c17f8473cfee 100644
--- a/src/main/java/net/minecraft/world/level/chunk/storage/ChunkSerializer.java
+++ b/src/main/java/net/minecraft/world/level/chunk/storage/ChunkSerializer.java
@@ -72,6 +72,18 @@ import net.minecraft.world.ticks.ProtoChunkTicks;
import org.slf4j.Logger;

public class ChunkSerializer {
+ // Paper start - Attempt to recalculate regionfile header if it is corrupt
+ // TODO: Check on update
+ public static long getLastWorldSaveTime(CompoundTag chunkData) {
+ final int dataVersion = ChunkStorage.getVersion(chunkData);
+ if (dataVersion < 2842) { // Level tag is removed after this version
+ final CompoundTag levelData = chunkData.getCompound("Level");
+ return levelData.getLong("LastUpdate");
+ } else {
+ return chunkData.getLong("LastUpdate");
+ }
+ }
+ // Paper end - Attempt to recalculate regionfile header if it is corrupt

public static final Codec<PalettedContainer<BlockState>> BLOCK_STATE_CODEC = PalettedContainer.codecRW(Block.BLOCK_STATE_REGISTRY, BlockState.CODEC, PalettedContainer.Strategy.SECTION_STATES, Blocks.AIR.defaultBlockState(), null); // Paper - Anti-Xray - Add preset block states
private static final Logger LOGGER = LogUtils.getLogger();
@@ -384,7 +396,7 @@ public class ChunkSerializer {
nbttagcompound.putInt("xPos", chunkcoordintpair.x);
nbttagcompound.putInt("yPos", chunk.getMinSection());
nbttagcompound.putInt("zPos", chunkcoordintpair.z);
- nbttagcompound.putLong("LastUpdate", asyncsavedata != null ? asyncsavedata.worldTime() : world.getGameTime()); // Paper - async chunk saving
+ nbttagcompound.putLong("LastUpdate", asyncsavedata != null ? asyncsavedata.worldTime() : world.getGameTime()); // Paper - async chunk saving // Paper - diff on change
nbttagcompound.putLong("InhabitedTime", chunk.getInhabitedTime());
nbttagcompound.putString("Status", BuiltInRegistries.CHUNK_STATUS.getKey(chunk.getPersistedStatus()).toString());
BlendingData blendingdata = chunk.getBlendingData();
diff --git a/src/main/java/net/minecraft/world/level/chunk/storage/RegionBitmap.java b/src/main/java/net/minecraft/world/level/chunk/storage/RegionBitmap.java
index a23dc2f8f4475de1ee35bf18a7a8a53233ccac12..226af44fd469053451a0403a95ffb446face9530 100644
--- a/src/main/java/net/minecraft/world/level/chunk/storage/RegionBitmap.java
+++ b/src/main/java/net/minecraft/world/level/chunk/storage/RegionBitmap.java
@@ -9,6 +9,27 @@ import java.util.BitSet;
public class RegionBitmap {
private final BitSet used = new BitSet();

+ // Paper start - Attempt to recalculate regionfile header if it is corrupt
+ public final void copyFrom(RegionBitmap other) {
+ BitSet thisBitset = this.used;
+ BitSet otherBitset = other.used;
+
+ for (int i = 0; i < Math.max(thisBitset.size(), otherBitset.size()); ++i) {
+ thisBitset.set(i, otherBitset.get(i));
+ }
+ }
+
+ public final boolean tryAllocate(int from, int length) {
+ BitSet bitset = this.used;
+ int firstSet = bitset.nextSetBit(from);
+ if (firstSet > 0 && firstSet < (from + length)) {
+ return false;
+ }
+ bitset.set(from, from + length);
+ return true;
+ }
+ // Paper end - Attempt to recalculate regionfile header if it is corrupt
+
public void force(int start, int size) {
this.used.set(start, start + size);
}
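The tryAllocate helper above is what the recovery pass leans on: an allocation only succeeds if no sector in the requested range is already used, which is how overlapping (corrupt) header entries are detected. The same check, sketched with a plain BitSet:

    import java.util.BitSet;

    // Sketch of the overlap check RegionBitmap.tryAllocate performs:
    // [from, from + length) succeeds only if no sector in it is already used.
    final class TryAllocateSketch {
        private final BitSet used = new BitSet();

        boolean tryAllocate(int from, int length) {
            int firstSet = used.nextSetBit(from); // -1 if nothing set at or after 'from'
            if (firstSet > 0 && firstSet < (from + length)) {
                return false; // overlaps an existing allocation
            }
            used.set(from, from + length);
            return true;
        }

        public static void main(String[] args) {
            TryAllocateSketch sectors = new TryAllocateSketch();
            sectors.tryAllocate(0, 2);                     // header sectors, like force(0, 2)
            System.out.println(sectors.tryAllocate(2, 3)); // true: sectors 2-4 are free
            System.out.println(sectors.tryAllocate(4, 1)); // false: sector 4 already used
        }
    }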
diff --git a/src/main/java/net/minecraft/world/level/chunk/storage/RegionFile.java b/src/main/java/net/minecraft/world/level/chunk/storage/RegionFile.java
index e761b63eebc1e76b2bb1cb887d83d0b63ad6ec90..1e0439cf3f4008fa430acb90b45f5bc4cdd6d7f2 100644
--- a/src/main/java/net/minecraft/world/level/chunk/storage/RegionFile.java
+++ b/src/main/java/net/minecraft/world/level/chunk/storage/RegionFile.java
@@ -51,6 +51,354 @@ public class RegionFile implements AutoCloseable {
private final IntBuffer timestamps;
@VisibleForTesting
protected final RegionBitmap usedSectors;
+ // Paper start - Attempt to recalculate regionfile header if it is corrupt
+ private static long roundToSectors(long bytes) {
+ long sectors = bytes >>> 12; // 4096 = 2^12
+ long remainingBytes = bytes & 4095;
+ long sign = -remainingBytes; // sign is 1 if nonzero
+ return sectors + (sign >>> 63);
+ }
+
+ private static final CompoundTag OVERSIZED_COMPOUND = new CompoundTag();
+
+ private CompoundTag attemptRead(long sector, int chunkDataLength, long fileLength) throws IOException {
+ try {
+ if (chunkDataLength < 0) {
+ return null;
+ }
+
+ long offset = sector * 4096L + 4L; // offset for chunk data
+
+ if ((offset + chunkDataLength) > fileLength) {
+ return null;
+ }
+
+ ByteBuffer chunkData = ByteBuffer.allocate(chunkDataLength);
+ if (chunkDataLength != this.file.read(chunkData, offset)) {
+ return null;
+ }
+
+ ((java.nio.Buffer)chunkData).flip();
+
+ byte compressionType = chunkData.get();
+ if (compressionType < 0) { // compressionType & 128 != 0
+ // oversized chunk
+ return OVERSIZED_COMPOUND;
+ }
+
+ RegionFileVersion compression = RegionFileVersion.fromId(compressionType);
+ if (compression == null) {
+ return null;
+ }
+
+ InputStream input = compression.wrap(new ByteArrayInputStream(chunkData.array(), chunkData.position(), chunkDataLength - chunkData.position()));
+
+ return NbtIo.read(new DataInputStream(input));
+ } catch (Exception ex) {
+ return null;
+ }
+ }
+
+ private int getLength(long sector) throws IOException {
+ ByteBuffer length = ByteBuffer.allocate(4);
+ if (4 != this.file.read(length, sector * 4096L)) {
+ return -1;
+ }
+
+ return length.getInt(0);
+ }
+
+ private void backupRegionFile() {
+ Path backup = this.path.getParent().resolve(this.path.getFileName() + "." + new java.util.Random().nextLong() + ".backup");
+ this.backupRegionFile(backup);
+ }
+
+ private void backupRegionFile(Path to) {
+ try {
+ this.file.force(true);
+ LOGGER.warn("Backing up regionfile \"" + this.path.toAbsolutePath() + "\" to " + to.toAbsolutePath());
+ java.nio.file.Files.copy(this.path, to, java.nio.file.StandardCopyOption.COPY_ATTRIBUTES);
+ LOGGER.warn("Backed up the regionfile to " + to.toAbsolutePath());
+ } catch (IOException ex) {
+ LOGGER.error("Failed to backup to " + to.toAbsolutePath(), ex);
+ }
+ }
+
+ private static boolean inSameRegionfile(ChunkPos first, ChunkPos second) {
+ return (first.x & ~31) == (second.x & ~31) && (first.z & ~31) == (second.z & ~31);
+ }
+
+ // note: only call for CHUNK regionfiles
+ boolean recalculateHeader() throws IOException {
+ if (!this.canRecalcHeader) {
+ return false;
+ }
+ ChunkPos ourLowerLeftPosition = RegionFileStorage.getRegionFileCoordinates(this.path);
+ if (ourLowerLeftPosition == null) {
+ LOGGER.error("Unable to get chunk location of regionfile " + this.path.toAbsolutePath() + ", cannot recover header");
+ return false;
+ }
+ synchronized (this) {
+ LOGGER.warn("Corrupt regionfile header detected! Attempting to re-calculate header offsets for regionfile " + this.path.toAbsolutePath(), new Throwable());
+
+ // try to backup file so maybe it could be sent to us for further investigation
+
+ this.backupRegionFile();
+ CompoundTag[] compounds = new CompoundTag[32 * 32]; // only in the regionfile (i.e exclude mojang/aikar oversized data)
+ int[] rawLengths = new int[32 * 32]; // length of chunk data including 4 byte length field, bytes
+ int[] sectorOffsets = new int[32 * 32]; // in sectors
+ boolean[] hasAikarOversized = new boolean[32 * 32];
+
+ long fileLength = this.file.size();
+ long totalSectors = roundToSectors(fileLength);
+
+ // search the regionfile from start to finish for the most up-to-date chunk data
+
+ for (long i = 2, maxSector = Math.min((long)(Integer.MAX_VALUE >>> 8), totalSectors); i < maxSector; ++i) { // first two sectors are header, skip
+ int chunkDataLength = this.getLength(i);
+ CompoundTag compound = this.attemptRead(i, chunkDataLength, fileLength);
+ if (compound == null || compound == OVERSIZED_COMPOUND) {
+ continue;
+ }
+
+ ChunkPos chunkPos = ChunkSerializer.getChunkCoordinate(compound);
+ if (!inSameRegionfile(ourLowerLeftPosition, chunkPos)) {
+ LOGGER.error("Ignoring absolute chunk " + chunkPos + " in regionfile as it is not contained in the bounds of the regionfile '" + this.path.toAbsolutePath() + "'. It should be in regionfile (" + (chunkPos.x >> 5) + "," + (chunkPos.z >> 5) + ")");
+ continue;
+ }
+ int location = (chunkPos.x & 31) | ((chunkPos.z & 31) << 5);
+
+ CompoundTag otherCompound = compounds[location];
+
+ if (otherCompound != null && ChunkSerializer.getLastWorldSaveTime(otherCompound) > ChunkSerializer.getLastWorldSaveTime(compound)) {
+ continue; // don't overwrite newer data.
+ }
+
+ // aikar oversized?
+ Path aikarOversizedFile = this.getOversizedFile(chunkPos.x, chunkPos.z);
+ boolean isAikarOversized = false;
+ if (Files.exists(aikarOversizedFile)) {
+ try {
+ CompoundTag aikarOversizedCompound = this.getOversizedData(chunkPos.x, chunkPos.z);
+ if (ChunkSerializer.getLastWorldSaveTime(compound) == ChunkSerializer.getLastWorldSaveTime(aikarOversizedCompound)) {
+ // best we got for an id. hope it's good enough
+ isAikarOversized = true;
+ }
+ } catch (Exception ex) {
+ LOGGER.error("Failed to read aikar oversized data for absolute chunk (" + chunkPos.x + "," + chunkPos.z + ") in regionfile " + this.path.toAbsolutePath() + ", oversized data for this chunk will be lost", ex);
+ // fall through, if we can't read aikar oversized we can't risk corrupting chunk data
+ }
+ }
+
+ hasAikarOversized[location] = isAikarOversized;
+ compounds[location] = compound;
+ rawLengths[location] = chunkDataLength + 4;
+ sectorOffsets[location] = (int)i;
+
+ int chunkSectorLength = (int)roundToSectors(rawLengths[location]);
+ i += chunkSectorLength;
+ --i; // gets incremented next iteration
+ }
+
+ // forge style oversized data is already handled by the local search, and aikar data we just hope
+ // we get it right as aikar data has no identifiers we could use to try and find its corresponding
+ // local data compound
+
+ java.nio.file.Path containingFolder = this.externalFileDir;
+ Path[] regionFiles = Files.list(containingFolder).toArray(Path[]::new);
+ boolean[] oversized = new boolean[32 * 32];
+ RegionFileVersion[] oversizedCompressionTypes = new RegionFileVersion[32 * 32];
+
+ if (regionFiles != null) {
+ int lowerXBound = ourLowerLeftPosition.x; // inclusive
+ int lowerZBound = ourLowerLeftPosition.z; // inclusive
+ int upperXBound = lowerXBound + 32 - 1; // inclusive
+ int upperZBound = lowerZBound + 32 - 1; // inclusive
+
+ // read mojang oversized data
+ for (Path regionFile : regionFiles) {
+ ChunkPos oversizedCoords = getOversizedChunkPair(regionFile);
+ if (oversizedCoords == null) {
+ continue;
+ }
+
+ if ((oversizedCoords.x < lowerXBound || oversizedCoords.x > upperXBound) || (oversizedCoords.z < lowerZBound || oversizedCoords.z > upperZBound)) {
+ continue; // not in our regionfile
+ }
+
+ // ensure oversized data is valid & is newer than data in the regionfile
+
+ int location = (oversizedCoords.x & 31) | ((oversizedCoords.z & 31) << 5);
+
+ byte[] chunkData;
+ try {
+ chunkData = Files.readAllBytes(regionFile);
+ } catch (Exception ex) {
+ LOGGER.error("Failed to read oversized chunk data in file " + regionFile.toAbsolutePath() + ", data will be lost", ex);
+ continue;
+ }
+
+ CompoundTag compound = null;
+
+ // We do not know the compression type, as it's stored in the regionfile. So we need to try all of them
+ RegionFileVersion compression = null;
+ for (RegionFileVersion compressionType : RegionFileVersion.VERSIONS.values()) {
+ try {
+ DataInputStream in = new DataInputStream(compressionType.wrap(new ByteArrayInputStream(chunkData))); // typical java
+ compound = NbtIo.read((java.io.DataInput)in);
+ compression = compressionType;
+ break; // reaches here iff readNBT does not throw
+ } catch (Exception ex) {
+ continue;
+ }
+ }
+
+ if (compound == null) {
+ LOGGER.error("Failed to read oversized chunk data in file " + regionFile.toAbsolutePath() + ", it's corrupt. Its data will be lost");
+ continue;
+ }
+
+ if (!ChunkSerializer.getChunkCoordinate(compound).equals(oversizedCoords)) {
+ LOGGER.error("Can't use oversized chunk stored in " + regionFile.toAbsolutePath() + ", got absolute chunkpos: " + ChunkSerializer.getChunkCoordinate(compound) + ", expected " + oversizedCoords);
+ continue;
+ }
+
+ if (compounds[location] == null || ChunkSerializer.getLastWorldSaveTime(compound) > ChunkSerializer.getLastWorldSaveTime(compounds[location])) {
+ oversized[location] = true;
+ oversizedCompressionTypes[location] = compression;
+ }
+ }
+ }
+
+ // now we need to calculate a new offset header
+
+ int[] calculatedOffsets = new int[32 * 32];
+ RegionBitmap newSectorAllocations = new RegionBitmap();
+ newSectorAllocations.force(0, 2); // make space for header
+
+ // allocate sectors for normal chunks
+
+ for (int chunkX = 0; chunkX < 32; ++chunkX) {
+ for (int chunkZ = 0; chunkZ < 32; ++chunkZ) {
+ int location = chunkX | (chunkZ << 5);
+
+ if (oversized[location]) {
+ continue;
+ }
+
+ int rawLength = rawLengths[location]; // bytes
+ int sectorOffset = sectorOffsets[location]; // sectors
+ int sectorLength = (int)roundToSectors(rawLength);
+
+ if (newSectorAllocations.tryAllocate(sectorOffset, sectorLength)) {
+ calculatedOffsets[location] = sectorOffset << 8 | (sectorLength > 255 ? 255 : sectorLength); // support forge style oversized
+ } else {
+ LOGGER.error("Failed to allocate space for local chunk (overlapping data??) at (" + chunkX + "," + chunkZ + ") in regionfile " + this.path.toAbsolutePath() + ", chunk will be regenerated");
+ }
+ }
+ }
+
+ // allocate sectors for oversized chunks
+
+ for (int chunkX = 0; chunkX < 32; ++chunkX) {
+ for (int chunkZ = 0; chunkZ < 32; ++chunkZ) {
+ int location = chunkX | (chunkZ << 5);
+
+ if (!oversized[location]) {
+ continue;
+ }
+
+ int sectorOffset = newSectorAllocations.allocate(1);
+ int sectorLength = 1;
+
+ try {
+ this.file.write(this.createExternalStub(oversizedCompressionTypes[location]), sectorOffset * 4096);
+ // only allocate in the new offsets if the write succeeds
+ calculatedOffsets[location] = sectorOffset << 8 | (sectorLength > 255 ? 255 : sectorLength); // support forge style oversized
+ } catch (IOException ex) {
+ newSectorAllocations.free(sectorOffset, sectorLength);
+ LOGGER.error("Failed to write new oversized chunk data holder, local chunk at (" + chunkX + "," + chunkZ + ") in regionfile " + this.path.toAbsolutePath() + " will be regenerated");
+ }
+ }
+ }
+
+ // rewrite aikar oversized data
+
+ this.oversizedCount = 0;
+ for (int chunkX = 0; chunkX < 32; ++chunkX) {
+ for (int chunkZ = 0; chunkZ < 32; ++chunkZ) {
+ int location = chunkX | (chunkZ << 5);
+ int isAikarOversized = hasAikarOversized[location] ? 1 : 0;
+
+ this.oversizedCount += isAikarOversized;
+ this.oversized[location] = (byte)isAikarOversized;
+ }
+ }
+
+ if (this.oversizedCount > 0) {
+ try {
+ this.writeOversizedMeta();
+ } catch (Exception ex) {
+ LOGGER.error("Failed to write aikar oversized chunk meta, all aikar style oversized chunk data will be lost for regionfile " + this.path.toAbsolutePath(), ex);
+ Files.deleteIfExists(this.getOversizedMetaFile());
+ }
+ } else {
+ Files.deleteIfExists(this.getOversizedMetaFile());
+ }
+
+ this.usedSectors.copyFrom(newSectorAllocations);
+
+ // before we overwrite the old sectors, print a summary of the chunks that got changed.
+
+ LOGGER.info("Starting summary of changes for regionfile " + this.path.toAbsolutePath());
+
+ for (int chunkX = 0; chunkX < 32; ++chunkX) {
+ for (int chunkZ = 0; chunkZ < 32; ++chunkZ) {
+ int location = chunkX | (chunkZ << 5);
+
+ int oldOffset = this.offsets.get(location);
+ int newOffset = calculatedOffsets[location];
+
+ if (oldOffset == newOffset) {
+ continue;
+ }
+
+ this.offsets.put(location, newOffset); // overwrite incorrect offset
+
+ if (oldOffset == 0) {
+ // found lost data
+ LOGGER.info("Found missing data for local chunk (" + chunkX + "," + chunkZ + ") in regionfile " + this.path.toAbsolutePath());
+ } else if (newOffset == 0) {
+ LOGGER.warn("Data for local chunk (" + chunkX + "," + chunkZ + ") could not be recovered in regionfile " + this.path.toAbsolutePath() + ", it will be regenerated");
+ } else {
+ LOGGER.info("Local chunk (" + chunkX + "," + chunkZ + ") changed to point to newer data or correct chunk in regionfile " + this.path.toAbsolutePath());
+ }
+ }
+ }
+
+ LOGGER.info("End of change summary for regionfile " + this.path.toAbsolutePath());
+
+ // simply destroy the timestamp header, it's not used
+
+ for (int i = 0; i < 32 * 32; ++i) {
+ this.timestamps.put(i, calculatedOffsets[i] != 0 ? RegionFile.getTimestamp() : 0); // write a valid timestamp for valid chunks, I do not want to find out whatever dumb program actually checks this
+ }
+
+ // write new header
+ try {
+ this.flush();
+ this.file.force(true); // try to ensure it goes through...
+ LOGGER.info("Successfully wrote new header to disk for regionfile " + this.path.toAbsolutePath());
+ } catch (IOException ex) {
+ LOGGER.error("Failed to write new header to disk for regionfile " + this.path.toAbsolutePath(), ex);
+ }
+ }
+
+ return true;
+ }
+
+ final boolean canRecalcHeader; // final forces compile fail on new constructor
+ // Paper end - Attempt to recalculate regionfile header if it is corrupt

public RegionFile(RegionStorageInfo storageKey, Path directory, Path path, boolean dsync) throws IOException {
this(storageKey, directory, path, RegionFileVersion.getCompressionFormat(), dsync); // Paper - Configurable region compression format
@@ -67,6 +415,7 @@ public class RegionFile implements AutoCloseable {
throw new IllegalArgumentException("Expected directory, got " + String.valueOf(directory.toAbsolutePath()));
} else {
this.externalFileDir = directory;
+ this.canRecalcHeader = RegionFileStorage.isChunkDataFolder(this.externalFileDir); // Paper - add can recalc flag
this.offsets = this.header.asIntBuffer();
((java.nio.Buffer) this.offsets).limit(1024); // CraftBukkit - decompile error
((java.nio.Buffer) this.header).position(4096); // CraftBukkit - decompile error
@@ -86,14 +435,16 @@ public class RegionFile implements AutoCloseable {
RegionFile.LOGGER.warn("Region file {} has truncated header: {}", path, i);
}

- long j = Files.size(path);
+ final long j = Files.size(path); final long regionFileSize = j; // Paper - recalculate header on header corruption

- for (int k = 0; k < 1024; ++k) {
- int l = this.offsets.get(k);
+ boolean needsHeaderRecalc = false; // Paper - recalculate header on header corruption
+ boolean hasBackedUp = false; // Paper - recalculate header on header corruption
+ for (int k = 0; k < 1024; ++k) { final int headerLocation = k; // Paper - we expect this to be the header location
+ final int l = this.offsets.get(k);

if (l != 0) {
- int i1 = RegionFile.getSectorNumber(l);
- int j1 = RegionFile.getNumSectors(l);
+ final int i1 = RegionFile.getSectorNumber(l); final int offset = i1; // Paper - we expect this to be offset in file in sectors
+ int j1 = RegionFile.getNumSectors(l); final int sectorLength; // Paper - diff on change, we expect this to be sector length of region - watch out for reassignments
// Spigot start
if (j1 == 255) {
// We're maxed out, so we need to read the proper length from the section
@@ -102,21 +453,66 @@ public class RegionFile implements AutoCloseable {
j1 = (realLen.getInt(0) + 4) / 4096 + 1;
}
// Spigot end
+ sectorLength = j1; // Paper - diff on change, we expect this to be sector length of region

if (i1 < 2) {
RegionFile.LOGGER.warn("Region file {} has invalid sector at index: {}; sector {} overlaps with header", new Object[]{path, k, i1});
- this.offsets.put(k, 0);
+ //this.offsets.put(k, 0); // Paper - we catch this, but need it in the header for the summary change
} else if (j1 == 0) {
RegionFile.LOGGER.warn("Region file {} has an invalid sector at index: {}; size has to be > 0", path, k);
- this.offsets.put(k, 0);
+ //this.offsets.put(k, 0); // Paper - we catch this, but need it in the header for the summary change
} else if ((long) i1 * 4096L > j) {
RegionFile.LOGGER.warn("Region file {} has an invalid sector at index: {}; sector {} is out of bounds", new Object[]{path, k, i1});
- this.offsets.put(k, 0);
+ //this.offsets.put(k, 0); // Paper - we catch this, but need it in the header for the summary change
} else {
- this.usedSectors.force(i1, j1);
+ //this.usedSectors.force(i1, j1); // Paper - move this down so we can check if it fails to allocate
+ }
+ // Paper start - recalculate header on header corruption
+ if (offset < 2 || sectorLength <= 0 || ((long)offset * 4096L) > regionFileSize) {
+ if (canRecalcHeader) {
+ LOGGER.error("Detected invalid header for regionfile " + this.path.toAbsolutePath() + "! Recalculating header...");
+ needsHeaderRecalc = true;
+ break;
+ } else {
+ // location = chunkX | (chunkZ << 5);
+ LOGGER.error("Detected invalid header for regionfile " + this.path.toAbsolutePath() +
+ "! Cannot recalculate, removing local chunk (" + (headerLocation & 31) + "," + (headerLocation >>> 5) + ") from header");
+ if (!hasBackedUp) {
+ hasBackedUp = true;
+ this.backupRegionFile();
+ }
+ this.timestamps.put(headerLocation, 0); // be consistent, delete the timestamp too
+ this.offsets.put(headerLocation, 0); // delete the entry from header
+ continue;
+ }
+ }
+ boolean failedToAllocate = !this.usedSectors.tryAllocate(offset, sectorLength);
+ if (failedToAllocate) {
+ LOGGER.error("Overlapping allocation by local chunk (" + (headerLocation & 31) + "," + (headerLocation >>> 5) + ") in regionfile " + this.path.toAbsolutePath());
}
+ if (failedToAllocate & !canRecalcHeader) {
+ // location = chunkX | (chunkZ << 5);
+ LOGGER.error("Detected invalid header for regionfile " + this.path.toAbsolutePath() +
+ "! Cannot recalculate, removing local chunk (" + (headerLocation & 31) + "," + (headerLocation >>> 5) + ") from header");
+ if (!hasBackedUp) {
+ hasBackedUp = true;
+ this.backupRegionFile();
+ }
+ this.timestamps.put(headerLocation, 0); // be consistent, delete the timestamp too
+ this.offsets.put(headerLocation, 0); // delete the entry from header
+ continue;
+ }
+ needsHeaderRecalc |= failedToAllocate;
+ // Paper end - recalculate header on header corruption
}
}
+ // Paper start - recalculate header on header corruption
+ // we move the recalc here so comparison to old header is correct when logging to console
+ if (needsHeaderRecalc) { // true if header gave us overlapping allocations or had other issues
+ LOGGER.error("Recalculating regionfile " + this.path.toAbsolutePath() + ", header gave erroneous offsets & locations");
+ this.recalculateHeader();
+ }
+ // Paper end
}

}
@@ -127,11 +523,36 @@ public class RegionFile implements AutoCloseable {
}

private Path getExternalChunkPath(ChunkPos chunkPos) {
- String s = "c." + chunkPos.x + "." + chunkPos.z + ".mcc";
+ String s = "c." + chunkPos.x + "." + chunkPos.z + ".mcc"; // Paper - diff on change

return this.externalFileDir.resolve(s);
}

+ // Paper start
+ private static ChunkPos getOversizedChunkPair(Path file) {
+ String fileName = file.getFileName().toString();
+
+ if (!fileName.startsWith("c.") || !fileName.endsWith(".mcc")) {
+ return null;
+ }
+
+ String[] split = fileName.split("\\.");
+
+ if (split.length != 4) {
+ return null;
+ }
+
+ try {
+ int x = Integer.parseInt(split[1]);
+ int z = Integer.parseInt(split[2]);
+
+ return new ChunkPos(x, z);
+ } catch (NumberFormatException ex) {
+ return null;
+ }
+ }
+ // Paper end
+
@Nullable
public synchronized DataInputStream getChunkDataInputStream(ChunkPos pos) throws IOException {
int i = this.getOffset(pos);
@@ -155,6 +576,11 @@ public class RegionFile implements AutoCloseable {
((java.nio.Buffer) bytebuffer).flip(); // CraftBukkit - decompile error
if (bytebuffer.remaining() < 5) {
RegionFile.LOGGER.error("Chunk {} header is truncated: expected {} but read {}", new Object[]{pos, l, bytebuffer.remaining()});
+ // Paper start - recalculate header on regionfile corruption
+ if (this.canRecalcHeader && this.recalculateHeader()) {
+ return this.getChunkDataInputStream(pos);
+ }
+ // Paper end - recalculate header on regionfile corruption
return null;
} else {
int i1 = bytebuffer.getInt();
@@ -162,6 +588,11 @@ public class RegionFile implements AutoCloseable {

if (i1 == 0) {
RegionFile.LOGGER.warn("Chunk {} is allocated, but stream is missing", pos);
+ // Paper start - recalculate header on regionfile corruption
+ if (this.canRecalcHeader && this.recalculateHeader()) {
+ return this.getChunkDataInputStream(pos);
+ }
+ // Paper end - recalculate header on regionfile corruption
return null;
} else {
int j1 = i1 - 1;
@@ -169,18 +600,45 @@ public class RegionFile implements AutoCloseable {
if (RegionFile.isExternalStreamChunk(b0)) {
if (j1 != 0) {
RegionFile.LOGGER.warn("Chunk has both internal and external streams");
+ // Paper start - recalculate header on regionfile corruption
+ if (this.canRecalcHeader && this.recalculateHeader()) {
+ return this.getChunkDataInputStream(pos);
+ }
+ // Paper end - recalculate header on regionfile corruption
}

- return this.createExternalChunkInputStream(pos, RegionFile.getExternalChunkVersion(b0));
+ // Paper start - recalculate header on regionfile corruption
+ final DataInputStream ret = this.createExternalChunkInputStream(pos, RegionFile.getExternalChunkVersion(b0));
+ if (ret == null && this.canRecalcHeader && this.recalculateHeader()) {
+ return this.getChunkDataInputStream(pos);
+ }
+ return ret;
+ // Paper end - recalculate header on regionfile corruption
} else if (j1 > bytebuffer.remaining()) {
RegionFile.LOGGER.error("Chunk {} stream is truncated: expected {} but read {}", new Object[]{pos, j1, bytebuffer.remaining()});
+ // Paper start - recalculate header on regionfile corruption
+ if (this.canRecalcHeader && this.recalculateHeader()) {
+ return this.getChunkDataInputStream(pos);
+ }
+ // Paper end - recalculate header on regionfile corruption
return null;
} else if (j1 < 0) {
RegionFile.LOGGER.error("Declared size {} of chunk {} is negative", i1, pos);
+ // Paper start - recalculate header on regionfile corruption
+ if (this.canRecalcHeader && this.recalculateHeader()) {
+ return this.getChunkDataInputStream(pos);
+ }
+ // Paper end - recalculate header on regionfile corruption
return null;
} else {
JvmProfiler.INSTANCE.onRegionFileRead(this.info, pos, this.version, j1);
- return this.createChunkInputStream(pos, b0, RegionFile.createStream(bytebuffer, j1));
+ // Paper start - recalculate header on regionfile corruption
+ final DataInputStream ret = this.createChunkInputStream(pos, b0, RegionFile.createStream(bytebuffer, j1));
+ if (ret == null && this.canRecalcHeader && this.recalculateHeader()) {
+ return this.getChunkDataInputStream(pos);
+ }
+ return ret;
+ // Paper end - recalculate header on regionfile corruption
}
}
}
@@ -366,10 +824,15 @@ public class RegionFile implements AutoCloseable {
}

private ByteBuffer createExternalStub() {
+ // Paper start - add version param
+ return this.createExternalStub(this.version);
+ }
+ private ByteBuffer createExternalStub(RegionFileVersion version) {
+ // Paper end - add version param
ByteBuffer bytebuffer = ByteBuffer.allocate(5);

bytebuffer.putInt(1);
- bytebuffer.put((byte) (this.version.getId() | 128));
+ bytebuffer.put((byte) (version.getId() | 128)); // Paper - replace with version param
((java.nio.Buffer) bytebuffer).flip(); // CraftBukkit - decompile error
return bytebuffer;
}
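A note on the roundToSectors helper near the top of this file's diff: it is a branchless ceiling division by the 4096-byte sector size. bytes >>> 12 is the floor; -remainingBytes has its sign bit set exactly when the remainder is nonzero, so (sign >>> 63) contributes the extra 1. A sketch verifying the equivalence:

    // Sketch checking the branchless ceil-divide used by roundToSectors:
    // floor(bytes / 4096) plus 1 iff (bytes % 4096) != 0.
    final class RoundToSectorsSketch {
        static long roundToSectors(long bytes) {
            long sectors = bytes >>> 12;  // 4096 = 2^12
            long remainingBytes = bytes & 4095;
            long sign = -remainingBytes;  // sign bit set iff remainder nonzero
            return sectors + (sign >>> 63);
        }

        public static void main(String[] args) {
            for (long bytes : new long[] {0, 1, 4095, 4096, 4097, 8192}) {
                long expected = (bytes + 4095) / 4096;
                System.out.println(bytes + " -> " + roundToSectors(bytes) + " (expected " + expected + ")");
            }
        }
    }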
diff --git a/src/main/java/net/minecraft/world/level/chunk/storage/RegionFileStorage.java b/src/main/java/net/minecraft/world/level/chunk/storage/RegionFileStorage.java
index 0615fd82b71efb9a397de01615050e6d906c2844..40689256711cc94a806ca1da346f4f62eda31526 100644
--- a/src/main/java/net/minecraft/world/level/chunk/storage/RegionFileStorage.java
+++ b/src/main/java/net/minecraft/world/level/chunk/storage/RegionFileStorage.java
@@ -105,11 +105,42 @@ public class RegionFileStorage implements AutoCloseable, ca.spottedleaf.moonrise
return ret;
}
// Paper end - rewrite chunk system
+ // Paper start - recalculate region file headers
+ private final boolean isChunkData;
+
+ public static boolean isChunkDataFolder(Path path) {
+ return path.toFile().getName().equalsIgnoreCase("region");
+ }
+
+ @Nullable
+ public static ChunkPos getRegionFileCoordinates(Path file) {
+ String fileName = file.getFileName().toString();
+ if (!fileName.startsWith("r.") || !fileName.endsWith(".mca")) {
+ return null;
+ }
+
+ String[] split = fileName.split("\\.");
+
+ if (split.length != 4) {
+ return null;
+ }
+
+ try {
+ int x = Integer.parseInt(split[1]);
+ int z = Integer.parseInt(split[2]);
+
+ return new ChunkPos(x << 5, z << 5);
+ } catch (NumberFormatException ex) {
+ return null;
+ }
+ }
+ // Paper end

protected RegionFileStorage(RegionStorageInfo storageKey, Path directory, boolean dsync) { // Paper - protected
this.folder = directory;
this.sync = dsync;
this.info = storageKey;
+ this.isChunkData = isChunkDataFolder(this.folder); // Paper - recalculate region file headers
}

public RegionFile getRegionFile(ChunkPos chunkcoordintpair, boolean existingOnly) throws IOException { // CraftBukkit // Paper - public
@@ -203,6 +234,19 @@ public class RegionFileStorage implements AutoCloseable, ca.spottedleaf.moonrise
try {
if (datainputstream != null) {
nbttagcompound = NbtIo.read((DataInput) datainputstream);
+ // Paper start - recover from corrupt regionfile header
+ if (this.isChunkData) {
+ ChunkPos chunkPos = ChunkSerializer.getChunkCoordinate(nbttagcompound);
+ if (!chunkPos.equals(pos)) {
+ net.minecraft.server.MinecraftServer.LOGGER.error("Attempting to read chunk data at " + pos + " but got chunk data for " + chunkPos + " instead! Attempting regionfile recalculation for regionfile " + regionfile.getPath().toAbsolutePath());
+ if (regionfile.recalculateHeader()) {
+ return this.read(pos);
+ }
+ net.minecraft.server.MinecraftServer.LOGGER.error("Can't recalculate regionfile header, regenerating chunk " + pos + " for " + regionfile.getPath().toAbsolutePath());
+ return null;
+ }
+ }
+ // Paper end - recover from corrupt regionfile header
break label43;
}
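getRegionFileCoordinates above maps a region filename to its lower-left chunk position (region coordinates times 32, hence the << 5), so a file named r.-3.2.mca covers chunks (-96,64) through (-65,95) under that logic. A self-contained sketch of the same parse:

    // Sketch of the filename parse in getRegionFileCoordinates: "r.<rx>.<rz>.mca"
    // becomes the lower-left chunk position (rx << 5, rz << 5).
    final class RegionNameSketch {
        static int[] lowerLeftChunk(String fileName) {
            if (!fileName.startsWith("r.") || !fileName.endsWith(".mca")) return null;
            String[] split = fileName.split("\\.");
            if (split.length != 4) return null;
            try {
                return new int[] {Integer.parseInt(split[1]) << 5, Integer.parseInt(split[2]) << 5};
            } catch (NumberFormatException ex) {
                return null;
            }
        }

        public static void main(String[] args) {
            int[] pos = lowerLeftChunk("r.-3.2.mca");
            System.out.println(pos[0] + "," + pos[1]); // -96,64
        }
    }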
diff --git a/src/main/java/net/minecraft/world/level/chunk/storage/RegionFileVersion.java b/src/main/java/net/minecraft/world/level/chunk/storage/RegionFileVersion.java
index ef68b57ef1d8d7cb317c417569dd23a777fba4ad..f4a39f49b354c560d614483db1cd3dfc154e94b4 100644
--- a/src/main/java/net/minecraft/world/level/chunk/storage/RegionFileVersion.java
+++ b/src/main/java/net/minecraft/world/level/chunk/storage/RegionFileVersion.java
@@ -21,7 +21,7 @@ import org.slf4j.Logger;

public class RegionFileVersion {
private static final Logger LOGGER = LogUtils.getLogger();
- private static final Int2ObjectMap<RegionFileVersion> VERSIONS = new Int2ObjectOpenHashMap<>();
+ public static final Int2ObjectMap<RegionFileVersion> VERSIONS = new Int2ObjectOpenHashMap<>(); // Paper - private -> public
private static final Object2ObjectMap<String, RegionFileVersion> VERSIONS_BY_NAME = new Object2ObjectOpenHashMap<>();
public static final RegionFileVersion VERSION_GZIP = register(
new RegionFileVersion(
@@ -1,371 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Riley Park <rileysebastianpark@gmail.com>
Date: Tue, 16 Jul 2024 14:55:23 -0700
Subject: [PATCH] Bundle spark

diff --git a/build.gradle.kts b/build.gradle.kts
index a0f086ec054c456067ee07bf3c7685e2b9458d72..2ed184ccaa963ba1ea007628d2472f31c053be98 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -62,6 +62,10 @@ dependencies {
implementation("io.papermc:reflection-rewriter-runtime:$reflectionRewriterVersion")
implementation("io.papermc:reflection-rewriter-proxy-generator:$reflectionRewriterVersion")
// Paper end - Remap reflection
+ // Paper start - spark
+ implementation("me.lucko:spark-api:0.1-20240720.200737-2")
+ implementation("me.lucko:spark-paper:1.10.105-SNAPSHOT")
+ // Paper end - spark
}

paperweight {
diff --git a/src/main/java/io/papermc/paper/SparksFly.java b/src/main/java/io/papermc/paper/SparksFly.java
new file mode 100644
index 0000000000000000000000000000000000000000..2955b7ec9832a5752ea4aff9fc9d34ae2f9ee83e
--- /dev/null
+++ b/src/main/java/io/papermc/paper/SparksFly.java
@@ -0,0 +1,201 @@
+package io.papermc.paper;
+
+import io.papermc.paper.configuration.GlobalConfiguration;
+import io.papermc.paper.plugin.entrypoint.classloader.group.PaperPluginClassLoaderStorage;
+import io.papermc.paper.plugin.provider.classloader.ConfiguredPluginClassLoader;
+import io.papermc.paper.plugin.provider.classloader.PaperClassLoaderStorage;
+import io.papermc.paper.util.MCUtil;
+import java.util.Collection;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import me.lucko.spark.paper.api.Compatibility;
+import me.lucko.spark.paper.api.PaperClassLookup;
+import me.lucko.spark.paper.api.PaperScheduler;
+import me.lucko.spark.paper.api.PaperSparkModule;
+import net.kyori.adventure.text.Component;
+import net.kyori.adventure.text.format.TextColor;
+import net.minecraft.util.ExceptionCollector;
+import org.bukkit.Server;
+import org.bukkit.command.Command;
+import org.bukkit.command.CommandSender;
+import org.bukkit.craftbukkit.CraftServer;
+
+// It's like electricity.
+public final class SparksFly {
+ public static final String ID = "spark";
+ public static final String COMMAND_NAME = "spark";
+
+ private static final String PREFER_SPARK_PLUGIN_PROPERTY = "paper.preferSparkPlugin";
+
+ private static final int SPARK_YELLOW = 0xffc93a;
+
+ private final Logger logger;
+ private final PaperSparkModule spark;
+
+ private boolean enabled;
+ private boolean disabledInConfigurationWarningLogged;
+
+ public SparksFly(final Server server) {
+ this.logger = Logger.getLogger(ID);
+ this.logger.log(Level.INFO, "This server bundles the spark profiler. For more information please visit https://docs.papermc.io/paper/profiling");
+ this.spark = PaperSparkModule.create(Compatibility.VERSION_1_0, server, this.logger, new PaperScheduler() {
+ @Override
+ public void executeAsync(final Runnable runnable) {
+ MCUtil.scheduleAsyncTask(this.catching(runnable, "asynchronous"));
+ }
+
+ @Override
+ public void executeSync(final Runnable runnable) {
+ MCUtil.ensureMain(this.catching(runnable, "synchronous"));
+ }
+
+ private Runnable catching(final Runnable runnable, final String type) {
+ return () -> {
+ try {
+ runnable.run();
+ } catch (final Throwable t) {
+ SparksFly.this.logger.log(Level.SEVERE, "An exception was encountered while executing a " + type + " spark task", t);
+ }
+ };
+ }
+ }, new PaperClassLookup() {
+ @Override
+ public Class<?> lookup(final String className) throws Exception {
+ final ExceptionCollector<ClassNotFoundException> exceptions = new ExceptionCollector<>();
+ try {
+ return Class.forName(className);
+ } catch (final ClassNotFoundException e) {
+ exceptions.add(e);
+ for (final ConfiguredPluginClassLoader loader : ((PaperPluginClassLoaderStorage) PaperClassLoaderStorage.instance()).getGlobalGroup().getClassLoaders()) {
+ try {
+ final Class<?> loadedClass = loader.loadClass(className, true, false, true);
+ if (loadedClass != null) {
+ return loadedClass;
+ }
+ } catch (final ClassNotFoundException exception) {
+ exceptions.add(exception);
+ }
+ }
+ exceptions.throwIfPresent();
+ return null;
+ }
+ }
+ });
+ }
+
+ public void enableEarlyIfRequested() {
+ if (!isPluginPreferred() && shouldEnableImmediately()) {
+ this.enable();
+ }
+ }
+
+ public void enableBeforePlugins() {
+ if (!isPluginPreferred()) {
+ this.enable();
+ }
+ }
+
+ public void enableAfterPlugins(final Server server) {
+ final boolean isPluginPreferred = isPluginPreferred();
+ final boolean isPluginEnabled = isPluginEnabled(server);
+ if (!isPluginPreferred || !isPluginEnabled) {
+ if (isPluginPreferred && !this.enabled) {
+ this.logger.log(Level.INFO, "The spark plugin has been preferred but was not loaded. The bundled spark profiler will be enabled instead.");
+ }
+ this.enable();
+ }
+ }
+
+ private void enable() {
+ if (!this.enabled) {
+ if (GlobalConfiguration.get().spark.enabled) {
+ this.enabled = true;
+ this.spark.enable();
+ } else {
+ if (!this.disabledInConfigurationWarningLogged) {
+ this.logger.log(Level.INFO, "The spark profiler will not be enabled because it is currently disabled in the configuration.");
+ this.disabledInConfigurationWarningLogged = true;
+ }
+ }
+ }
+ }
+
+ public void disable() {
+ if (this.enabled) {
+ this.spark.disable();
+ this.enabled = false;
+ }
+ }
+
+ public void registerCommandBeforePlugins(final Server server) {
+ if (!isPluginPreferred()) {
+ this.registerCommand(server);
+ }
+ }
+
+ public void registerCommandAfterPlugins(final Server server) {
+ if ((!isPluginPreferred() || !isPluginEnabled(server)) && server.getCommandMap().getCommand(COMMAND_NAME) == null) {
+ this.registerCommand(server);
+ }
+ }
+
+ private void registerCommand(final Server server) {
+ server.getCommandMap().register(COMMAND_NAME, "paper", new CommandImpl(COMMAND_NAME, this.spark.getPermissions()));
+ }
+
+ public void tickStart() {
+ this.spark.onServerTickStart();
+ }
+
+ public void tickEnd(final double duration) {
+ this.spark.onServerTickEnd(duration);
+ }
+
+ void executeCommand(final CommandSender sender, final String[] args) {
+ this.spark.executeCommand(sender, args);
+ }
+
+ List<String> tabComplete(final CommandSender sender, final String[] args) {
+ return this.spark.tabComplete(sender, args);
+ }
+
+ public static boolean isPluginPreferred() {
+ return Boolean.getBoolean(PREFER_SPARK_PLUGIN_PROPERTY);
+ }
+
+ private static boolean isPluginEnabled(final Server server) {
+ return server.getPluginManager().isPluginEnabled(ID);
+ }
+
+ private static boolean shouldEnableImmediately() {
+ return GlobalConfiguration.get().spark.enableImmediately;
+ }
+
+ public static final class CommandImpl extends Command {
+ CommandImpl(final String name, final Collection<String> permissions) {
+ super(name);
+ this.setPermission(String.join(";", permissions));
+ }
+
+ @Override
+ public boolean execute(final CommandSender sender, final String commandLabel, final String[] args) {
+ final SparksFly spark = ((CraftServer) sender.getServer()).spark;
+ if (spark.enabled) {
+ spark.executeCommand(sender, args);
+ } else {
+ sender.sendMessage(Component.text("The spark profiler is currently disabled.", TextColor.color(SPARK_YELLOW)));
+ }
+ return true;
+ }
+
+ @Override
+ public List<String> tabComplete(final CommandSender sender, final String alias, final String[] args) throws IllegalArgumentException {
+ final SparksFly spark = ((CraftServer) sender.getServer()).spark;
+ if (spark.enabled) {
+ return spark.tabComplete(sender, args);
+ }
+ return List.of();
+ }
+ }
+}
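Whether the bundled module or a standalone spark plugin wins is decided per JVM launch: isPluginPreferred above reads the paper.preferSparkPlugin system property. A sketch of that check in isolation (property name taken from the class above):

    // Sketch: the bundled-vs-plugin decision hinges on one JVM flag, read the
    // same way SparksFly.isPluginPreferred() reads it. Launching the server with
    // -Dpaper.preferSparkPlugin=true would make a standalone spark plugin win.
    final class PreferSparkSketch {
        public static void main(String[] args) {
            boolean preferPlugin = Boolean.getBoolean("paper.preferSparkPlugin"); // false unless the -D flag is set
            System.out.println(preferPlugin ? "standalone spark plugin preferred" : "bundled spark module used");
        }
    }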
diff --git a/src/main/java/io/papermc/paper/plugin/provider/source/FileProviderSource.java b/src/main/java/io/papermc/paper/plugin/provider/source/FileProviderSource.java
index 6b8ed8a0baaf4a57d20e57cec3400af5561ddd79..48604e7f96adc9e226e034054c5e2bad0b024eb5 100644
--- a/src/main/java/io/papermc/paper/plugin/provider/source/FileProviderSource.java
+++ b/src/main/java/io/papermc/paper/plugin/provider/source/FileProviderSource.java
@@ -1,6 +1,9 @@
package io.papermc.paper.plugin.provider.source;

+import com.mojang.logging.LogUtils;
+import io.papermc.paper.SparksFly;
import io.papermc.paper.plugin.PluginInitializerManager;
+import io.papermc.paper.plugin.configuration.PluginMeta;
import io.papermc.paper.plugin.entrypoint.EntrypointHandler;
import io.papermc.paper.plugin.provider.type.PluginFileType;
import org.bukkit.plugin.InvalidPluginException;
@@ -17,12 +20,14 @@ import java.nio.file.attribute.BasicFileAttributes;
import java.util.Set;
import java.util.function.Function;
import java.util.jar.JarFile;
+import org.slf4j.Logger;

/**
* Loads a plugin provider at the given plugin jar file path.
*/
public class FileProviderSource implements ProviderSource<Path, Path> {

+ private static final Logger LOGGER = LogUtils.getClassLogger();
private final Function<Path, String> contextChecker;
private final boolean applyRemap;

@@ -82,6 +87,12 @@ public class FileProviderSource implements ProviderSource<Path, Path> {
);
}

+ final PluginMeta config = type.getConfig(file);
+ if ((config.getName().equals("spark") && config.getMainClass().equals("me.lucko.spark.bukkit.BukkitSparkPlugin")) && !SparksFly.isPluginPreferred()) {
+ LOGGER.info("The spark plugin will not be loaded as this server bundles the spark profiler.");
+ return;
+ }
+
type.register(entrypointHandler, file, context);
}

diff --git a/src/main/java/net/minecraft/server/MinecraftServer.java b/src/main/java/net/minecraft/server/MinecraftServer.java
index aa38e3b297209cc121e7dc7a6ac9588c18bf9357..696d075ca2883f3c37e35f983c4d020e5db89d16 100644
--- a/src/main/java/net/minecraft/server/MinecraftServer.java
+++ b/src/main/java/net/minecraft/server/MinecraftServer.java
@@ -751,6 +751,8 @@ public abstract class MinecraftServer extends ReentrantBlockableEventLoop<TickTa
// Paper end - Configurable player collision

this.server.enablePlugins(org.bukkit.plugin.PluginLoadOrder.POSTWORLD);
+ this.server.spark.registerCommandBeforePlugins(this.server); // Paper - spark
+ this.server.spark.enableAfterPlugins(this.server); // Paper - spark
if (io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper != null) io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper.pluginsEnabled(); // Paper - Remap plugins
io.papermc.paper.command.brigadier.PaperCommands.INSTANCE.setValid(); // Paper - reset invalid state for event fire below
io.papermc.paper.plugin.lifecycle.event.LifecycleEventRunner.INSTANCE.callReloadableRegistrarEvent(io.papermc.paper.plugin.lifecycle.event.types.LifecycleEvents.COMMANDS, io.papermc.paper.command.brigadier.PaperCommands.INSTANCE, org.bukkit.plugin.Plugin.class, io.papermc.paper.plugin.lifecycle.event.registrar.ReloadableRegistrarEvent.Cause.INITIAL); // Paper - call commands event for regular plugins
@@ -1037,6 +1039,7 @@ public abstract class MinecraftServer extends ReentrantBlockableEventLoop<TickTa
MinecraftServer.LOGGER.info("Stopping server");
Commands.COMMAND_SENDING_POOL.shutdownNow(); // Paper - Perf: Async command map building; Shutdown and don't bother finishing
MinecraftTimings.stopServer(); // Paper
+ this.server.spark.disable(); // Paper - spark
// CraftBukkit start
if (this.server != null) {
this.server.disablePlugins();
@@ -1226,6 +1229,7 @@ public abstract class MinecraftServer extends ReentrantBlockableEventLoop<TickTa
// tasks are default scheduled at -1 + delay, and first tick will tick at 1
final long actualDoneTimeMs = System.currentTimeMillis() - org.bukkit.craftbukkit.Main.BOOT_TIME.toEpochMilli(); // Paper - Add total time
LOGGER.info("Done ({})! For help, type \"help\"", String.format(java.util.Locale.ROOT, "%.3fs", actualDoneTimeMs / 1000.00D)); // Paper - Add total time
+ this.server.spark.enableBeforePlugins(); // Paper - spark
org.spigotmc.WatchdogThread.tick();
// Paper end - Improved Watchdog Support
org.spigotmc.WatchdogThread.hasStarted = true; // Paper
@@ -1584,6 +1588,7 @@ public abstract class MinecraftServer extends ReentrantBlockableEventLoop<TickTa
});
isOversleep = false;MinecraftTimings.serverOversleep.stopTiming();
// Paper end
+ this.server.spark.tickStart(); // Paper - spark
new com.destroystokyo.paper.event.server.ServerTickStartEvent(this.tickCount+1).callEvent(); // Paper - Server Tick Events

++this.tickCount;
@@ -1626,6 +1631,7 @@ public abstract class MinecraftServer extends ReentrantBlockableEventLoop<TickTa
long endTime = System.nanoTime();
long remaining = (TICK_TIME - (endTime - lastTick)) - catchupTime;
new com.destroystokyo.paper.event.server.ServerTickEndEvent(this.tickCount, ((double)(endTime - lastTick) / 1000000D), remaining).callEvent();
+ this.server.spark.tickEnd(((double)(endTime - lastTick) / 1000000D)); // Paper - spark
// Paper end - Server Tick Events
this.profiler.push("tallying");
long j = Util.getNanos() - i;
diff --git a/src/main/java/net/minecraft/server/dedicated/DedicatedServer.java b/src/main/java/net/minecraft/server/dedicated/DedicatedServer.java
index d43b98bdfcb00603737a309c0fb7793d42289b8c..dd56c8e041116ef3602a9f89c998c8208ab89b51 100644
--- a/src/main/java/net/minecraft/server/dedicated/DedicatedServer.java
+++ b/src/main/java/net/minecraft/server/dedicated/DedicatedServer.java
@@ -226,6 +226,7 @@ public class DedicatedServer extends MinecraftServer implements ServerInterface
this.paperConfigurations.initializeGlobalConfiguration(this.registryAccess());
this.paperConfigurations.initializeWorldDefaultsConfiguration(this.registryAccess());
// Paper end - initialize global and world-defaults configuration
+ this.server.spark.enableEarlyIfRequested(); // Paper - spark
// Paper start - fix converting txt to json file; convert old users earlier after PlayerList creation but before file load/save
if (this.convertOldUsers()) {
this.getProfileCache().save(false); // Paper
@@ -235,6 +236,7 @@ public class DedicatedServer extends MinecraftServer implements ServerInterface
org.spigotmc.WatchdogThread.doStart(org.spigotmc.SpigotConfig.timeoutTime, org.spigotmc.SpigotConfig.restartOnCrash); // Paper - start watchdog thread
thread.start(); // Paper - Enhance console tab completions for brigadier commands; start console thread after MinecraftServer.console & PaperConfig are initialized
io.papermc.paper.command.PaperCommands.registerCommands(this); // Paper - setup /paper command
+ this.server.spark.registerCommandBeforePlugins(this.server); // Paper - spark
com.destroystokyo.paper.Metrics.PaperMetrics.startMetrics(); // Paper - start metrics
com.destroystokyo.paper.VersionHistoryManager.INSTANCE.getClass(); // Paper - load version history now

diff --git a/src/main/java/org/bukkit/craftbukkit/CraftServer.java b/src/main/java/org/bukkit/craftbukkit/CraftServer.java
index 27f47383c8065cc3b421001028b6cba528c38865..c7df339aeb62ee627edaf1bb4c8474b61e357ba6 100644
--- a/src/main/java/org/bukkit/craftbukkit/CraftServer.java
+++ b/src/main/java/org/bukkit/craftbukkit/CraftServer.java
@@ -309,6 +309,7 @@ public final class CraftServer implements Server {
public static Exception excessiveVelEx; // Paper - Velocity warnings
private final io.papermc.paper.logging.SysoutCatcher sysoutCatcher = new io.papermc.paper.logging.SysoutCatcher(); // Paper
private final io.papermc.paper.potion.PaperPotionBrewer potionBrewer; // Paper - Custom Potion Mixes
+ public final io.papermc.paper.SparksFly spark; // Paper - spark

// Paper start - Folia region threading API
private final io.papermc.paper.threadedregions.scheduler.FallbackRegionScheduler regionizedScheduler = new io.papermc.paper.threadedregions.scheduler.FallbackRegionScheduler();
@@ -475,6 +476,7 @@ public final class CraftServer implements Server {
}
this.potionBrewer = new io.papermc.paper.potion.PaperPotionBrewer(console); // Paper - custom potion mixes
datapackManager = new io.papermc.paper.datapack.PaperDatapackManager(console.getPackRepository()); // Paper
+ this.spark = new io.papermc.paper.SparksFly(this); // Paper - spark
}

public boolean getCommandBlockOverride(String command) {
@@ -1101,6 +1103,7 @@ public final class CraftServer implements Server {
this.reloadData();
org.spigotmc.SpigotConfig.registerCommands(); // Spigot
io.papermc.paper.command.PaperCommands.registerCommands(this.console); // Paper
+ this.spark.registerCommandBeforePlugins(this); // Paper - spark
this.overrideAllCommandBlockCommands = this.commandsConfiguration.getStringList("command-block-overrides").contains("*");
this.ignoreVanillaPermissions = this.commandsConfiguration.getBoolean("ignore-vanilla-permissions");

@@ -1129,6 +1132,7 @@ public final class CraftServer implements Server {
this.loadPlugins();
this.enablePlugins(PluginLoadOrder.STARTUP);
this.enablePlugins(PluginLoadOrder.POSTWORLD);
+ this.spark.registerCommandAfterPlugins(this); // Paper - spark
if (io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper != null) io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper.pluginsEnabled(); // Paper - Remap plugins
// Paper start - brigadier command API
io.papermc.paper.command.brigadier.PaperCommands.INSTANCE.setValid(); // to clear invalid state for event fire below