From a616fad6174bf29ea0b345756ffe99b80ed4486c Mon Sep 17 00:00:00 2001
From: Jason Penilla <11360596+jpenilla@users.noreply.github.com>
Date: Tue, 23 Apr 2024 11:44:28 -0700
Subject: [PATCH] Add plugin remapping patches

---
 build.gradle.kts                              |    8 +-
 .../Add-WorldEdit-plugin-flag-test-task.patch |   48 +
 .../unapplied/server/Plugin-remapping.patch   | 1925 +++++++++++++++++
 ...ion-calls-in-plugins-using-internals.patch |  763 +++++++
 .../build-replace-use-of-shadow-plugin.patch  |   44 +
 5 files changed, 2787 insertions(+), 1 deletion(-)
 create mode 100644 patches/unapplied/server/Add-WorldEdit-plugin-flag-test-task.patch
 create mode 100644 patches/unapplied/server/Plugin-remapping.patch
 create mode 100644 patches/unapplied/server/Remap-reflection-calls-in-plugins-using-internals.patch
 create mode 100644 patches/unapplied/server/build-replace-use-of-shadow-plugin.patch

diff --git a/build.gradle.kts b/build.gradle.kts
index 803c1d07c8..1c49486c94 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -11,7 +11,7 @@ plugins {
     java
     `maven-publish`
     id("com.github.johnrengelman.shadow") version "8.1.1" apply false
-    id("io.papermc.paperweight.core") version "1.5.15"
+    id("io.papermc.paperweight.core") version "1.6.0-SNAPSHOT"
 }
 
 allprojects {
@@ -166,6 +166,7 @@ if (providers.gradleProperty("updatingMinecraft").getOrElse("false").toBoolean()
         appliedPatches = file("patches/server")
         unappliedPatches = file("patches/unapplied/server")
         applyTaskName = "applyServerPatches"
+        patchedDir = "Paper-Server"
     }
 }
 
@@ -183,6 +184,9 @@ abstract class RebasePatches : BaseTask() {
     @get:Input
     abstract val applyTaskName: Property<String>
 
+    @get:Input
+    abstract val patchedDir: Property<String>
+
     private fun unapplied(): List<Path> =
         unappliedPatches.path.listDirectoryEntries("*.patch").sortedBy { it.name }
 
@@ -245,6 +249,8 @@ abstract class RebasePatches : BaseTask() {
             }
         }
 
+        // Delete the build file before resetting the AM session in case it has compilation errors
+        projectDir.path.resolve(patchedDir.get()).resolve("build.gradle.kts").deleteIfExists()
         // Apply again to reset the am session (so it ends on the failed patch, to allow us to rebuild after fixing it)
         val apply2 = ProcessBuilder()
             .directory(projectDir.path)
diff --git a/patches/unapplied/server/Add-WorldEdit-plugin-flag-test-task.patch b/patches/unapplied/server/Add-WorldEdit-plugin-flag-test-task.patch
new file mode 100644
index 0000000000..99aa44bb55
--- /dev/null
+++ b/patches/unapplied/server/Add-WorldEdit-plugin-flag-test-task.patch
@@ -0,0 +1,48 @@
+From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
+From: Jason Penilla <11360596+jpenilla@users.noreply.github.com>
+Date: Mon, 12 Feb 2024 22:19:03 -0700
+Subject: [PATCH] Add WorldEdit plugin flag test task
+
+
+diff --git a/build.gradle.kts b/build.gradle.kts
+index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644
+--- a/build.gradle.kts
++++ b/build.gradle.kts
+@@ -0,0 +0,0 @@
+ import io.papermc.paperweight.util.*
++import xyz.jpenilla.runpaper.task.RunServer
+ 
+ plugins {
+     java
+     `maven-publish`
+     id("com.github.johnrengelman.shadow")
++    id("xyz.jpenilla.run-paper") version "2.2.3" apply false
+ }
+ 
+ val log4jPlugins = sourceSets.create("log4jPlugins")
+@@ -0,0 +0,0 @@ tasks.registerRunTask("runDevServer") {
+     jvmArgs("-DPaper.pushPaperAssetsRoot=true")
+ }
+ 
++tasks.register<RunServer>("runWithPlugins") {
++    version.set(providers.gradleProperty("mcVersion"))
++    runJar(rootProject.tasks.named("createMojmapBundlerJar").flatMap { it.outputZip })
++    downloadPlugins {
++        
url("https://ci.enginehub.org/repository/download/bt10/23382:id/worldedit-bukkit-7.2.19-dist.jar?branch=version/7.2.x&guest=1") ++ url("https://www.patreon.com/file?h=89830486&i=15920178") ++ url("https://dev.bukkit.org/projects/grief-prevention/files/4433061/download") ++ github("EssentialsX", "Essentials", "2.20.1", "EssentialsX-2.20.1.jar") ++ hangar("squaremap", "1.2.3") ++ hangar("FancyHolograms", "2.0.5") ++ hangar("Chunky", "1.3.92") ++ hangar("Multiverse-Core", "4.3.12") ++ // Once they fix package parsing ++ // hangar("Denizen", "1.3.0-Build-1803") ++ // hangar("GrimAnticheat", "2.3.58") ++ // hangar("ProtocolLib", "5.1.0") ++ } ++ runDirectory.set(rootProject.layout.projectDirectory.dir("run")) ++} + tasks.registerRunTask("runBundler") { + description = "Spin up a test server from the Mojang mapped bundler jar" + classpath(rootProject.tasks.named("createMojmapBundlerJar").flatMap { it.outputZip }) diff --git a/patches/unapplied/server/Plugin-remapping.patch b/patches/unapplied/server/Plugin-remapping.patch new file mode 100644 index 0000000000..14efb64e56 --- /dev/null +++ b/patches/unapplied/server/Plugin-remapping.patch @@ -0,0 +1,1925 @@ +From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 +From: Jason Penilla <11360596+jpenilla@users.noreply.github.com> +Date: Sat, 29 Oct 2022 15:22:32 -0700 +Subject: [PATCH] Plugin remapping + +Co-authored-by: Nassim Jahnke + +diff --git a/build.gradle.kts b/build.gradle.kts +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644 +--- a/build.gradle.kts ++++ b/build.gradle.kts +@@ -0,0 +0,0 @@ dependencies { + testImplementation("org.mockito:mockito-core:5.11.0") + testImplementation("org.ow2.asm:asm-tree:9.7") + testImplementation("org.junit-pioneer:junit-pioneer:2.2.0") // Paper - CartesianTest ++ implementation("net.neoforged:AutoRenamingTool:2.0.3") // Paper - remap plugins ++ implementation("net.neoforged:srgutils:1.0.9") // Paper - remap plugins - bump transitive of ART ++} ++ ++paperweight { ++ craftBukkitPackageVersion.set("v1_20_R3") // also needs to be updated in MappingEnvironment + } + +-val craftbukkitPackageVersion = "1_20_R3" // Paper + tasks.jar { + archiveClassifier.set("dev") + +@@ -0,0 +0,0 @@ tasks.jar { + "Specification-Vendor" to "Bukkit Team", + "Git-Branch" to gitBranch, // Paper + "Git-Commit" to gitHash, // Paper +- "CraftBukkit-Package-Version" to craftbukkitPackageVersion, // Paper ++ "CraftBukkit-Package-Version" to paperweight.craftBukkitPackageVersion.get(), // Paper + ) + for (tld in setOf("net", "com", "org")) { + attributes("$tld/bukkit", "Sealed" to true) +@@ -0,0 +0,0 @@ tasks.compileTestJava { + + publishing { + publications.create("maven") { +- artifact(tasks.shadowJar) +- } +-} +- +-relocation { +- // Order matters here - e.g. 
craftbukkit proper must be relocated before any of the libs are relocated into the cb package +- relocate("org.bukkit.craftbukkit" to "org.bukkit.craftbukkit.v$craftbukkitPackageVersion") { +- exclude("org.bukkit.craftbukkit.Main*") + } + } + + tasks.shadowJar { + configurations = listOf(project.configurations.vanillaServer.get(), alsoShade) +- archiveClassifier.set("mojang-mapped") +- +- for (relocation in relocation.relocations.get()) { +- relocate(relocation.fromPackage, relocation.toPackage) { +- for (exclude in relocation.excludes) { +- exclude(exclude) +- } +- } +- } + } + + // Paper start +@@ -0,0 +0,0 @@ tasks.check { + } + // Paper end + +-// Paper start - include reobf mappings in jar for stacktrace deobfuscation +-val includeMappings = tasks.register("includeMappings") { +- inputJar.set(tasks.fixJarForReobf.flatMap { it.outputJar }) +- mappings.set(tasks.reobfJar.flatMap { it.mappingsFile }) +- mappingsDest.set("META-INF/mappings/reobf.tiny") +-} +- +-tasks.reobfJar { +- inputJar.set(includeMappings.flatMap { it.outputJar }) +-} +-// Paper end - include reobf mappings in jar for stacktrace deobfuscation +- + tasks.test { + exclude("org/bukkit/craftbukkit/inventory/ItemStack*Test.class") + useJUnitPlatform() +@@ -0,0 +0,0 @@ val runtimeClasspathWithoutVanillaServer = configurations.runtimeClasspath.flatM + runtime.filterNot { it.asFile.absolutePath == vanilla } + } + +-tasks.registerRunTask("runShadow") { +- description = "Spin up a test server from the shadowJar archiveFile" +- classpath(tasks.shadowJar.flatMap { it.archiveFile }) ++tasks.registerRunTask("runServer") { ++ description = "Spin up a test server from the Mojang mapped server jar" ++ classpath(tasks.includeMappings.flatMap { it.outputJar }) + classpath(runtimeClasspathWithoutVanillaServer) + } + +-tasks.registerRunTask("runReobf") { ++tasks.registerRunTask("runReobfServer") { + description = "Spin up a test server from the reobfJar output jar" + classpath(tasks.reobfJar.flatMap { it.outputJar }) + classpath(runtimeClasspathWithoutVanillaServer) + } + +-tasks.registerRunTask("runDev") { +- description = "Spin up a non-relocated Mojang-mapped test server" ++tasks.registerRunTask("runDevServer") { ++ description = "Spin up a test server without assembling a jar" + classpath(sourceSets.main.map { it.runtimeClasspath }) + jvmArgs("-DPaper.pushPaperAssetsRoot=true") + } ++ ++tasks.registerRunTask("runBundler") { ++ description = "Spin up a test server from the Mojang mapped bundler jar" ++ classpath(rootProject.tasks.named("createMojmapBundlerJar").flatMap { it.outputZip }) ++ mainClass.set(null as String?) ++} ++tasks.registerRunTask("runReobfBundler") { ++ description = "Spin up a test server from the reobf bundler jar" ++ classpath(rootProject.tasks.named("createReobfBundlerJar").flatMap { it.outputZip }) ++ mainClass.set(null as String?) ++} ++tasks.registerRunTask("runPaperclip") { ++ description = "Spin up a test server from the Mojang mapped Paperclip jar" ++ classpath(rootProject.tasks.named("createMojmapPaperclipJar").flatMap { it.outputZip }) ++ mainClass.set(null as String?) ++} ++tasks.registerRunTask("runReobfPaperclip") { ++ description = "Spin up a test server from the reobf Paperclip jar" ++ classpath(rootProject.tasks.named("createReobfPaperclipJar").flatMap { it.outputZip }) ++ mainClass.set(null as String?) 
++} +diff --git a/src/main/java/io/papermc/paper/plugin/PluginInitializerManager.java b/src/main/java/io/papermc/paper/plugin/PluginInitializerManager.java +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644 +--- a/src/main/java/io/papermc/paper/plugin/PluginInitializerManager.java ++++ b/src/main/java/io/papermc/paper/plugin/PluginInitializerManager.java +@@ -0,0 +0,0 @@ import io.papermc.paper.plugin.entrypoint.Entrypoint; + import io.papermc.paper.plugin.entrypoint.LaunchEntryPointHandler; + import io.papermc.paper.plugin.provider.PluginProvider; + import io.papermc.paper.plugin.provider.type.paper.PaperPluginParent; ++import io.papermc.paper.pluginremap.PluginRemapper; + import joptsimple.OptionSet; + import net.minecraft.server.dedicated.DedicatedServer; + import org.bukkit.configuration.file.YamlConfiguration; +-import org.bukkit.craftbukkit.CraftServer; + import org.jetbrains.annotations.NotNull; + import org.jetbrains.annotations.Nullable; + import org.slf4j.Logger; +@@ -0,0 +0,0 @@ public class PluginInitializerManager { + private static PluginInitializerManager impl; + private final Path pluginDirectory; + private final Path updateDirectory; ++ public final io.papermc.paper.pluginremap.@org.checkerframework.checker.nullness.qual.MonotonicNonNull PluginRemapper pluginRemapper; // Paper + + PluginInitializerManager(final Path pluginDirectory, final Path updateDirectory) { + this.pluginDirectory = pluginDirectory; + this.updateDirectory = updateDirectory; ++ this.pluginRemapper = Boolean.getBoolean("paper.disable-plugin-rewriting") ++ ? null ++ : PluginRemapper.create(pluginDirectory); + } + + private static PluginInitializerManager parse(@NotNull final OptionSet minecraftOptionSet) throws Exception { +@@ -0,0 +0,0 @@ public class PluginInitializerManager { + public static void load(OptionSet optionSet) throws Exception { + // We have to load the bukkit configuration inorder to get the update folder location. 
+ io.papermc.paper.plugin.PluginInitializerManager pluginSystem = io.papermc.paper.plugin.PluginInitializerManager.init(optionSet); ++ if (pluginSystem.pluginRemapper != null) pluginSystem.pluginRemapper.loadingPlugins(); + + // Register the default plugin directory + io.papermc.paper.plugin.util.EntrypointUtil.registerProvidersFromSource(io.papermc.paper.plugin.provider.source.DirectoryProviderSource.INSTANCE, pluginSystem.pluginDirectoryPath()); +diff --git a/src/main/java/io/papermc/paper/plugin/provider/source/DirectoryProviderSource.java b/src/main/java/io/papermc/paper/plugin/provider/source/DirectoryProviderSource.java +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644 +--- a/src/main/java/io/papermc/paper/plugin/provider/source/DirectoryProviderSource.java ++++ b/src/main/java/io/papermc/paper/plugin/provider/source/DirectoryProviderSource.java +@@ -0,0 +0,0 @@ import org.slf4j.Logger; + public class DirectoryProviderSource implements ProviderSource> { + + public static final DirectoryProviderSource INSTANCE = new DirectoryProviderSource(); +- private static final FileProviderSource FILE_PROVIDER_SOURCE = new FileProviderSource("Directory '%s'"::formatted); ++ private static final FileProviderSource FILE_PROVIDER_SOURCE = new FileProviderSource("Directory '%s'"::formatted, false); // Paper - Remap plugins + private static final Logger LOGGER = LogUtils.getClassLogger(); + + @Override +@@ -0,0 +0,0 @@ public class DirectoryProviderSource implements ProviderSource> + LOGGER.error("Error preparing plugin context: " + e.getMessage(), e); + } + }); ++ // Paper start - Remap plugins ++ if (io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper != null) { ++ return io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper.rewritePluginDirectory(files); ++ } ++ // Paper end - Remap plugins + return files; + } + +diff --git a/src/main/java/io/papermc/paper/plugin/provider/source/FileProviderSource.java b/src/main/java/io/papermc/paper/plugin/provider/source/FileProviderSource.java +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644 +--- a/src/main/java/io/papermc/paper/plugin/provider/source/FileProviderSource.java ++++ b/src/main/java/io/papermc/paper/plugin/provider/source/FileProviderSource.java +@@ -0,0 +0,0 @@ import java.util.jar.JarFile; + public class FileProviderSource implements ProviderSource { + + private final Function contextChecker; ++ private final boolean applyRemap; + +- public FileProviderSource(Function contextChecker) { ++ public FileProviderSource(Function contextChecker, boolean applyRemap) { + this.contextChecker = contextChecker; ++ this.applyRemap = applyRemap; ++ } ++ ++ public FileProviderSource(Function contextChecker) { ++ this(contextChecker, true); + } + + @Override +@@ -0,0 +0,0 @@ public class FileProviderSource implements ProviderSource { + } catch (Exception exception) { + throw new RuntimeException(source + " failed to update!", exception); + } ++ // Paper start - Remap plugins ++ if (this.applyRemap && io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper != null) { ++ context = io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper.rewritePlugin(context); ++ } ++ // Paper end - Remap plugins + return context; + } + +diff --git a/src/main/java/io/papermc/paper/plugin/provider/source/PluginFlagProviderSource.java 
b/src/main/java/io/papermc/paper/plugin/provider/source/PluginFlagProviderSource.java +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644 +--- a/src/main/java/io/papermc/paper/plugin/provider/source/PluginFlagProviderSource.java ++++ b/src/main/java/io/papermc/paper/plugin/provider/source/PluginFlagProviderSource.java +@@ -0,0 +0,0 @@ import java.util.List; + public class PluginFlagProviderSource implements ProviderSource, List> { + + public static final PluginFlagProviderSource INSTANCE = new PluginFlagProviderSource(); +- private static final FileProviderSource FILE_PROVIDER_SOURCE = new FileProviderSource("File '%s' specified through 'add-plugin' argument"::formatted); ++ private static final FileProviderSource FILE_PROVIDER_SOURCE = new FileProviderSource("File '%s' specified through 'add-plugin' argument"::formatted, false); + private static final Logger LOGGER = LogUtils.getClassLogger(); + + @Override +@@ -0,0 +0,0 @@ public class PluginFlagProviderSource implements ProviderSource, List + LOGGER.error("Error preparing plugin context: " + e.getMessage(), e); + } + } ++ // Paper start - Remap plugins ++ if (io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper != null && !files.isEmpty()) { ++ return io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper.rewriteExtraPlugins(files); ++ } ++ // Paper end - Remap plugins + return files; + } + +diff --git a/src/main/java/io/papermc/paper/plugin/provider/type/PluginFileType.java b/src/main/java/io/papermc/paper/plugin/provider/type/PluginFileType.java +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644 +--- a/src/main/java/io/papermc/paper/plugin/provider/type/PluginFileType.java ++++ b/src/main/java/io/papermc/paper/plugin/provider/type/PluginFileType.java +@@ -0,0 +0,0 @@ import java.util.jar.JarFile; + */ + public abstract class PluginFileType { + ++ public static final String PAPER_PLUGIN_YML = "paper-plugin.yml"; + private static final List CONFIG_TYPES = new ArrayList<>(); + +- public static final PluginFileType PAPER = new PluginFileType<>("paper-plugin.yml", PaperPluginParent.FACTORY) { ++ public static final PluginFileType PAPER = new PluginFileType<>(PAPER_PLUGIN_YML, PaperPluginParent.FACTORY) { + @Override + protected void register(EntrypointHandler entrypointHandler, PaperPluginParent parent) { + PaperPluginParent.PaperBootstrapProvider bootstrapPluginProvider = null; +diff --git a/src/main/java/io/papermc/paper/pluginremap/DebugLogger.java b/src/main/java/io/papermc/paper/pluginremap/DebugLogger.java +new file mode 100644 +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 +--- /dev/null ++++ b/src/main/java/io/papermc/paper/pluginremap/DebugLogger.java +@@ -0,0 +0,0 @@ ++package io.papermc.paper.pluginremap; ++ ++import java.io.IOException; ++import java.io.PrintWriter; ++import java.nio.file.Files; ++import java.nio.file.Path; ++import java.util.function.Consumer; ++import org.checkerframework.checker.nullness.qual.NonNull; ++import org.checkerframework.checker.nullness.qual.Nullable; ++import org.checkerframework.framework.qual.DefaultQualifier; ++ ++/** ++ * {@link PrintWriter}-backed logger implementation for use with {@link net.neoforged.art.api.Renamer} which ++ * only opens the backing writer and logs messages when the {@link #DEBUG} system property ++ * is set to true. 
++ */
++@DefaultQualifier(NonNull.class)
++final class DebugLogger implements Consumer<String>, AutoCloseable {
++    private static final boolean DEBUG = Boolean.getBoolean("paper.remap-debug");
++
++    private final @Nullable PrintWriter writer;
++
++    DebugLogger(final Path logFile) {
++        try {
++            this.writer = createWriter(logFile);
++        } catch (final IOException ex) {
++            throw new RuntimeException("Failed to initialize DebugLogger for file '" + logFile + "'", ex);
++        }
++    }
++
++    @Override
++    public void accept(final String line) {
++        this.useWriter(writer -> writer.println(line));
++    }
++
++    @Override
++    public void close() {
++        this.useWriter(PrintWriter::close);
++    }
++
++    private void useWriter(final Consumer<PrintWriter> op) {
++        final @Nullable PrintWriter writer = this.writer;
++        if (writer != null) {
++            op.accept(writer);
++        }
++    }
++
++    Consumer<String> debug() {
++        return line -> this.accept("[debug]: " + line);
++    }
++
++    static DebugLogger forOutputFile(final Path outputFile) {
++        return new DebugLogger(outputFile.resolveSibling(outputFile.getFileName() + ".log"));
++    }
++
++    private static @Nullable PrintWriter createWriter(final Path logFile) throws IOException {
++        if (!DEBUG) {
++            return null;
++        }
++        if (!Files.exists(logFile.getParent())) {
++            Files.createDirectories(logFile.getParent());
++        }
++        return new PrintWriter(logFile.toFile());
++    }
++}
+diff --git a/src/main/java/io/papermc/paper/pluginremap/InsertManifestAttribute.java b/src/main/java/io/papermc/paper/pluginremap/InsertManifestAttribute.java
+new file mode 100644
+index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000
+--- /dev/null
++++ b/src/main/java/io/papermc/paper/pluginremap/InsertManifestAttribute.java
+@@ -0,0 +0,0 @@
++package io.papermc.paper.pluginremap;
++
++import java.io.ByteArrayInputStream;
++import java.io.ByteArrayOutputStream;
++import java.io.IOException;
++import java.util.Collection;
++import java.util.List;
++import java.util.jar.Attributes;
++import java.util.jar.Manifest;
++import net.neoforged.art.api.Transformer;
++
++final class InsertManifestAttribute implements Transformer {
++    static final String PAPERWEIGHT_NAMESPACE_MANIFEST_KEY = "paperweight-mappings-namespace";
++    static final String MOJANG_NAMESPACE = "mojang";
++    static final String MOJANG_PLUS_YARN_NAMESPACE = "mojang+yarn";
++    static final String SPIGOT_NAMESPACE = "spigot";
++
++    private final String mainAttributesKey;
++    private final String namespace;
++    private final boolean createIfMissing;
++    private volatile boolean visitedManifest = false;
++
++    static Transformer addNamespaceManifestAttribute(final String namespace) {
++        return new InsertManifestAttribute(PAPERWEIGHT_NAMESPACE_MANIFEST_KEY, namespace, true);
++    }
++
++    InsertManifestAttribute(
++        final String mainAttributesKey,
++        final String namespace,
++        final boolean createIfMissing
++    ) {
++        this.mainAttributesKey = mainAttributesKey;
++        this.namespace = namespace;
++        this.createIfMissing = createIfMissing;
++    }
++
++    @Override
++    public ManifestEntry process(final ManifestEntry entry) {
++        this.visitedManifest = true;
++        try {
++            final Manifest manifest = new Manifest(new ByteArrayInputStream(entry.getData()));
++            manifest.getMainAttributes().putValue(this.mainAttributesKey, this.namespace);
++            final ByteArrayOutputStream out = new ByteArrayOutputStream();
++            manifest.write(out);
++            return ManifestEntry.create(Entry.STABLE_TIMESTAMP, out.toByteArray());
++        } catch (final IOException e) {
++            throw new RuntimeException("Failed to modify manifest", e);
++        
} ++ } ++ ++ @Override ++ public Collection getExtras() { ++ if (!this.visitedManifest && this.createIfMissing) { ++ final Manifest manifest = new Manifest(); ++ manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0"); ++ manifest.getMainAttributes().putValue(this.mainAttributesKey, this.namespace); ++ final ByteArrayOutputStream out = new ByteArrayOutputStream(); ++ try { ++ manifest.write(out); ++ } catch (final IOException e) { ++ throw new RuntimeException("Failed to write manifest", e); ++ } ++ return List.of(ManifestEntry.create(Entry.STABLE_TIMESTAMP, out.toByteArray())); ++ } ++ return Transformer.super.getExtras(); ++ } ++} +diff --git a/src/main/java/io/papermc/paper/pluginremap/PluginRemapper.java b/src/main/java/io/papermc/paper/pluginremap/PluginRemapper.java +new file mode 100644 +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 +--- /dev/null ++++ b/src/main/java/io/papermc/paper/pluginremap/PluginRemapper.java +@@ -0,0 +0,0 @@ ++package io.papermc.paper.pluginremap; ++ ++import com.google.common.util.concurrent.ThreadFactoryBuilder; ++import com.mojang.logging.LogUtils; ++import io.papermc.paper.util.AtomicFiles; ++import io.papermc.paper.util.MappingEnvironment; ++import io.papermc.paper.util.concurrent.ScalingThreadPool; ++import java.io.BufferedInputStream; ++import java.io.IOException; ++import java.io.InputStream; ++import java.nio.file.FileSystem; ++import java.nio.file.FileSystems; ++import java.nio.file.Files; ++import java.nio.file.Path; ++import java.util.ArrayList; ++import java.util.HashMap; ++import java.util.List; ++import java.util.concurrent.CompletableFuture; ++import java.util.concurrent.CompletionException; ++import java.util.concurrent.Executor; ++import java.util.concurrent.ExecutorService; ++import java.util.concurrent.ThreadPoolExecutor; ++import java.util.concurrent.TimeUnit; ++import java.util.function.Predicate; ++import java.util.function.Supplier; ++import java.util.jar.Manifest; ++import java.util.stream.Stream; ++import net.minecraft.DefaultUncaughtExceptionHandlerWithName; ++import net.minecraft.util.ExceptionCollector; ++import net.neoforged.art.api.Renamer; ++import net.neoforged.art.api.SignatureStripperConfig; ++import net.neoforged.art.api.Transformer; ++import net.neoforged.srgutils.IMappingFile; ++import org.checkerframework.checker.nullness.qual.NonNull; ++import org.checkerframework.checker.nullness.qual.Nullable; ++import org.checkerframework.framework.qual.DefaultQualifier; ++import org.slf4j.Logger; ++ ++import static io.papermc.paper.pluginremap.InsertManifestAttribute.addNamespaceManifestAttribute; ++ ++@DefaultQualifier(NonNull.class) ++public final class PluginRemapper { ++ public static final boolean DEBUG_LOGGING = Boolean.getBoolean("Paper.PluginRemapperDebug"); ++ private static final String PAPER_REMAPPED = ".paper-remapped"; ++ private static final String UNKNOWN_ORIGIN = "unknown-origin"; ++ private static final String EXTRA_PLUGINS = "extra-plugins"; ++ private static final String REMAP_CLASSPATH = "remap-classpath"; ++ private static final String REVERSED_MAPPINGS = "mappings/reversed"; ++ private static final Logger LOGGER = LogUtils.getClassLogger(); ++ ++ private final ExecutorService threadPool; ++ private final ReobfServer reobf; ++ private final RemappedPluginIndex remappedPlugins; ++ private final RemappedPluginIndex extraPlugins; ++ private final UnknownOriginRemappedPluginIndex unknownOrigin; ++ private @Nullable CompletableFuture reversedMappings; ++ 
++    public PluginRemapper(final Path pluginsDir) {
++        this.threadPool = createThreadPool();
++        final CompletableFuture<IMappingFile> mappings = CompletableFuture.supplyAsync(PluginRemapper::loadReobfMappings, this.threadPool);
++        final Path remappedPlugins = pluginsDir.resolve(PAPER_REMAPPED);
++        this.reversedMappings = this.reversedMappingsFuture(() -> mappings, remappedPlugins, this.threadPool);
++        this.reobf = new ReobfServer(remappedPlugins.resolve(REMAP_CLASSPATH), mappings, this.threadPool);
++        this.remappedPlugins = new RemappedPluginIndex(remappedPlugins, false);
++        this.extraPlugins = new RemappedPluginIndex(this.remappedPlugins.dir().resolve(EXTRA_PLUGINS), true);
++        this.unknownOrigin = new UnknownOriginRemappedPluginIndex(this.remappedPlugins.dir().resolve(UNKNOWN_ORIGIN));
++    }
++
++    public static @Nullable PluginRemapper create(final Path pluginsDir) {
++        if (MappingEnvironment.reobf() || !MappingEnvironment.hasMappings()) {
++            return null;
++        }
++
++        return new PluginRemapper(pluginsDir);
++    }
++
++    public void shutdown() {
++        this.threadPool.shutdown();
++        this.save(true);
++        boolean didShutdown;
++        try {
++            didShutdown = this.threadPool.awaitTermination(3L, TimeUnit.SECONDS);
++        } catch (final InterruptedException ex) {
++            didShutdown = false;
++        }
++        if (!didShutdown) {
++            this.threadPool.shutdownNow();
++        }
++    }
++
++    public void save(final boolean clean) {
++        this.remappedPlugins.write();
++        this.extraPlugins.write();
++        this.unknownOrigin.write(clean);
++    }
++
++    // Called on startup and reload
++    public void loadingPlugins() {
++        if (this.reversedMappings == null) {
++            this.reversedMappings = this.reversedMappingsFuture(
++                () -> CompletableFuture.supplyAsync(PluginRemapper::loadReobfMappings, this.threadPool),
++                this.remappedPlugins.dir(),
++                this.threadPool
++            );
++        }
++    }
++
++    // Called after all plugins enabled during startup/reload
++    public void pluginsEnabled() {
++        this.reversedMappings = null;
++        this.save(false);
++    }
++
++    public Path rewritePlugin(final Path plugin) {
++        // Already remapped
++        if (plugin.getParent().equals(this.remappedPlugins.dir())
++            || plugin.getParent().equals(this.extraPlugins.dir())) {
++            return plugin;
++        }
++
++        final @Nullable Path cached = this.unknownOrigin.getIfPresent(plugin);
++        if (cached != null) {
++            if (DEBUG_LOGGING) {
++                LOGGER.info("Plugin '{}' has not changed since last remap.", plugin);
++            }
++            return cached;
++        }
++
++        return this.remapPlugin(this.unknownOrigin, plugin).join();
++    }
++
++    public List<Path> rewriteExtraPlugins(final List<Path> plugins) {
++        final @Nullable List<Path> allCached = this.extraPlugins.getAllIfPresent(plugins);
++        if (allCached != null) {
++            if (DEBUG_LOGGING) {
++                LOGGER.info("All extra plugins have a remapped variant cached.");
++            }
++            return allCached;
++        }
++
++        final List<CompletableFuture<Path>> tasks = new ArrayList<>();
++        for (final Path file : plugins) {
++            final @Nullable Path cached = this.extraPlugins.getIfPresent(file);
++            if (cached != null) {
++                if (DEBUG_LOGGING) {
++                    LOGGER.info("Extra plugin '{}' has not changed since last remap.", file);
++                }
++                tasks.add(CompletableFuture.completedFuture(cached));
++                continue;
++            }
++            tasks.add(this.remapPlugin(this.extraPlugins, file));
++        }
++        return waitForAll(tasks);
++    }
++
++    public List<Path> rewritePluginDirectory(final List<Path> jars) {
++        final @Nullable List<Path> remappedJars = this.remappedPlugins.getAllIfPresent(jars);
++        if (remappedJars != null) {
++            if (DEBUG_LOGGING) {
++                LOGGER.info("All plugins have a remapped variant cached.");
++            }
++            return remappedJars;
++        }
++
++        final List<CompletableFuture<Path>> tasks = new ArrayList<>();
++        for (final Path file : jars) {
++            final @Nullable Path existingFile = this.remappedPlugins.getIfPresent(file);
++            if (existingFile != null) {
++                if (DEBUG_LOGGING) {
++                    LOGGER.info("Plugin '{}' has not changed since last remap.", file);
++                }
++                tasks.add(CompletableFuture.completedFuture(existingFile));
++                continue;
++            }
++
++            tasks.add(this.remapPlugin(this.remappedPlugins, file));
++        }
++        return waitForAll(tasks);
++    }
++
++    private static IMappingFile reverse(final IMappingFile mappings) {
++        if (DEBUG_LOGGING) {
++            LOGGER.info("Reversing mappings...");
++        }
++        final long start = System.currentTimeMillis();
++        final IMappingFile reversed = mappings.reverse();
++        if (DEBUG_LOGGING) {
++            LOGGER.info("Done reversing mappings in {}ms.", System.currentTimeMillis() - start);
++        }
++        return reversed;
++    }
++
++    private CompletableFuture<IMappingFile> reversedMappingsFuture(
++        final Supplier<CompletableFuture<IMappingFile>> mappingsFuture,
++        final Path remappedPlugins,
++        final Executor executor
++    ) {
++        return CompletableFuture.supplyAsync(() -> {
++            try {
++                final String mappingsHash = MappingEnvironment.mappingsHash();
++                final String fName = mappingsHash + ".tiny";
++                final Path reversedMappings1 = remappedPlugins.resolve(REVERSED_MAPPINGS);
++                final Path file = reversedMappings1.resolve(fName);
++                if (Files.isDirectory(reversedMappings1)) {
++                    if (Files.isRegularFile(file)) {
++                        return CompletableFuture.completedFuture(
++                            loadMappings("Reversed", Files.newInputStream(file))
++                        );
++                    } else {
++                        for (final Path oldFile : list(reversedMappings1, Files::isRegularFile)) {
++                            Files.delete(oldFile);
++                        }
++                    }
++                } else {
++                    Files.createDirectories(reversedMappings1);
++                }
++                return mappingsFuture.get().thenApply(loadedMappings -> {
++                    final IMappingFile reversed = reverse(loadedMappings);
++                    try {
++                        AtomicFiles.atomicWrite(file, writeTo -> {
++                            reversed.write(writeTo, IMappingFile.Format.TINY, false);
++                        });
++                    } catch (final IOException e) {
++                        throw new RuntimeException("Failed to write reversed mappings", e);
++                    }
++                    return reversed;
++                });
++            } catch (final IOException e) {
++                throw new RuntimeException("Failed to load reversed mappings", e);
++            }
++        }, executor).thenCompose(f -> f);
++    }
++
++    /**
++     * Returns the remapped file if remapping was necessary, otherwise the input file.
++ * ++ * @param index remapped plugin index ++ * @param inputFile input file ++ * @return remapped file, or inputFile if no remapping was necessary ++ */ ++ private CompletableFuture remapPlugin(final RemappedPluginIndex index, final Path inputFile) { ++ final Path destination = index.input(inputFile); ++ ++ try (final FileSystem fs = FileSystems.newFileSystem(inputFile, new HashMap<>())) { ++ // Leave dummy files if no remapping is required, so that we can check if they exist without copying the whole file ++ /*if (Files.exists(fs.getPath(PluginFileType.PAPER_PLUGIN_YML))) { // TODO Uncomment on release ++ if (DEBUG_LOGGING) { ++ LOGGER.info("Plugin '{}' is a Paper plugin, no remapping necessary.", inputFile); ++ } ++ index.skip(inputFile); ++ return CompletableFuture.completedFuture(inputFile); ++ } else {*/ ++ // Check for paperweight mojang mapped marker ++ final Path manifestPath = fs.getPath("META-INF/MANIFEST.MF"); ++ if (Files.exists(manifestPath)) { ++ final Manifest manifest; ++ try (final InputStream in = new BufferedInputStream(Files.newInputStream(manifestPath))) { ++ manifest = new Manifest(in); ++ } ++ final String ns = manifest.getMainAttributes().getValue(InsertManifestAttribute.PAPERWEIGHT_NAMESPACE_MANIFEST_KEY); ++ if (ns != null && (ns.equals(InsertManifestAttribute.MOJANG_NAMESPACE) || ns.equals(InsertManifestAttribute.MOJANG_PLUS_YARN_NAMESPACE))) { ++ if (DEBUG_LOGGING) { ++ LOGGER.info("Plugin '{}' is already Mojang mapped.", inputFile); ++ } ++ index.skip(inputFile); ++ return CompletableFuture.completedFuture(inputFile); ++ } ++ } ++ //} ++ } catch (final IOException ex) { ++ throw new RuntimeException("Failed to open plugin jar " + inputFile, ex); ++ } ++ ++ return this.reobf.remapped().thenApplyAsync(reobfServer -> { ++ LOGGER.info("Remapping plugin '{}'...", inputFile); ++ final long start = System.currentTimeMillis(); ++ try (final DebugLogger logger = DebugLogger.forOutputFile(destination)) { ++ try (final Renamer renamer = Renamer.builder() ++ .add(Transformer.renamerFactory(this.mappings(), false)) ++ .add(addNamespaceManifestAttribute(InsertManifestAttribute.MOJANG_PLUS_YARN_NAMESPACE)) ++ .add(Transformer.signatureStripperFactory(SignatureStripperConfig.ALL)) ++ .lib(reobfServer.toFile()) ++ .threads(1) ++ .logger(logger) ++ .debug(logger.debug()) ++ .build()) { ++ renamer.run(inputFile.toFile(), destination.toFile()); ++ } ++ } catch (final Exception ex) { ++ throw new RuntimeException("Failed to remap plugin jar '" + inputFile + "'", ex); ++ } ++ LOGGER.info("Done remapping plugin '{}' in {}ms.", inputFile, System.currentTimeMillis() - start); ++ return destination; ++ }, this.threadPool); ++ } ++ ++ private IMappingFile mappings() { ++ final @Nullable CompletableFuture mappings = this.reversedMappings; ++ if (mappings == null) { ++ return this.reversedMappingsFuture( ++ () -> CompletableFuture.supplyAsync(PluginRemapper::loadReobfMappings, Runnable::run), ++ this.remappedPlugins.dir(), ++ Runnable::run ++ ).join(); ++ } ++ return mappings.join(); ++ } ++ ++ private static IMappingFile loadReobfMappings() { ++ return loadMappings("Reobf", MappingEnvironment.mappingsStream()); ++ } ++ ++ private static IMappingFile loadMappings(final String name, final InputStream stream) { ++ try (stream) { ++ if (DEBUG_LOGGING) { ++ LOGGER.info("Loading {} mappings...", name); ++ } ++ final long start = System.currentTimeMillis(); ++ final IMappingFile load = IMappingFile.load(stream); ++ if (DEBUG_LOGGING) { ++ LOGGER.info("Done loading {} mappings in {}ms.", name, 
System.currentTimeMillis() - start);
++            }
++            return load;
++        } catch (final IOException ex) {
++            throw new RuntimeException("Failed to load " + name + " mappings", ex);
++        }
++    }
++
++    static List<Path> list(final Path dir, final Predicate<Path> filter) {
++        try (final Stream<Path> stream = Files.list(dir)) {
++            return stream.filter(filter).toList();
++        } catch (final IOException ex) {
++            throw new RuntimeException("Failed to list directory '" + dir + "'", ex);
++        }
++    }
++
++    private static List<Path> waitForAll(final List<CompletableFuture<Path>> tasks) {
++        final ExceptionCollector<Exception> collector = new ExceptionCollector<>();
++        final List<Path> ret = new ArrayList<>();
++        for (final CompletableFuture<Path> task : tasks) {
++            try {
++                ret.add(task.join());
++            } catch (final CompletionException ex) {
++                collector.add(ex);
++            }
++        }
++        try {
++            collector.throwIfPresent();
++        } catch (final Exception ex) {
++            throw new RuntimeException("Encountered exception remapping plugins", ex);
++        }
++        return ret;
++    }
++
++    private static ThreadPoolExecutor createThreadPool() {
++        return new ThreadPoolExecutor(
++            0,
++            4,
++            5L,
++            TimeUnit.SECONDS,
++            ScalingThreadPool.createUnboundedQueue(),
++            new ThreadFactoryBuilder()
++                .setNameFormat("Paper Plugin Remapper Thread - %1$d")
++                .setUncaughtExceptionHandler(new DefaultUncaughtExceptionHandlerWithName(LOGGER))
++                .build(),
++            ScalingThreadPool.defaultReEnqueuePolicy()
++        );
++    }
++}
+diff --git a/src/main/java/io/papermc/paper/pluginremap/RemappedPluginIndex.java b/src/main/java/io/papermc/paper/pluginremap/RemappedPluginIndex.java
+new file mode 100644
+index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000
+--- /dev/null
++++ b/src/main/java/io/papermc/paper/pluginremap/RemappedPluginIndex.java
+@@ -0,0 +0,0 @@
++package io.papermc.paper.pluginremap;
++
++import com.google.gson.Gson;
++import com.google.gson.GsonBuilder;
++import com.mojang.logging.LogUtils;
++import io.papermc.paper.util.Hashing;
++import io.papermc.paper.util.MappingEnvironment;
++import java.io.BufferedReader;
++import java.io.BufferedWriter;
++import java.io.IOException;
++import java.nio.charset.StandardCharsets;
++import java.nio.file.Files;
++import java.nio.file.Path;
++import java.util.ArrayList;
++import java.util.HashMap;
++import java.util.HashSet;
++import java.util.Iterator;
++import java.util.List;
++import java.util.Map;
++import java.util.Set;
++import java.util.function.Function;
++import org.checkerframework.checker.nullness.qual.NonNull;
++import org.checkerframework.checker.nullness.qual.Nullable;
++import org.checkerframework.framework.qual.DefaultQualifier;
++import org.slf4j.Logger;
++import org.spongepowered.configurate.loader.AtomicFiles;
++
++@DefaultQualifier(NonNull.class)
++class RemappedPluginIndex {
++    private static final Logger LOGGER = LogUtils.getLogger();
++    private static final Gson GSON = new GsonBuilder()
++        .setPrettyPrinting()
++        .create();
++    private static final String INDEX_FILE_NAME = "index.json";
++
++    protected final State state;
++    private final Path dir;
++    private final Path indexFile;
++    private final boolean handleDuplicateFileNames;
++
++    // todo maybe hash remapped variants to ensure they haven't changed? probably unneeded
++    static final class State {
++        final Map<String, String> hashes = new HashMap<>();
++        final Set<String> skippedHashes = new HashSet<>();
++        private final String mappingsHash = MappingEnvironment.mappingsHash();
++    }
++
++    RemappedPluginIndex(final Path dir, final boolean handleDuplicateFileNames) {
++        this.dir = dir;
++        this.handleDuplicateFileNames = handleDuplicateFileNames;
++        if (!Files.exists(this.dir)) {
++            try {
++                Files.createDirectories(this.dir);
++            } catch (final IOException ex) {
++                throw new RuntimeException(ex);
++            }
++        }
++
++        this.indexFile = dir.resolve(INDEX_FILE_NAME);
++        if (Files.isRegularFile(this.indexFile)) {
++            try {
++                this.state = this.readIndex();
++            } catch (final IOException e) {
++                throw new RuntimeException(e);
++            }
++        } else {
++            this.state = new State();
++        }
++    }
++
++    private State readIndex() throws IOException {
++        final State state;
++        try (final BufferedReader reader = Files.newBufferedReader(this.indexFile)) {
++            state = GSON.fromJson(reader, State.class);
++        }
++
++        // If mappings have changed, delete all cached files and create a new index
++        if (!state.mappingsHash.equals(MappingEnvironment.mappingsHash())) {
++            for (final String fileName : state.hashes.values()) {
++                Files.deleteIfExists(this.dir.resolve(fileName));
++            }
++            return new State();
++        }
++        return state;
++    }
++
++    Path dir() {
++        return this.dir;
++    }
++
++    /**
++     * Returns a list of cached paths if all of the input paths are present in the cache.
++     * The returned list may contain paths from different directories.
++     *
++     * @param paths plugin jar paths to check
++     * @return null if any of the paths are not present in the cache, otherwise a list of the cached paths
++     */
++    @Nullable List<Path> getAllIfPresent(final List<Path> paths) {
++        final Map<Path, String> hashCache = new HashMap<>();
++        final Function<Path, String> inputFileHash = path -> hashCache.computeIfAbsent(path, Hashing::sha256);
++
++        // Delete cached entries we no longer need
++        final Iterator<Map.Entry<String, String>> iterator = this.state.hashes.entrySet().iterator();
++        while (iterator.hasNext()) {
++            final Map.Entry<String, String> entry = iterator.next();
++            final String inputHash = entry.getKey();
++            final String fileName = entry.getValue();
++            if (paths.stream().anyMatch(path -> inputFileHash.apply(path).equals(inputHash))) {
++                // Hash is used, keep it
++                continue;
++            }
++
++            iterator.remove();
++            try {
++                Files.deleteIfExists(this.dir.resolve(fileName));
++            } catch (final IOException ex) {
++                throw new RuntimeException(ex);
++            }
++        }
++
++        // Also clear hashes of skipped files
++        this.state.skippedHashes.removeIf(hash -> paths.stream().noneMatch(path -> inputFileHash.apply(path).equals(hash)));
++
++        final List<Path> ret = new ArrayList<>();
++        for (final Path path : paths) {
++            final String inputHash = inputFileHash.apply(path);
++            if (this.state.skippedHashes.contains(inputHash)) {
++                // Add the original path
++                ret.add(path);
++                continue;
++            }
++
++            final @Nullable Path cached = this.getIfPresent(inputHash);
++            if (cached == null) {
++                // Missing the remapped file
++                return null;
++            }
++            ret.add(cached);
++        }
++        return ret;
++    }
++
++    private String createCachedFileName(final Path in) {
++        if (this.handleDuplicateFileNames) {
++            final String fileName = in.getFileName().toString();
++            final int i = fileName.lastIndexOf(".jar");
++            return fileName.substring(0, i) + "-" + System.currentTimeMillis() + ".jar";
++        }
++        return in.getFileName().toString();
++    }
++
++    /**
++     * Returns the given path if the file was previously skipped for remapping, otherwise the cached path or null.
++ * ++ * @param in input file ++ * @return {@code in} if already remapped, the cached path if present, otherwise null ++ */ ++ @Nullable Path getIfPresent(final Path in) { ++ final String inHash = Hashing.sha256(in); ++ if (this.state.skippedHashes.contains(inHash)) { ++ return in; ++ } ++ return this.getIfPresent(inHash); ++ } ++ ++ /** ++ * Returns the cached path if a remapped file is present for the given hash, otherwise null. ++ * ++ * @param inHash hash of the input file ++ * @return the cached path if present, otherwise null ++ * @see #getIfPresent(Path) ++ */ ++ protected @Nullable Path getIfPresent(final String inHash) { ++ final @Nullable String fileName = this.state.hashes.get(inHash); ++ if (fileName == null) { ++ return null; ++ } ++ ++ final Path path = this.dir.resolve(fileName); ++ if (Files.exists(path)) { ++ return path; ++ } ++ return null; ++ } ++ ++ Path input(final Path in) { ++ return this.input(in, Hashing.sha256(in)); ++ } ++ ++ /** ++ * Marks the given file as skipped for remapping. ++ * ++ * @param in input file ++ */ ++ void skip(final Path in) { ++ this.state.skippedHashes.add(Hashing.sha256(in)); ++ } ++ ++ protected Path input(final Path in, final String hashString) { ++ final String name = this.createCachedFileName(in); ++ this.state.hashes.put(hashString, name); ++ return this.dir.resolve(name); ++ } ++ ++ void write() { ++ try (final BufferedWriter writer = AtomicFiles.atomicBufferedWriter(this.indexFile, StandardCharsets.UTF_8)) { ++ GSON.toJson(this.state, writer); ++ } catch (final IOException ex) { ++ LOGGER.warn("Failed to write index file '{}'", this.indexFile, ex); ++ } ++ } ++} +diff --git a/src/main/java/io/papermc/paper/pluginremap/ReobfServer.java b/src/main/java/io/papermc/paper/pluginremap/ReobfServer.java +new file mode 100644 +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 +--- /dev/null ++++ b/src/main/java/io/papermc/paper/pluginremap/ReobfServer.java +@@ -0,0 +0,0 @@ ++package io.papermc.paper.pluginremap; ++ ++import com.mojang.logging.LogUtils; ++import io.papermc.paper.util.AtomicFiles; ++import io.papermc.paper.util.MappingEnvironment; ++import java.io.IOException; ++import java.net.URISyntaxException; ++import java.nio.file.Files; ++import java.nio.file.Path; ++import java.util.concurrent.CompletableFuture; ++import java.util.concurrent.Executor; ++import net.neoforged.art.api.Renamer; ++import net.neoforged.art.api.Transformer; ++import net.neoforged.art.internal.RenamerImpl; ++import net.neoforged.srgutils.IMappingFile; ++import org.checkerframework.checker.nullness.qual.NonNull; ++import org.checkerframework.framework.qual.DefaultQualifier; ++import org.slf4j.Logger; ++ ++import static io.papermc.paper.pluginremap.InsertManifestAttribute.addNamespaceManifestAttribute; ++ ++@DefaultQualifier(NonNull.class) ++final class ReobfServer { ++ private static final Logger LOGGER = LogUtils.getClassLogger(); ++ ++ private final Path remapClasspathDir; ++ private final CompletableFuture load; ++ ++ ReobfServer(final Path remapClasspathDir, final CompletableFuture mappings, final Executor executor) { ++ this.remapClasspathDir = remapClasspathDir; ++ if (this.mappingsChanged()) { ++ this.load = mappings.thenAcceptAsync(this::remap, executor); ++ } else { ++ if (PluginRemapper.DEBUG_LOGGING) { ++ LOGGER.info("Have cached reobf server for current mappings."); ++ } ++ this.load = CompletableFuture.completedFuture(null); ++ } ++ } ++ ++ CompletableFuture remapped() { ++ return this.load.thenApply($ -> 
this.remappedPath()); ++ } ++ ++ private Path remappedPath() { ++ return this.remapClasspathDir.resolve(MappingEnvironment.mappingsHash() + ".jar"); ++ } ++ ++ private boolean mappingsChanged() { ++ return !Files.exists(this.remappedPath()); ++ } ++ ++ private void remap(final IMappingFile mappings) { ++ try { ++ if (!Files.exists(this.remapClasspathDir)) { ++ Files.createDirectories(this.remapClasspathDir); ++ } ++ for (final Path file : PluginRemapper.list(this.remapClasspathDir, Files::isRegularFile)) { ++ Files.delete(file); ++ } ++ } catch (final IOException ex) { ++ throw new RuntimeException(ex); ++ } ++ ++ LOGGER.info("Remapping server..."); ++ final long startRemap = System.currentTimeMillis(); ++ try (final DebugLogger log = DebugLogger.forOutputFile(this.remappedPath())) { ++ AtomicFiles.atomicWrite(this.remappedPath(), writeTo -> { ++ try (final RenamerImpl renamer = (RenamerImpl) Renamer.builder() ++ .logger(log) ++ .debug(log.debug()) ++ .threads(1) ++ .add(Transformer.renamerFactory(mappings, false)) ++ .add(addNamespaceManifestAttribute(InsertManifestAttribute.SPIGOT_NAMESPACE)) ++ .build()) { ++ renamer.run(serverJar().toFile(), writeTo.toFile(), true); ++ } ++ }); ++ } catch (final Exception ex) { ++ throw new RuntimeException("Failed to remap server jar", ex); ++ } ++ LOGGER.info("Done remapping server in {}ms.", System.currentTimeMillis() - startRemap); ++ } ++ ++ private static Path serverJar() { ++ try { ++ return Path.of(ReobfServer.class.getProtectionDomain().getCodeSource().getLocation().toURI()); ++ } catch (final URISyntaxException ex) { ++ throw new RuntimeException(ex); ++ } ++ } ++} +diff --git a/src/main/java/io/papermc/paper/pluginremap/UnknownOriginRemappedPluginIndex.java b/src/main/java/io/papermc/paper/pluginremap/UnknownOriginRemappedPluginIndex.java +new file mode 100644 +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 +--- /dev/null ++++ b/src/main/java/io/papermc/paper/pluginremap/UnknownOriginRemappedPluginIndex.java +@@ -0,0 +0,0 @@ ++package io.papermc.paper.pluginremap; ++ ++import com.mojang.logging.LogUtils; ++import io.papermc.paper.util.Hashing; ++import java.io.IOException; ++import java.nio.file.Files; ++import java.nio.file.Path; ++import java.util.HashSet; ++import java.util.Iterator; ++import java.util.Map; ++import java.util.Set; ++import org.checkerframework.checker.nullness.qual.NonNull; ++import org.checkerframework.checker.nullness.qual.Nullable; ++import org.checkerframework.framework.qual.DefaultQualifier; ++import org.slf4j.Logger; ++ ++@DefaultQualifier(NonNull.class) ++final class UnknownOriginRemappedPluginIndex extends RemappedPluginIndex { ++ private static final Logger LOGGER = LogUtils.getLogger(); ++ ++ private final Set used = new HashSet<>(); ++ ++ UnknownOriginRemappedPluginIndex(final Path dir) { ++ super(dir, true); ++ } ++ ++ @Override ++ @Nullable Path getIfPresent(final Path in) { ++ final String hash = Hashing.sha256(in); ++ if (this.state.skippedHashes.contains(hash)) { ++ return in; ++ } ++ ++ final @Nullable Path path = super.getIfPresent(hash); ++ if (path != null) { ++ this.used.add(hash); ++ } ++ return path; ++ } ++ ++ @Override ++ Path input(final Path in) { ++ final String hash = Hashing.sha256(in); ++ this.used.add(hash); ++ return super.input(in, hash); ++ } ++ ++ void write(final boolean clean) { ++ if (!clean) { ++ super.write(); ++ return; ++ } ++ ++ final Iterator> it = this.state.hashes.entrySet().iterator(); ++ while (it.hasNext()) { ++ final Map.Entry 
next = it.next(); ++ if (this.used.contains(next.getKey())) { ++ continue; ++ } ++ ++ // Remove unused mapped file ++ it.remove(); ++ final Path file = this.dir().resolve(next.getValue()); ++ try { ++ Files.deleteIfExists(file); ++ } catch (final IOException ex) { ++ LOGGER.warn("Failed to delete no longer needed cached jar '{}'", file, ex); ++ } ++ } ++ super.write(); ++ } ++} +diff --git a/src/main/java/io/papermc/paper/util/AtomicFiles.java b/src/main/java/io/papermc/paper/util/AtomicFiles.java +new file mode 100644 +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 +--- /dev/null ++++ b/src/main/java/io/papermc/paper/util/AtomicFiles.java +@@ -0,0 +0,0 @@ ++package io.papermc.paper.util; ++ ++import java.io.IOException; ++import java.nio.file.AccessDeniedException; ++import java.nio.file.AtomicMoveNotSupportedException; ++import java.nio.file.CopyOption; ++import java.nio.file.Files; ++import java.nio.file.Path; ++import java.nio.file.StandardCopyOption; ++import java.util.concurrent.ThreadLocalRandom; ++import java.util.function.Consumer; ++import org.spongepowered.configurate.util.CheckedConsumer; ++ ++// Stripped down version of https://github.com/jpenilla/squaremap/blob/7d7994b4096e5fc61364ea2d87e9aa4e14edf5c6/common/src/main/java/xyz/jpenilla/squaremap/common/util/FileUtil.java ++public final class AtomicFiles { ++ ++ private AtomicFiles() { ++ } ++ ++ public static void atomicWrite(final Path path, final CheckedConsumer op) throws IOException { ++ final Path tmp = tempFile(path); ++ ++ try { ++ op.accept(tmp); ++ atomicMove(tmp, path, true); ++ } catch (final IOException ex) { ++ try { ++ Files.deleteIfExists(tmp); ++ } catch (final IOException ex1) { ++ ex.addSuppressed(ex1); ++ } ++ throw ex; ++ } ++ } ++ ++ private static Path tempFile(final Path path) { ++ return path.resolveSibling("." + System.nanoTime() + "-" + ThreadLocalRandom.current().nextInt() + "-" + path.getFileName().toString() + ".tmp"); } ++ ++ @SuppressWarnings("BusyWait") // not busy waiting ++ public static void atomicMove(final Path from, final Path to, final boolean replaceExisting) throws IOException { ++ final int maxRetries = 2; ++ ++ try { ++ atomicMoveIfPossible(from, to, replaceExisting); ++ } catch (final AccessDeniedException ex) { ++ // Sometimes because of file locking this will fail... Let's just try again and hope for the best ++ // Thanks Windows! ++ int retries = 1; ++ while (true) { ++ try { ++ // Pause for a bit ++ Thread.sleep(10L * retries); ++ atomicMoveIfPossible(from, to, replaceExisting); ++ break; // success ++ } catch (final AccessDeniedException ex1) { ++ ex.addSuppressed(ex1); ++ if (retries == maxRetries) { ++ throw ex; ++ } ++ } catch (final InterruptedException interruptedException) { ++ ex.addSuppressed(interruptedException); ++ Thread.currentThread().interrupt(); ++ throw ex; ++ } ++ ++retries; ++ } ++ } ++ } ++ ++ private static void atomicMoveIfPossible(final Path from, final Path to, final boolean replaceExisting) throws IOException { ++ final CopyOption[] options = replaceExisting ++ ? new CopyOption[]{StandardCopyOption.ATOMIC_MOVE, StandardCopyOption.REPLACE_EXISTING} ++ : new CopyOption[]{StandardCopyOption.ATOMIC_MOVE}; ++ ++ try { ++ Files.move(from, to, options); ++ } catch (final AtomicMoveNotSupportedException ex) { ++ Files.move(from, to, replaceExisting ? 
new CopyOption[]{StandardCopyOption.REPLACE_EXISTING} : new CopyOption[]{}); ++ } ++ } ++ ++ private static Consumer sneaky(final CheckedConsumer consumer) { ++ return t -> { ++ try { ++ consumer.accept(t); ++ } catch (final Throwable thr) { ++ rethrow(thr); ++ } ++ }; ++ } ++ ++ @SuppressWarnings("unchecked") ++ private static RuntimeException rethrow(final Throwable t) throws X { ++ throw (X) t; ++ } ++} +diff --git a/src/main/java/io/papermc/paper/util/Hashing.java b/src/main/java/io/papermc/paper/util/Hashing.java +new file mode 100644 +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 +--- /dev/null ++++ b/src/main/java/io/papermc/paper/util/Hashing.java +@@ -0,0 +0,0 @@ ++package io.papermc.paper.util; ++ ++import com.google.common.hash.HashCode; ++import java.io.IOException; ++import java.io.InputStream; ++import java.nio.file.Files; ++import java.nio.file.Path; ++import java.util.Locale; ++import org.apache.commons.io.IOUtils; ++import org.checkerframework.checker.nullness.qual.NonNull; ++import org.checkerframework.framework.qual.DefaultQualifier; ++ ++@DefaultQualifier(NonNull.class) ++public final class Hashing { ++ private Hashing() { ++ } ++ ++ /** ++ * Hash the provided {@link InputStream} using SHA-256. Stream will be closed. ++ * ++ * @param stream input stream ++ * @return SHA-256 hash string ++ */ ++ public static String sha256(final InputStream stream) { ++ try (stream) { ++ return com.google.common.hash.Hashing.sha256().hashBytes(IOUtils.toByteArray(stream)).toString().toUpperCase(Locale.ENGLISH); ++ } catch (final IOException ex) { ++ throw new RuntimeException("Failed to take hash of InputStream", ex); ++ } ++ } ++ ++ /** ++ * Hash the provided file using SHA-256. ++ * ++ * @param file file ++ * @return SHA-256 hash string ++ */ ++ public static String sha256(final Path file) { ++ if (!Files.isRegularFile(file)) { ++ throw new IllegalArgumentException("'" + file + "' is not a regular file!"); ++ } ++ final HashCode hash; ++ try { ++ hash = com.google.common.io.Files.asByteSource(file.toFile()).hash(com.google.common.hash.Hashing.sha256()); ++ } catch (final IOException ex) { ++ throw new RuntimeException("Failed to take hash of file '" + file + "'", ex); ++ } ++ return hash.toString().toUpperCase(Locale.ENGLISH); ++ } ++} +diff --git a/src/main/java/io/papermc/paper/util/MappingEnvironment.java b/src/main/java/io/papermc/paper/util/MappingEnvironment.java +new file mode 100644 +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 +--- /dev/null ++++ b/src/main/java/io/papermc/paper/util/MappingEnvironment.java +@@ -0,0 +0,0 @@ ++package io.papermc.paper.util; ++ ++import java.io.InputStream; ++import java.util.Objects; ++import java.util.jar.Manifest; ++import net.minecraft.world.entity.MobCategory; ++import org.checkerframework.checker.nullness.qual.NonNull; ++import org.checkerframework.checker.nullness.qual.Nullable; ++import org.checkerframework.framework.qual.DefaultQualifier; ++ ++@DefaultQualifier(NonNull.class) ++public final class MappingEnvironment { ++ private static final @Nullable String MAPPINGS_HASH = readMappingsHash(); ++ private static final boolean REOBF = checkReobf(); ++ ++ private MappingEnvironment() { ++ } ++ ++ public static boolean reobf() { ++ return REOBF; ++ } ++ ++ public static boolean hasMappings() { ++ return MAPPINGS_HASH != null; ++ } ++ ++ public static InputStream mappingsStream() { ++ return Objects.requireNonNull(mappingsStreamIfPresent(), "Missing 
mappings!"); ++ } ++ ++ public static @Nullable InputStream mappingsStreamIfPresent() { ++ return MappingEnvironment.class.getClassLoader().getResourceAsStream("META-INF/mappings/reobf.tiny"); ++ } ++ ++ public static String mappingsHash() { ++ return Objects.requireNonNull(MAPPINGS_HASH, "MAPPINGS_HASH"); ++ } ++ ++ private static @Nullable String readMappingsHash() { ++ final @Nullable Manifest manifest = JarManifests.manifest(MappingEnvironment.class); ++ if (manifest != null) { ++ final Object hash = manifest.getMainAttributes().getValue("Included-Mappings-Hash"); ++ if (hash != null) { ++ return hash.toString(); ++ } ++ } ++ ++ final @Nullable InputStream stream = mappingsStreamIfPresent(); ++ if (stream == null) { ++ return null; ++ } ++ return Hashing.sha256(stream); ++ } ++ ++ @SuppressWarnings("ConstantConditions") ++ private static boolean checkReobf() { ++ final Class clazz = MobCategory.class; ++ if (clazz.getSimpleName().equals("MobCategory")) { ++ return false; ++ } else if (clazz.getSimpleName().equals("EnumCreatureType")) { ++ return true; ++ } ++ throw new IllegalStateException(); ++ } ++} +diff --git a/src/main/java/io/papermc/paper/util/ObfHelper.java b/src/main/java/io/papermc/paper/util/ObfHelper.java +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644 +--- a/src/main/java/io/papermc/paper/util/ObfHelper.java ++++ b/src/main/java/io/papermc/paper/util/ObfHelper.java +@@ -0,0 +0,0 @@ public enum ObfHelper { + } + + private static @Nullable Set loadMappingsIfPresent() { +- try (final @Nullable InputStream mappingsInputStream = ObfHelper.class.getClassLoader().getResourceAsStream("META-INF/mappings/reobf.tiny")) { +- if (mappingsInputStream == null) { +- return null; +- } ++ if (!MappingEnvironment.hasMappings()) { ++ return null; ++ } ++ try (final InputStream mappingsInputStream = MappingEnvironment.mappingsStream()) { + final MemoryMappingTree tree = new MemoryMappingTree(); + MappingReader.read(new InputStreamReader(mappingsInputStream, StandardCharsets.UTF_8), MappingFormat.TINY_2_FILE, tree); + final Set classes = new HashSet<>(); +diff --git a/src/main/java/io/papermc/paper/util/concurrent/ScalingThreadPool.java b/src/main/java/io/papermc/paper/util/concurrent/ScalingThreadPool.java +new file mode 100644 +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 +--- /dev/null ++++ b/src/main/java/io/papermc/paper/util/concurrent/ScalingThreadPool.java +@@ -0,0 +0,0 @@ ++package io.papermc.paper.util.concurrent; ++ ++import java.util.concurrent.BlockingQueue; ++import java.util.concurrent.LinkedBlockingQueue; ++import java.util.concurrent.RejectedExecutionHandler; ++import java.util.concurrent.ThreadPoolExecutor; ++import java.util.concurrent.TimeUnit; ++import java.util.concurrent.atomic.AtomicInteger; ++ ++/** ++ * Utilities for scaling thread pools. 
++ * ++ * @see Java Scale First ExecutorService — A myth or a reality ++ */ ++public final class ScalingThreadPool { ++ private ScalingThreadPool() { ++ } ++ ++ public static RejectedExecutionHandler defaultReEnqueuePolicy() { ++ return reEnqueuePolicy(new ThreadPoolExecutor.AbortPolicy()); ++ } ++ ++ public static RejectedExecutionHandler reEnqueuePolicy(final RejectedExecutionHandler original) { ++ return new ReEnqueuePolicy(original); ++ } ++ ++ public static BlockingQueue createUnboundedQueue() { ++ return new Queue<>(); ++ } ++ ++ public static BlockingQueue createQueue(final int capacity) { ++ return new Queue<>(capacity); ++ } ++ ++ private static final class Queue extends LinkedBlockingQueue { ++ private final AtomicInteger idleThreads = new AtomicInteger(0); ++ ++ private Queue() { ++ super(); ++ } ++ ++ private Queue(final int capacity) { ++ super(capacity); ++ } ++ ++ @Override ++ public boolean offer(final E e) { ++ return this.idleThreads.get() > 0 && super.offer(e); ++ } ++ ++ @Override ++ public E take() throws InterruptedException { ++ this.idleThreads.incrementAndGet(); ++ try { ++ return super.take(); ++ } finally { ++ this.idleThreads.decrementAndGet(); ++ } ++ } ++ ++ @Override ++ public E poll(final long timeout, final TimeUnit unit) throws InterruptedException { ++ this.idleThreads.incrementAndGet(); ++ try { ++ return super.poll(timeout, unit); ++ } finally { ++ this.idleThreads.decrementAndGet(); ++ } ++ } ++ ++ @Override ++ public boolean add(final E e) { ++ return super.offer(e); ++ } ++ } ++ ++ private record ReEnqueuePolicy(RejectedExecutionHandler originalHandler) implements RejectedExecutionHandler { ++ @Override ++ public void rejectedExecution(final Runnable r, final ThreadPoolExecutor executor) { ++ if (!executor.getQueue().add(r)) { ++ this.originalHandler.rejectedExecution(r, executor); ++ } ++ } ++ } ++} +diff --git a/src/main/java/net/minecraft/server/MinecraftServer.java b/src/main/java/net/minecraft/server/MinecraftServer.java +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644 +--- a/src/main/java/net/minecraft/server/MinecraftServer.java ++++ b/src/main/java/net/minecraft/server/MinecraftServer.java +@@ -0,0 +0,0 @@ public abstract class MinecraftServer extends ReentrantBlockableEventLoop libraries; ++ private final List transformers; ++ private final SortedClassProvider sortedClassProvider; ++ private final List classProviders; ++ private final int threads; ++ private final Consumer logger; ++ private final Consumer debug; ++ private boolean setup = false; ++ private ClassProvider libraryClasses; ++ ++ RenamerImpl(List libraries, List transformers, SortedClassProvider sortedClassProvider, List classProviders, ++ int threads, Consumer logger, Consumer debug) { ++ this.libraries = libraries; ++ this.transformers = transformers; ++ this.sortedClassProvider = sortedClassProvider; ++ this.classProviders = Collections.unmodifiableList(classProviders); ++ this.threads = threads; ++ this.logger = logger; ++ this.debug = debug; ++ } ++ ++ private void setup() { ++ if (this.setup) ++ return; ++ ++ this.setup = true; ++ ++ ClassProvider.Builder libraryClassesBuilder = ClassProvider.builder().shouldCacheAll(true); ++ this.logger.accept("Adding Libraries to Inheritance"); ++ this.libraries.forEach(f -> libraryClassesBuilder.addLibrary(f.toPath())); ++ ++ this.libraryClasses = libraryClassesBuilder.build(); ++ } ++ ++ @Override ++ public void run(File input, File output) { ++ // Paper start - Add remappingSelf ++ 
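// A minimal usage sketch for the ScalingThreadPool utility introduced above (assumed usage, not
// taken from the patch): the custom queue rejects offers unless a worker is idle, so the executor
// grows to its maximum pool size before queueing, and the re-enqueue policy pushes rejected tasks
// back onto the queue once the pool is saturated.
import io.papermc.paper.util.concurrent.ScalingThreadPool;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

final class ScalingPoolSketch {
    static ThreadPoolExecutor newScalingPool(final int maxThreads) {
        return new ThreadPoolExecutor(
            0, maxThreads,                              // scale from zero up to maxThreads workers
            30L, TimeUnit.SECONDS,                      // idle workers are reclaimed after 30s
            ScalingThreadPool.createUnboundedQueue(),   // offer() fails while no worker is idle
            ScalingThreadPool.defaultReEnqueuePolicy()  // rejected tasks are re-added via add()
        );
    }
}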
this.run(input, output, false); ++ } ++ public void run(File input, File output, boolean remappingSelf) { ++ // Paper end ++ if (!this.setup) ++ this.setup(); ++ ++ if (Boolean.getBoolean(ProgressReporter.ENABLED_PROPERTY)) { ++ try { ++ PROGRESS.setMaxProgress(JarUtils.getFileCountInZip(input)); ++ } catch (IOException e) { ++ logger.accept("Failed to read zip file count: " + e); ++ } ++ } ++ ++ input = Objects.requireNonNull(input).getAbsoluteFile(); ++ output = Objects.requireNonNull(output).getAbsoluteFile(); ++ ++ if (!input.exists()) ++ throw new IllegalArgumentException("Input file not found: " + input.getAbsolutePath()); ++ ++ logger.accept("Reading Input: " + input.getAbsolutePath()); ++ PROGRESS.setStep("Reading input jar"); ++ // Read everything from the input jar! ++ List oldEntries = new ArrayList<>(); ++ try (ZipFile in = new ZipFile(input)) { ++ int amount = 0; ++ for (Enumeration entries = in.entries(); entries.hasMoreElements();) { ++ final ZipEntry e = entries.nextElement(); ++ if (e.isDirectory()) ++ continue; ++ String name = e.getName(); ++ byte[] data; ++ try (InputStream entryInput = in.getInputStream(e)) { ++ data = entryInput.readAllBytes(); // Paper - Use readAllBytes ++ } ++ ++ if (name.endsWith(".class") && !name.contains("META-INF/")) // Paper - Skip META-INF entries ++ oldEntries.add(ClassEntry.create(name, e.getTime(), data)); ++ else if (name.equals(MANIFEST_NAME)) ++ oldEntries.add(ManifestEntry.create(e.getTime(), data)); ++ else if (name.equals("javadoctor.json")) ++ oldEntries.add(Transformer.JavadoctorEntry.create(e.getTime(), data)); ++ else ++ oldEntries.add(ResourceEntry.create(name, e.getTime(), data)); ++ ++ if ((++amount) % 10 == 0) { ++ PROGRESS.setProgress(amount); ++ } ++ } ++ } catch (IOException e) { ++ throw new RuntimeException("Could not parse input: " + input.getAbsolutePath(), e); ++ } ++ ++ this.sortedClassProvider.clearCache(); ++ ArrayList classProviders = new ArrayList<>(this.classProviders); ++ classProviders.add(0, this.libraryClasses); ++ this.sortedClassProvider.classProviders = classProviders; ++ ++ AsyncHelper async = new AsyncHelper(threads); ++ try { ++ ++ /* Disabled until we do something with it ++ // Gather original file Hashes, so that we can detect changes and update the manifest if necessary ++ log("Gathering original hashes"); ++ Map oldHashes = async.invokeAll(oldEntries, ++ e -> new Pair<>(e.getName(), HashFunction.SHA256.hash(e.getData())) ++ ).stream().collect(Collectors.toMap(Pair::getLeft, Pair::getRight)); ++ */ ++ ++ PROGRESS.setProgress(0); ++ PROGRESS.setIndeterminate(true); ++ PROGRESS.setStep("Processing entries"); ++ ++ List ourClasses = oldEntries.stream() ++ .filter(e -> e instanceof ClassEntry && !e.getName().startsWith("META-INF/")) ++ .map(ClassEntry.class::cast) ++ .collect(Collectors.toList()); ++ ++ // Add the original classes to the inheritance map, TODO: Multi-Release somehow? 
++ logger.accept("Adding input to inheritance map"); ++ ClassProvider.Builder inputClassesBuilder = ClassProvider.builder(); ++ async.consumeAll(ourClasses, ClassEntry::getClassName, c -> ++ inputClassesBuilder.addClass(c.getName().substring(0, c.getName().length() - 6), c.getData()) ++ ); ++ classProviders.add(0, inputClassesBuilder.build()); ++ ++ // Process everything ++ logger.accept("Processing entries"); ++ List newEntries = async.invokeAll(oldEntries, Entry::getName, this::processEntry); ++ ++ logger.accept("Adding extras"); ++ // Paper start - I'm pretty sure the duplicates are because the input is already on the classpath ++ List finalNewEntries = newEntries; ++ transformers.forEach(t -> finalNewEntries.addAll(t.getExtras())); ++ ++ Set seen = new HashSet<>(); ++ if (remappingSelf) { ++ // deduplicate ++ List n = new ArrayList<>(); ++ for (final Entry e : newEntries) { ++ if (seen.add(e.getName())) { ++ n.add(e); ++ } ++ } ++ newEntries = n; ++ } else { ++ String dupes = newEntries.stream().map(Entry::getName) ++ .filter(n -> !seen.add(n)) ++ .sorted() ++ .collect(Collectors.joining(", ")); ++ if (!dupes.isEmpty()) ++ throw new IllegalStateException("Duplicate entries detected: " + dupes); ++ } ++ // Paper end ++ ++ // We care about stable output, so sort, and single thread write. ++ logger.accept("Sorting"); ++ Collections.sort(newEntries, this::compare); ++ ++ if (!output.getParentFile().exists()) ++ output.getParentFile().mkdirs(); ++ ++ seen.clear(); ++ ++ PROGRESS.setMaxProgress(newEntries.size()); ++ PROGRESS.setStep("Writing output"); ++ ++ logger.accept("Writing Output: " + output.getAbsolutePath()); ++ try (OutputStream fos = new BufferedOutputStream(Files.newOutputStream(output.toPath())); ++ ZipOutputStream zos = new ZipOutputStream(fos)) { ++ ++ int amount = 0; ++ for (Entry e : newEntries) { ++ String name = e.getName(); ++ int idx = name.lastIndexOf('/'); ++ if (idx != -1) ++ addDirectory(zos, seen, name.substring(0, idx)); ++ ++ logger.accept(" " + name); ++ ZipEntry entry = new ZipEntry(name); ++ entry.setTime(e.getTime()); ++ zos.putNextEntry(entry); ++ zos.write(e.getData()); ++ zos.closeEntry(); ++ ++ if ((++amount) % 10 == 0) { ++ PROGRESS.setProgress(amount); ++ } ++ } ++ ++ PROGRESS.setProgress(amount); ++ } ++ } catch (final IOException e) { ++ throw new RuntimeException("Could not write to file " + output.getAbsolutePath(), e); ++ } finally { ++ async.shutdown(); ++ } ++ } ++ ++ private byte[] readAllBytes(InputStream in, long size) throws IOException { ++ // This program will crash if size exceeds MAX_INT anyway since arrays are limited to 32-bit indices ++ ByteArrayOutputStream tmp = new ByteArrayOutputStream(size >= 0 ? (int) size : 0); ++ ++ byte[] buffer = new byte[8192]; ++ int read; ++ while ((read = in.read(buffer)) != -1) { ++ tmp.write(buffer, 0, read); ++ } ++ ++ return tmp.toByteArray(); ++ } ++ ++ // Tho Directory entries are not strictly necessary, we add them because some bad implementations of Zip extractors ++ // attempt to extract files without making sure the parents exist. 
++ private void addDirectory(ZipOutputStream zos, Set seen, String path) throws IOException { ++ if (!seen.add(path)) ++ return; ++ ++ int idx = path.lastIndexOf('/'); ++ if (idx != -1) ++ addDirectory(zos, seen, path.substring(0, idx)); ++ ++ logger.accept(" " + path + '/'); ++ ZipEntry dir = new ZipEntry(path + '/'); ++ dir.setTime(Entry.STABLE_TIMESTAMP); ++ zos.putNextEntry(dir); ++ zos.closeEntry(); ++ } ++ ++ private Entry processEntry(final Entry start) { ++ Entry entry = start; ++ for (Transformer transformer : RenamerImpl.this.transformers) { ++ entry = entry.process(transformer); ++ if (entry == null) ++ return null; ++ } ++ return entry; ++ } ++ ++ private int compare(Entry o1, Entry o2) { ++ // In order for JarInputStream to work, MANIFEST has to be the first entry, so make it first! ++ if (MANIFEST_NAME.equals(o1.getName())) ++ return MANIFEST_NAME.equals(o2.getName()) ? 0 : -1; ++ if (MANIFEST_NAME.equals(o2.getName())) ++ return MANIFEST_NAME.equals(o1.getName()) ? 0 : 1; ++ return o1.getName().compareTo(o2.getName()); ++ } ++ ++ @Override ++ public void close() throws IOException { ++ this.sortedClassProvider.close(); ++ } ++} +diff --git a/src/main/java/org/bukkit/craftbukkit/CraftServer.java b/src/main/java/org/bukkit/craftbukkit/CraftServer.java +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644 +--- a/src/main/java/org/bukkit/craftbukkit/CraftServer.java ++++ b/src/main/java/org/bukkit/craftbukkit/CraftServer.java +@@ -0,0 +0,0 @@ public final class CraftServer implements Server { + // Paper start + @Override + public File getPluginsFolder() { +- return (File) this.console.options.valueOf("plugins"); ++ return this.console.getPluginsFolder(); + } + + private List extraPluginJars() { +@@ -0,0 +0,0 @@ public final class CraftServer implements Server { + this.enablePlugins(PluginLoadOrder.STARTUP); + this.enablePlugins(PluginLoadOrder.POSTWORLD); + this.getPluginManager().callEvent(new ServerLoadEvent(ServerLoadEvent.LoadType.RELOAD)); ++ if (io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper != null) io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper.pluginsEnabled(); // Paper - Remap plugins + org.spigotmc.WatchdogThread.hasStarted = true; // Paper - Disable watchdog early timeout on reload + } + +diff --git a/src/main/java/org/bukkit/craftbukkit/util/CraftMagicNumbers.java b/src/main/java/org/bukkit/craftbukkit/util/CraftMagicNumbers.java +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644 +--- a/src/main/java/org/bukkit/craftbukkit/util/CraftMagicNumbers.java ++++ b/src/main/java/org/bukkit/craftbukkit/util/CraftMagicNumbers.java +@@ -0,0 +0,0 @@ import org.bukkit.potion.PotionType; + + @SuppressWarnings("deprecation") + public final class CraftMagicNumbers implements UnsafeValues { ++ public static final boolean DISABLE_PLUGIN_REWRITING = Boolean.getBoolean("paper.disable-plugin-rewriting"); + public static final UnsafeValues INSTANCE = new CraftMagicNumbers(); + + private CraftMagicNumbers() {} +@@ -0,0 +0,0 @@ public final class CraftMagicNumbers implements UnsafeValues { + if (pluginIndex < minimumIndex) { + throw new InvalidPluginException("Plugin API version " + pdf.getAPIVersion() + " is lower than the minimum allowed version. 
Please update or replace it."); + } +- } else { ++ } else if (!DISABLE_PLUGIN_REWRITING) { + if (minimumIndex == -1) { + CraftLegacy.init(); + Bukkit.getLogger().log(Level.WARNING, "Legacy plugin " + pdf.getFullName() + " does not specify an api-version."); +@@ -0,0 +0,0 @@ public final class CraftMagicNumbers implements UnsafeValues { + + @Override + public byte[] processClass(PluginDescriptionFile pdf, String path, byte[] clazz) { ++ if (DISABLE_PLUGIN_REWRITING) { ++ return clazz; ++ } + try { + clazz = Commodore.convert(clazz, !CraftMagicNumbers.isLegacy(pdf)); + } catch (Exception ex) { diff --git a/patches/unapplied/server/Remap-reflection-calls-in-plugins-using-internals.patch b/patches/unapplied/server/Remap-reflection-calls-in-plugins-using-internals.patch new file mode 100644 index 0000000000..0a737c48e8 --- /dev/null +++ b/patches/unapplied/server/Remap-reflection-calls-in-plugins-using-internals.patch @@ -0,0 +1,763 @@ +From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 +From: Nassim Jahnke +Date: Sun, 30 Oct 2022 23:47:26 +0100 +Subject: [PATCH] Remap reflection calls in plugins using internals + +Co-authored-by: Jason Penilla <11360596+jpenilla@users.noreply.github.com> + +diff --git a/build.gradle.kts b/build.gradle.kts +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644 +--- a/build.gradle.kts ++++ b/build.gradle.kts +@@ -0,0 +0,0 @@ dependencies { + implementation("org.ow2.asm:asm-commons:9.7") + implementation("org.spongepowered:configurate-yaml:4.2.0-SNAPSHOT") // Paper - config files + implementation("commons-lang:commons-lang:2.6") +- implementation("net.fabricmc:mapping-io:0.5.0") // Paper - needed to read mappings for stacktrace deobfuscation + runtimeOnly("org.xerial:sqlite-jdbc:3.42.0.1") + runtimeOnly("com.mysql:mysql-connector-j:8.2.0") + runtimeOnly("com.lmax:disruptor:3.4.4") // Paper +@@ -0,0 +0,0 @@ dependencies { + testImplementation("org.junit-pioneer:junit-pioneer:2.2.0") // Paper - CartesianTest + implementation("net.neoforged:AutoRenamingTool:2.0.3") // Paper - remap plugins + implementation("net.neoforged:srgutils:1.0.9") // Paper - remap plugins - bump transitive of ART ++ // Paper start - Remap reflection ++ val reflectionRewriterVersion = "0.0.1-SNAPSHOT" ++ implementation("io.papermc:reflection-rewriter:$reflectionRewriterVersion") ++ implementation("io.papermc:reflection-rewriter-runtime:$reflectionRewriterVersion") ++ implementation("io.papermc:reflection-rewriter-proxy-generator:$reflectionRewriterVersion") ++ // Paper end - Remap reflection + } + + paperweight { +diff --git a/src/main/java/com/destroystokyo/paper/entity/ai/MobGoalHelper.java b/src/main/java/com/destroystokyo/paper/entity/ai/MobGoalHelper.java +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644 +--- a/src/main/java/com/destroystokyo/paper/entity/ai/MobGoalHelper.java ++++ b/src/main/java/com/destroystokyo/paper/entity/ai/MobGoalHelper.java +@@ -0,0 +0,0 @@ import com.destroystokyo.paper.entity.RangedEntity; + import com.destroystokyo.paper.util.set.OptimizedSmallEnumSet; + import com.google.common.collect.BiMap; + import com.google.common.collect.HashBiMap; ++import io.papermc.paper.util.MappingEnvironment; + import io.papermc.paper.util.ObfHelper; + import java.lang.reflect.Constructor; + import java.util.EnumSet; +@@ -0,0 +0,0 @@ public class MobGoalHelper { + } + + public static String getUsableName(Class clazz) { +- String name = 
ObfHelper.INSTANCE.deobfClassName(clazz.getName()); ++ String name = MappingEnvironment.reobf() ? ObfHelper.INSTANCE.deobfClassName(clazz.getName()) : clazz.getName(); + name = name.substring(name.lastIndexOf(".") + 1); + boolean flag = false; + // inner classes +diff --git a/src/main/java/io/papermc/paper/configuration/serializer/PacketClassSerializer.java b/src/main/java/io/papermc/paper/configuration/serializer/PacketClassSerializer.java +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644 +--- a/src/main/java/io/papermc/paper/configuration/serializer/PacketClassSerializer.java ++++ b/src/main/java/io/papermc/paper/configuration/serializer/PacketClassSerializer.java +@@ -0,0 +0,0 @@ import com.google.common.collect.ImmutableBiMap; + import com.mojang.logging.LogUtils; + import io.leangen.geantyref.TypeToken; + import io.papermc.paper.configuration.serializer.collections.MapSerializer; ++import io.papermc.paper.util.MappingEnvironment; + import io.papermc.paper.util.ObfHelper; + import net.minecraft.network.protocol.Packet; + import org.checkerframework.checker.nullness.qual.Nullable; +@@ -0,0 +0,0 @@ public final class PacketClassSerializer extends ScalarSerializer> packetClass, final Predicate> typeSupported) { + final String name = packetClass.getName(); +- @Nullable String mojName = ObfHelper.INSTANCE.mappingsByMojangName() == null ? name : MOJANG_TO_OBF.inverse().get(name); // if the mappings are null, running on moj-mapped server ++ @Nullable String mojName = ObfHelper.INSTANCE.mappingsByMojangName() == null || !MappingEnvironment.reobf() ? name : MOJANG_TO_OBF.inverse().get(name); // if the mappings are null, running on moj-mapped server + if (mojName == null && MOJANG_TO_OBF.containsKey(name)) { + mojName = name; + } +diff --git a/src/main/java/io/papermc/paper/plugin/entrypoint/classloader/PaperClassloaderBytecodeModifier.java b/src/main/java/io/papermc/paper/plugin/entrypoint/classloader/PaperClassloaderBytecodeModifier.java +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644 +--- a/src/main/java/io/papermc/paper/plugin/entrypoint/classloader/PaperClassloaderBytecodeModifier.java ++++ b/src/main/java/io/papermc/paper/plugin/entrypoint/classloader/PaperClassloaderBytecodeModifier.java +@@ -0,0 +0,0 @@ + package io.papermc.paper.plugin.entrypoint.classloader; + + import io.papermc.paper.plugin.configuration.PluginMeta; ++import org.objectweb.asm.ClassReader; ++import org.objectweb.asm.ClassWriter; + + // Stub, implement in future. 
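// Context for the classloader hook below (an illustrative example, not from the patch): once plugin
// bytecode is passed through ReflectionRemapper.visitor(...), reflective lookups written against
// server internals keep working on a Mojang-mapped server. For instance, PaperReflection (later in
// this patch) maps class names, so a legacy lookup along these lines can still resolve:
final class LegacyReflectionSketch {
    static Class<?> craftPlayerClass() throws ClassNotFoundException {
        // After rewriting, this call is routed through Paper's reflection proxy, which translates
        // the versioned "org.bukkit.craftbukkit.v1_20_R3" prefix (directly, or via the bundled
        // mappings) before delegating to the real Class.forName.
        return Class.forName("org.bukkit.craftbukkit.v1_20_R3.entity.CraftPlayer");
    }
}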
+ public class PaperClassloaderBytecodeModifier implements ClassloaderBytecodeModifier { + + @Override + public byte[] modify(PluginMeta configuration, byte[] bytecode) { +- return bytecode; ++ ClassReader classReader = new ClassReader(bytecode); ++ ClassWriter classWriter = new ClassWriter(classReader, 0); ++ classReader.accept(io.papermc.paper.pluginremap.reflect.ReflectionRemapper.visitor(classWriter), 0); ++ return classWriter.toByteArray(); + } + } +diff --git a/src/main/java/io/papermc/paper/pluginremap/reflect/PaperReflection.java b/src/main/java/io/papermc/paper/pluginremap/reflect/PaperReflection.java +new file mode 100644 +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 +--- /dev/null ++++ b/src/main/java/io/papermc/paper/pluginremap/reflect/PaperReflection.java +@@ -0,0 +0,0 @@ ++package io.papermc.paper.pluginremap.reflect; ++ ++import com.mojang.logging.LogUtils; ++import io.papermc.paper.util.MappingEnvironment; ++import io.papermc.paper.util.ObfHelper; ++import io.papermc.reflectionrewriter.runtime.AbstractDefaultRulesReflectionProxy; ++import io.papermc.reflectionrewriter.runtime.DefineClassReflectionProxy; ++import java.lang.invoke.MethodHandles; ++import java.nio.ByteBuffer; ++import java.security.CodeSource; ++import java.security.ProtectionDomain; ++import java.util.Map; ++import java.util.Objects; ++import java.util.stream.Collectors; ++import org.checkerframework.checker.nullness.qual.NonNull; ++import org.checkerframework.checker.nullness.qual.Nullable; ++import org.checkerframework.framework.qual.DefaultQualifier; ++import org.objectweb.asm.ClassReader; ++import org.objectweb.asm.ClassWriter; ++import org.slf4j.Logger; ++ ++// todo proper inheritance handling ++@SuppressWarnings("unused") ++@DefaultQualifier(NonNull.class) ++public final class PaperReflection extends AbstractDefaultRulesReflectionProxy implements DefineClassReflectionProxy { ++ // concat to avoid being rewritten by shadow ++ private static final Logger LOGGER = LogUtils.getLogger(); ++ private static final String CB_PACKAGE_PREFIX = "org.bukkit.".concat("craftbukkit."); ++ private static final String LEGACY_CB_PACKAGE_PREFIX = "org.bukkit.".concat("craftbukkit.") + MappingEnvironment.LEGACY_CB_VERSION + "."; ++ ++ private final DefineClassReflectionProxy defineClassProxy; ++ private final Map mappingsByMojangName; ++ private final Map mappingsByObfName; ++ // Reflection does not care about method return values, so this map removes the return value descriptor from the key ++ private final Map> strippedMethodMappings; ++ ++ PaperReflection() { ++ this.defineClassProxy = DefineClassReflectionProxy.create(PaperReflection::processClass); ++ if (!MappingEnvironment.hasMappings()) { ++ this.mappingsByMojangName = Map.of(); ++ this.mappingsByObfName = Map.of(); ++ this.strippedMethodMappings = Map.of(); ++ return; ++ } ++ final ObfHelper obfHelper = ObfHelper.INSTANCE; ++ this.mappingsByMojangName = Objects.requireNonNull(obfHelper.mappingsByMojangName(), "mappingsByMojangName"); ++ this.mappingsByObfName = Objects.requireNonNull(obfHelper.mappingsByObfName(), "mappingsByObfName"); ++ this.strippedMethodMappings = this.mappingsByMojangName.entrySet().stream().collect(Collectors.toUnmodifiableMap( ++ Map.Entry::getKey, ++ entry -> entry.getValue().strippedMethods() ++ )); ++ } ++ ++ @Override ++ protected String mapClassName(final String name) { ++ final ObfHelper.@Nullable ClassMapping mapping = this.mappingsByObfName.get(name); ++ return mapping != null ? 
mapping.mojangName() : removeCraftBukkitRelocation(name); ++ } ++ ++ @Override ++ protected String mapDeclaredMethodName(final Class clazz, final String name, final Class... parameterTypes) { ++ final @Nullable Map mapping = this.strippedMethodMappings.get(clazz.getName()); ++ if (mapping == null) { ++ return name; ++ } ++ return mapping.getOrDefault(strippedMethodKey(name, parameterTypes), name); ++ } ++ ++ @Override ++ protected String mapMethodName(final Class clazz, final String name, final Class... parameterTypes) { ++ final @Nullable String mapped = this.findMappedMethodName(clazz, name, parameterTypes); ++ return mapped != null ? mapped : name; ++ } ++ ++ @Override ++ protected String mapDeclaredFieldName(final Class clazz, final String name) { ++ final ObfHelper.@Nullable ClassMapping mapping = this.mappingsByMojangName.get(clazz.getName()); ++ if (mapping == null) { ++ return name; ++ } ++ return mapping.fieldsByObf().getOrDefault(name, name); ++ } ++ ++ @Override ++ protected String mapFieldName(final Class clazz, final String name) { ++ final @Nullable String mapped = this.findMappedFieldName(clazz, name); ++ return mapped != null ? mapped : name; ++ } ++ ++ private @Nullable String findMappedMethodName(final Class clazz, final String name, final Class... parameterTypes) { ++ final Map map = this.strippedMethodMappings.get(clazz.getName()); ++ @Nullable String mapped = null; ++ if (map != null) { ++ mapped = map.get(strippedMethodKey(name, parameterTypes)); ++ if (mapped != null) { ++ return mapped; ++ } ++ } ++ // JVM checks super before interfaces ++ final Class superClass = clazz.getSuperclass(); ++ if (superClass != null) { ++ mapped = this.findMappedMethodName(superClass, name, parameterTypes); ++ } ++ if (mapped == null) { ++ for (final Class i : clazz.getInterfaces()) { ++ mapped = this.findMappedMethodName(i, name, parameterTypes); ++ if (mapped != null) { ++ break; ++ } ++ } ++ } ++ return mapped; ++ } ++ ++ private @Nullable String findMappedFieldName(final Class clazz, final String name) { ++ final ObfHelper.ClassMapping mapping = this.mappingsByMojangName.get(clazz.getName()); ++ @Nullable String mapped = null; ++ if (mapping != null) { ++ mapped = mapping.fieldsByObf().get(name); ++ if (mapped != null) { ++ return mapped; ++ } ++ } ++ // The JVM checks super before interfaces ++ final Class superClass = clazz.getSuperclass(); ++ if (superClass != null) { ++ mapped = this.findMappedFieldName(superClass, name); ++ } ++ if (mapped == null) { ++ for (final Class i : clazz.getInterfaces()) { ++ mapped = this.findMappedFieldName(i, name); ++ if (mapped != null) { ++ break; ++ } ++ } ++ } ++ return mapped; ++ } ++ ++ private static String strippedMethodKey(final String methodName, final Class... parameterTypes) { ++ return methodName + parameterDescriptor(parameterTypes); ++ } ++ ++ private static String parameterDescriptor(final Class... 
parameterTypes) { ++ final StringBuilder builder = new StringBuilder(); ++ builder.append('('); ++ for (final Class parameterType : parameterTypes) { ++ builder.append(parameterType.descriptorString()); ++ } ++ builder.append(')'); ++ return builder.toString(); ++ } ++ ++ private static String removeCraftBukkitRelocation(final String name) { ++ if (MappingEnvironment.hasMappings()) { ++ // Relocation is applied in reobf, and when mappings are present they handle the relocation ++ return name; ++ } ++ if (name.startsWith(LEGACY_CB_PACKAGE_PREFIX)) { ++ return CB_PACKAGE_PREFIX + name.substring(LEGACY_CB_PACKAGE_PREFIX.length()); ++ } ++ return name; ++ } ++ ++ @Override ++ public Class defineClass(final Object loader, final byte[] b, final int off, final int len) throws ClassFormatError { ++ return this.defineClassProxy.defineClass(loader, b, off, len); ++ } ++ ++ @Override ++ public Class defineClass(final Object loader, final String name, final byte[] b, final int off, final int len) throws ClassFormatError { ++ return this.defineClassProxy.defineClass(loader, name, b, off, len); ++ } ++ ++ @Override ++ public Class defineClass(final Object loader, final @Nullable String name, final byte[] b, final int off, final int len, final @Nullable ProtectionDomain protectionDomain) throws ClassFormatError { ++ return this.defineClassProxy.defineClass(loader, name, b, off, len, protectionDomain); ++ } ++ ++ @Override ++ public Class defineClass(final Object loader, final String name, final ByteBuffer b, final ProtectionDomain protectionDomain) throws ClassFormatError { ++ return this.defineClassProxy.defineClass(loader, name, b, protectionDomain); ++ } ++ ++ @Override ++ public Class defineClass(final Object secureLoader, final String name, final byte[] b, final int off, final int len, final CodeSource cs) { ++ return this.defineClassProxy.defineClass(secureLoader, name, b, off, len, cs); ++ } ++ ++ @Override ++ public Class defineClass(final Object secureLoader, final String name, final ByteBuffer b, final CodeSource cs) { ++ return this.defineClassProxy.defineClass(secureLoader, name, b, cs); ++ } ++ ++ @Override ++ public Class defineClass(final MethodHandles.Lookup lookup, final byte[] bytes) throws IllegalAccessException { ++ return this.defineClassProxy.defineClass(lookup, bytes); ++ } ++ ++ // todo apply bytecode remap here as well ++ private static byte[] processClass(final byte[] bytes) { ++ try { ++ final ClassReader reader = new ClassReader(bytes); ++ final ClassWriter writer = new ClassWriter(reader, 0); ++ reader.accept(ReflectionRemapper.visitor(writer), 0); ++ return writer.toByteArray(); ++ } catch (final Exception ex) { ++ LOGGER.warn("Failed to process class bytes", ex); ++ return bytes; ++ } ++ } ++} +diff --git a/src/main/java/io/papermc/paper/pluginremap/reflect/ReflectionRemapper.java b/src/main/java/io/papermc/paper/pluginremap/reflect/ReflectionRemapper.java +new file mode 100644 +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 +--- /dev/null ++++ b/src/main/java/io/papermc/paper/pluginremap/reflect/ReflectionRemapper.java +@@ -0,0 +0,0 @@ ++package io.papermc.paper.pluginremap.reflect; ++ ++import io.papermc.asm.ClassInfoProvider; ++import io.papermc.asm.RewriteRuleVisitorFactory; ++import io.papermc.paper.util.MappingEnvironment; ++import io.papermc.reflectionrewriter.BaseReflectionRules; ++import io.papermc.reflectionrewriter.DefineClassRule; ++import io.papermc.reflectionrewriter.proxygenerator.ProxyGenerator; ++import 
java.lang.invoke.MethodHandles; ++import java.lang.reflect.Method; ++import org.checkerframework.checker.nullness.qual.NonNull; ++import org.checkerframework.framework.qual.DefaultQualifier; ++import org.objectweb.asm.ClassVisitor; ++import org.objectweb.asm.Opcodes; ++ ++@DefaultQualifier(NonNull.class) ++public final class ReflectionRemapper { ++ private static final String PAPER_REFLECTION_HOLDER = "io.papermc.paper.pluginremap.reflect.PaperReflectionHolder"; ++ private static final String PAPER_REFLECTION_HOLDER_DESC = PAPER_REFLECTION_HOLDER.replace('.', '/'); ++ private static final RewriteRuleVisitorFactory VISITOR_FACTORY = RewriteRuleVisitorFactory.create( ++ Opcodes.ASM9, ++ chain -> chain.then(new BaseReflectionRules(PAPER_REFLECTION_HOLDER).rules()) ++ .then(DefineClassRule.create(PAPER_REFLECTION_HOLDER_DESC, true)), ++ ClassInfoProvider.basic() ++ ); ++ ++ static { ++ if (!MappingEnvironment.reobf()) { ++ setupProxy(); ++ } ++ } ++ ++ private ReflectionRemapper() { ++ } ++ ++ public static ClassVisitor visitor(final ClassVisitor parent) { ++ if (MappingEnvironment.reobf()) { ++ return parent; ++ } ++ return VISITOR_FACTORY.createVisitor(parent); ++ } ++ ++ private static void setupProxy() { ++ try { ++ final byte[] bytes = ProxyGenerator.generateProxy(PaperReflection.class, PAPER_REFLECTION_HOLDER_DESC); ++ final MethodHandles.Lookup lookup = MethodHandles.lookup(); ++ final Class generated = lookup.defineClass(bytes); ++ final Method init = generated.getDeclaredMethod("init", PaperReflection.class); ++ init.invoke(null, new PaperReflection()); ++ } catch (final ReflectiveOperationException ex) { ++ throw new RuntimeException(ex); ++ } ++ } ++} +diff --git a/src/main/java/io/papermc/paper/util/MappingEnvironment.java b/src/main/java/io/papermc/paper/util/MappingEnvironment.java +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644 +--- a/src/main/java/io/papermc/paper/util/MappingEnvironment.java ++++ b/src/main/java/io/papermc/paper/util/MappingEnvironment.java +@@ -0,0 +0,0 @@ import org.checkerframework.framework.qual.DefaultQualifier; + + @DefaultQualifier(NonNull.class) + public final class MappingEnvironment { ++ public static final String LEGACY_CB_VERSION = "v1_20_R3"; + private static final @Nullable String MAPPINGS_HASH = readMappingsHash(); + private static final boolean REOBF = checkReobf(); + +diff --git a/src/main/java/io/papermc/paper/util/ObfHelper.java b/src/main/java/io/papermc/paper/util/ObfHelper.java +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644 +--- a/src/main/java/io/papermc/paper/util/ObfHelper.java ++++ b/src/main/java/io/papermc/paper/util/ObfHelper.java +@@ -0,0 +0,0 @@ package io.papermc.paper.util; + + import java.io.IOException; + import java.io.InputStream; +-import java.io.InputStreamReader; +-import java.nio.charset.StandardCharsets; + import java.util.HashMap; + import java.util.HashSet; + import java.util.Map; + import java.util.Objects; + import java.util.Set; +-import java.util.function.Function; + import java.util.stream.Collectors; +-import net.fabricmc.mappingio.MappingReader; +-import net.fabricmc.mappingio.format.MappingFormat; +-import net.fabricmc.mappingio.tree.MappingTree; +-import net.fabricmc.mappingio.tree.MemoryMappingTree; ++import net.neoforged.srgutils.IMappingFile; + import org.checkerframework.checker.nullness.qual.NonNull; + import org.checkerframework.checker.nullness.qual.Nullable; + import 
org.checkerframework.framework.qual.DefaultQualifier; +@@ -0,0 +0,0 @@ import org.checkerframework.framework.qual.DefaultQualifier; + public enum ObfHelper { + INSTANCE; + +- public static final String MOJANG_PLUS_YARN_NAMESPACE = "mojang+yarn"; +- public static final String SPIGOT_NAMESPACE = "spigot"; +- + private final @Nullable Map mappingsByObfName; + private final @Nullable Map mappingsByMojangName; + +@@ -0,0 +0,0 @@ public enum ObfHelper { + return null; + } + try (final InputStream mappingsInputStream = MappingEnvironment.mappingsStream()) { +- final MemoryMappingTree tree = new MemoryMappingTree(); +- MappingReader.read(new InputStreamReader(mappingsInputStream, StandardCharsets.UTF_8), MappingFormat.TINY_2_FILE, tree); ++ final IMappingFile mappings = IMappingFile.load(mappingsInputStream); // Mappings are mojang->spigot + final Set classes = new HashSet<>(); + + final StringPool pool = new StringPool(); +- for (final MappingTree.ClassMapping cls : tree.getClasses()) { ++ for (final IMappingFile.IClass cls : mappings.getClasses()) { + final Map methods = new HashMap<>(); ++ final Map fields = new HashMap<>(); ++ final Map strippedMethods = new HashMap<>(); + +- for (final MappingTree.MethodMapping methodMapping : cls.getMethods()) { ++ for (final IMappingFile.IMethod methodMapping : cls.getMethods()) { + methods.put( + pool.string(methodKey( +- Objects.requireNonNull(methodMapping.getName(SPIGOT_NAMESPACE)), +- Objects.requireNonNull(methodMapping.getDesc(SPIGOT_NAMESPACE)) ++ Objects.requireNonNull(methodMapping.getMapped()), ++ Objects.requireNonNull(methodMapping.getMappedDescriptor()) + )), +- pool.string(Objects.requireNonNull(methodMapping.getName(MOJANG_PLUS_YARN_NAMESPACE))) ++ pool.string(Objects.requireNonNull(methodMapping.getOriginal())) ++ ); ++ ++ strippedMethods.put( ++ pool.string(pool.string(strippedMethodKey( ++ methodMapping.getMapped(), ++ methodMapping.getDescriptor() ++ ))), ++ pool.string(methodMapping.getOriginal()) ++ ); ++ } ++ for (final IMappingFile.IField field : cls.getFields()) { ++ fields.put( ++ pool.string(field.getMapped()), ++ pool.string(field.getOriginal()) + ); + } + + final ClassMapping map = new ClassMapping( +- Objects.requireNonNull(cls.getName(SPIGOT_NAMESPACE)).replace('/', '.'), +- Objects.requireNonNull(cls.getName(MOJANG_PLUS_YARN_NAMESPACE)).replace('/', '.'), +- Map.copyOf(methods) ++ Objects.requireNonNull(cls.getMapped()).replace('/', '.'), ++ Objects.requireNonNull(cls.getOriginal()).replace('/', '.'), ++ Map.copyOf(methods), ++ Map.copyOf(fields), ++ Map.copyOf(strippedMethods) + ); + classes.add(map); + } + + return Set.copyOf(classes); + } catch (final IOException ex) { +- System.err.println("Failed to load mappings for stacktrace deobfuscation."); ++ System.err.println("Failed to load mappings."); + ex.printStackTrace(); + return null; + } + } + +- public static String methodKey(final String obfName, final String obfDescriptor) { +- return obfName + obfDescriptor; ++ public static String strippedMethodKey(final String methodName, final String methodDescriptor) { ++ final String methodKey = methodKey(methodName, methodDescriptor); ++ final int returnDescriptorEnd = methodKey.indexOf(')'); ++ return methodKey.substring(0, returnDescriptorEnd + 1); + } + +- private static final class StringPool { +- private final Map pool = new HashMap<>(); +- +- public String string(final String string) { +- return this.pool.computeIfAbsent(string, Function.identity()); +- } ++ public static String methodKey(final String methodName, final 
String methodDescriptor) { ++ return methodName + methodDescriptor; + } + + public record ClassMapping( + String obfName, + String mojangName, +- Map methodsByObf ++ Map methodsByObf, ++ Map fieldsByObf, ++ // obf name with mapped desc to mapped name. return value is excluded from desc as reflection doesn't use it ++ Map strippedMethods + ) {} + } +diff --git a/src/main/java/io/papermc/paper/util/StacktraceDeobfuscator.java b/src/main/java/io/papermc/paper/util/StacktraceDeobfuscator.java +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644 +--- a/src/main/java/io/papermc/paper/util/StacktraceDeobfuscator.java ++++ b/src/main/java/io/papermc/paper/util/StacktraceDeobfuscator.java +@@ -0,0 +0,0 @@ + package io.papermc.paper.util; + + import io.papermc.paper.configuration.GlobalConfiguration; +-import it.unimi.dsi.fastutil.ints.IntArrayList; +-import it.unimi.dsi.fastutil.ints.IntList; ++import it.unimi.dsi.fastutil.ints.Int2ObjectMap; ++import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap; + import java.io.IOException; + import java.io.InputStream; + import java.util.Collections; +-import java.util.HashMap; + import java.util.LinkedHashMap; + import java.util.Map; + import org.checkerframework.checker.nullness.qual.NonNull; +@@ -0,0 +0,0 @@ import org.objectweb.asm.Opcodes; + public enum StacktraceDeobfuscator { + INSTANCE; + +- private final Map, Map> lineMapCache = Collections.synchronizedMap(new LinkedHashMap<>(128, 0.75f, true) { ++ private final Map, Int2ObjectMap> lineMapCache = Collections.synchronizedMap(new LinkedHashMap<>(128, 0.75f, true) { + @Override +- protected boolean removeEldestEntry(final Map.Entry, Map> eldest) { ++ protected boolean removeEldestEntry(final Map.Entry, Int2ObjectMap> eldest) { + return this.size() > 127; + } + }); + + public void deobfuscateThrowable(final Throwable throwable) { ++ if (!MappingEnvironment.reobf()) { ++ return; ++ } + if (GlobalConfiguration.get() != null && !GlobalConfiguration.get().logging.deobfuscateStacktraces) { // handle null as true + return; + } +@@ -0,0 +0,0 @@ public enum StacktraceDeobfuscator { + } + + public StackTraceElement[] deobfuscateStacktrace(final StackTraceElement[] traceElements) { ++ if (!MappingEnvironment.reobf()) { ++ return traceElements; ++ } + if (GlobalConfiguration.get() != null && !GlobalConfiguration.get().logging.deobfuscateStacktraces) { // handle null as true + return traceElements; + } +@@ -0,0 +0,0 @@ public enum StacktraceDeobfuscator { + } + + private @Nullable String determineMethodForLine(final Class clazz, final int lineNumber) { +- final Map lineMap = this.lineMapCache.computeIfAbsent(clazz, StacktraceDeobfuscator::buildLineMap); +- for (final var entry : lineMap.entrySet()) { +- final String methodKey = entry.getKey(); +- final IntList lines = entry.getValue(); +- for (int i = 0, linesSize = lines.size(); i < linesSize; i++) { +- final int num = lines.getInt(i); +- if (num == lineNumber) { +- return methodKey; +- } +- } +- } +- return null; ++ return this.lineMapCache.computeIfAbsent(clazz, StacktraceDeobfuscator::buildLineMap).get(lineNumber); + } + + private static String sourceFileName(final String fullClassName) { +@@ -0,0 +0,0 @@ public enum StacktraceDeobfuscator { + return rootClassName + ".java"; + } + +- private static Map buildLineMap(final Class key) { +- final Map lineMap = new HashMap<>(); ++ private static Int2ObjectMap buildLineMap(final Class key) { ++ final StringPool pool = new StringPool(); ++ final Int2ObjectMap lineMap = new 
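// A worked example of the stripped method keys built in ObfHelper above (illustrative): the key is
// the method name plus its parameter descriptor with the return type cut off, because a reflective
// lookup only supplies a name and parameter classes, never a return type.
import io.papermc.paper.util.ObfHelper;

final class StrippedKeySketch {
    public static void main(final String[] args) {
        // Full key: name + descriptor, e.g. "broadcast(Ljava/lang/String;)I"
        final String full = ObfHelper.methodKey("broadcast", "(Ljava/lang/String;)I");
        // Stripped key: everything after the closing ')' is dropped -> "broadcast(Ljava/lang/String;)"
        final String stripped = ObfHelper.strippedMethodKey("broadcast", "(Ljava/lang/String;)I");
        System.out.println(full + " -> " + stripped);
    }
}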
Int2ObjectOpenHashMap<>(); + final class LineCollectingMethodVisitor extends MethodVisitor { +- private final IntList lines = new IntArrayList(); + private final String name; + private final String descriptor; + +- LineCollectingMethodVisitor(String name, String descriptor) { ++ LineCollectingMethodVisitor(final String name, final String descriptor) { + super(Opcodes.ASM9); + this.name = name; + this.descriptor = descriptor; + } + + @Override +- public void visitLineNumber(int line, Label start) { +- super.visitLineNumber(line, start); +- this.lines.add(line); +- } +- +- @Override +- public void visitEnd() { +- super.visitEnd(); +- lineMap.put(ObfHelper.methodKey(this.name, this.descriptor), this.lines); ++ public void visitLineNumber(final int line, final Label start) { ++ lineMap.put(line, pool.string(ObfHelper.methodKey(this.name, this.descriptor))); + } + } + final ClassVisitor classVisitor = new ClassVisitor(Opcodes.ASM9) { + @Override +- public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) { ++ public MethodVisitor visitMethod(final int access, final String name, final String descriptor, final String signature, final String[] exceptions) { + return new LineCollectingMethodVisitor(name, descriptor); + } + }; +diff --git a/src/main/java/io/papermc/paper/util/StringPool.java b/src/main/java/io/papermc/paper/util/StringPool.java +new file mode 100644 +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 +--- /dev/null ++++ b/src/main/java/io/papermc/paper/util/StringPool.java +@@ -0,0 +0,0 @@ ++package io.papermc.paper.util; ++ ++import java.util.HashMap; ++import java.util.Map; ++import java.util.function.Function; ++import org.checkerframework.checker.nullness.qual.NonNull; ++import org.checkerframework.framework.qual.DefaultQualifier; ++ ++/** ++ * De-duplicates {@link String} instances without using {@link String#intern()}. ++ * ++ *
<p>Interning may not be desired as we may want to use the heap for our pool, ++ * so it can be garbage collected as normal, etc.</p> ++ * ++ * <p>Additionally, interning can be slow due to the potentially large size of the ++ * pool (as it is shared for the entire JVM), and because most JVMs implement ++ * it using JNI.</p>
++ */ ++@DefaultQualifier(NonNull.class) ++public final class StringPool { ++ private final Map pool; ++ ++ public StringPool() { ++ this(new HashMap<>()); ++ } ++ ++ public StringPool(final Map map) { ++ this.pool = map; ++ } ++ ++ public String string(final String string) { ++ return this.pool.computeIfAbsent(string, Function.identity()); ++ } ++} +diff --git a/src/main/java/net/minecraft/world/entity/ai/behavior/Behavior.java b/src/main/java/net/minecraft/world/entity/ai/behavior/Behavior.java +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644 +--- a/src/main/java/net/minecraft/world/entity/ai/behavior/Behavior.java ++++ b/src/main/java/net/minecraft/world/entity/ai/behavior/Behavior.java +@@ -0,0 +0,0 @@ public abstract class Behavior implements BehaviorContro + this.maxDuration = maxRunTime; + this.entryCondition = requiredMemoryState; + // Paper start - configurable behavior tick rate and timings +- String key = io.papermc.paper.util.ObfHelper.INSTANCE.deobfClassName(this.getClass().getName()); ++ String key = io.papermc.paper.util.MappingEnvironment.reobf() ? io.papermc.paper.util.ObfHelper.INSTANCE.deobfClassName(this.getClass().getName()) : this.getClass().getName(); + int lastSeparator = key.lastIndexOf('.'); + if (lastSeparator != -1) { + key = key.substring(lastSeparator + 1); +diff --git a/src/main/java/net/minecraft/world/entity/ai/sensing/Sensor.java b/src/main/java/net/minecraft/world/entity/ai/sensing/Sensor.java +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644 +--- a/src/main/java/net/minecraft/world/entity/ai/sensing/Sensor.java ++++ b/src/main/java/net/minecraft/world/entity/ai/sensing/Sensor.java +@@ -0,0 +0,0 @@ public abstract class Sensor { + + public Sensor(int senseInterval) { + // Paper start - configurable sensor tick rate and timings +- String key = io.papermc.paper.util.ObfHelper.INSTANCE.deobfClassName(this.getClass().getName()); ++ String key = io.papermc.paper.util.MappingEnvironment.reobf() ? 
io.papermc.paper.util.ObfHelper.INSTANCE.deobfClassName(this.getClass().getName()) : this.getClass().getName(); + int lastSeparator = key.lastIndexOf('.'); + if (lastSeparator != -1) { + key = key.substring(lastSeparator + 1); +diff --git a/src/main/java/net/minecraft/world/level/redstone/CollectingNeighborUpdater.java b/src/main/java/net/minecraft/world/level/redstone/CollectingNeighborUpdater.java +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644 +--- a/src/main/java/net/minecraft/world/level/redstone/CollectingNeighborUpdater.java ++++ b/src/main/java/net/minecraft/world/level/redstone/CollectingNeighborUpdater.java +@@ -0,0 +0,0 @@ public class CollectingNeighborUpdater implements NeighborUpdater { + } + } + +- interface NeighborUpdates { ++ public interface NeighborUpdates { // Paper - TODO make package-private again (it is just made public for testing LambdaMetafactory remapping) + boolean runNext(Level world); + } + +diff --git a/src/main/java/org/bukkit/craftbukkit/util/Commodore.java b/src/main/java/org/bukkit/craftbukkit/util/Commodore.java +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644 +--- a/src/main/java/org/bukkit/craftbukkit/util/Commodore.java ++++ b/src/main/java/org/bukkit/craftbukkit/util/Commodore.java +@@ -0,0 +0,0 @@ public class Commodore { + + // Paper start - Plugin rewrites + private static final String CB_PACKAGE = org.bukkit.Bukkit.getServer().getClass().getPackageName().replace('.', '/'); +- private static final Map SEARCH_AND_REMOVE = initReplacementsMap(); +- private static Map initReplacementsMap() { +- Map getAndRemove = new HashMap<>(); +- // Be wary of maven shade's relocations +- +- final java.util.jar.Manifest manifest = io.papermc.paper.util.JarManifests.manifest(Commodore.class); +- if (Boolean.getBoolean( "debug.rewriteForIde") && manifest != null) +- { +- // unversion incoming calls for pre-relocate debug work +- final String NMS_REVISION_PACKAGE = "v" + manifest.getMainAttributes().getValue("CraftBukkit-Package-Version") + "/"; +- +- getAndRemove.put("org/bukkit/".concat("craftbukkit/" + NMS_REVISION_PACKAGE), NMS_REVISION_PACKAGE); +- } +- +- return getAndRemove; +- } ++ private static final String CB_PACKAGE_PREFIX = "org/bukkit/".concat("craftbukkit/"); ++ private static final String LEGACY_CB_PACKAGE_PREFIX = CB_PACKAGE_PREFIX + io.papermc.paper.util.MappingEnvironment.LEGACY_CB_VERSION + "/"; + + @Nonnull + private static String getOriginalOrRewrite(@Nonnull String original) + { +- String rewrite = null; +- for ( Map.Entry entry : SEARCH_AND_REMOVE.entrySet() ) +- { +- if ( original.contains( entry.getKey() ) ) +- { +- rewrite = original.replace( entry.getValue(), "" ); ++ // Relocation is applied in reobf, and when mappings are present they handle the relocation ++ if (!io.papermc.paper.util.MappingEnvironment.reobf() && !io.papermc.paper.util.MappingEnvironment.hasMappings()) { ++ if (original.contains(LEGACY_CB_PACKAGE_PREFIX)) { ++ original = original.replace(LEGACY_CB_PACKAGE_PREFIX, CB_PACKAGE_PREFIX); + } + } + +- return rewrite != null ? 
rewrite : original; ++ return original; + } + // Paper end - Plugin rewrites + +@@ -0,0 +0,0 @@ public class Commodore { + ClassReader cr = new ClassReader(b); + ClassWriter cw = new ClassWriter(cr, 0); + +- cr.accept(new ClassRemapper(new ClassVisitor(Opcodes.ASM9, cw) { ++ cr.accept(new ClassRemapper(new ClassVisitor(Opcodes.ASM9, io.papermc.paper.pluginremap.reflect.ReflectionRemapper.visitor(cw)) { // Paper + + // Paper start - Rewrite plugins + @Override diff --git a/patches/unapplied/server/build-replace-use-of-shadow-plugin.patch b/patches/unapplied/server/build-replace-use-of-shadow-plugin.patch new file mode 100644 index 0000000000..937bf68d02 --- /dev/null +++ b/patches/unapplied/server/build-replace-use-of-shadow-plugin.patch @@ -0,0 +1,44 @@ +From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 +From: Jason Penilla <11360596+jpenilla@users.noreply.github.com> +Date: Thu, 18 Apr 2024 13:39:18 -0700 +Subject: [PATCH] build: replace use of shadow plugin + + +diff --git a/build.gradle.kts b/build.gradle.kts +index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644 +--- a/build.gradle.kts ++++ b/build.gradle.kts +@@ -0,0 +0,0 @@ import xyz.jpenilla.runpaper.task.RunServer + plugins { + java + `maven-publish` +- id("com.github.johnrengelman.shadow") + id("xyz.jpenilla.run-paper") version "2.2.3" apply false + } + +@@ -0,0 +0,0 @@ publishing { + } + } + +-tasks.shadowJar { +- configurations = listOf(project.configurations.vanillaServer.get(), alsoShade) ++tasks.serverJar { ++ from(alsoShade.elements.map { ++ it.map { f -> ++ if (f.asFile.isFile) { ++ zipTree(f.asFile) ++ } else { ++ f.asFile ++ } ++ } ++ }) + } + + // Paper start + val scanJar = tasks.register("scanJarForBadCalls", io.papermc.paperweight.tasks.ScanJarForBadCalls::class) { + badAnnotations.add("Lio/papermc/paper/annotation/DoNotUse;") +- jarToScan.set(tasks.shadowJar.flatMap { it.archiveFile }) ++ jarToScan.set(tasks.serverJar.flatMap { it.archiveFile }) + classpath.from(configurations.compileClasspath) + } + tasks.check {