Plugin remapping

Co-authored-by: Nassim Jahnke <nassim@njahnke.dev>
Jason Penilla 2022-10-29 15:22:32 -07:00
parent 216388dfdf
commit 13e0a1a71e
22 changed files with 1691 additions and 70 deletions


@ -62,6 +62,7 @@ dependencies {
testImplementation("org.ow2.asm:asm-tree:9.7.1")
testImplementation("org.junit-pioneer:junit-pioneer:2.2.0") // Paper - CartesianTest
implementation("net.neoforged:srgutils:1.0.9") // Paper - mappings handling
implementation("net.neoforged:AutoRenamingTool:2.0.3") // Paper - remap plugins
}
paperweight {
@ -189,20 +190,41 @@ val runtimeClasspathWithoutVanillaServer = configurations.runtimeClasspath.flatM
runtime.filterNot { it.asFile.absolutePath == vanilla }
}
tasks.registerRunTask("runServerJar") {
description = "Spin up a test server from the serverJar archiveFile"
classpath(tasks.serverJar.flatMap { it.archiveFile })
tasks.registerRunTask("runServer") {
description = "Spin up a test server from the Mojang mapped server jar"
classpath(tasks.includeMappings.flatMap { it.outputJar })
classpath(runtimeClasspathWithoutVanillaServer)
}
tasks.registerRunTask("runReobf") {
tasks.registerRunTask("runReobfServer") {
description = "Spin up a test server from the reobfJar output jar"
classpath(tasks.reobfJar.flatMap { it.outputJar })
classpath(runtimeClasspathWithoutVanillaServer)
}
tasks.registerRunTask("runDev") {
description = "Spin up a non-relocated Mojang-mapped test server"
tasks.registerRunTask("runDevServer") {
description = "Spin up a test server without assembling a jar"
classpath(sourceSets.main.map { it.runtimeClasspath })
jvmArgs("-DPaper.pushPaperAssetsRoot=true")
}
tasks.registerRunTask("runBundler") {
description = "Spin up a test server from the Mojang mapped bundler jar"
classpath(rootProject.tasks.named<io.papermc.paperweight.tasks.CreateBundlerJar>("createMojmapBundlerJar").flatMap { it.outputZip })
mainClass.set(null as String?)
}
tasks.registerRunTask("runReobfBundler") {
description = "Spin up a test server from the reobf bundler jar"
classpath(rootProject.tasks.named<io.papermc.paperweight.tasks.CreateBundlerJar>("createReobfBundlerJar").flatMap { it.outputZip })
mainClass.set(null as String?)
}
tasks.registerRunTask("runPaperclip") {
description = "Spin up a test server from the Mojang mapped Paperclip jar"
classpath(rootProject.tasks.named<io.papermc.paperweight.tasks.CreatePaperclipJar>("createMojmapPaperclipJar").flatMap { it.outputZip })
mainClass.set(null as String?)
}
tasks.registerRunTask("runReobfPaperclip") {
description = "Spin up a test server from the reobf Paperclip jar"
classpath(rootProject.tasks.named<io.papermc.paperweight.tasks.CreatePaperclipJar>("createReobfPaperclipJar").flatMap { it.outputZip })
mainClass.set(null as String?)
}
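
Each of these registered run tasks can be launched straight from Gradle; for example, to spin up the Mojang-mapped dev server without assembling a jar, something like:

./gradlew runDevServer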


@ -268,7 +268,7 @@
if (profiledduration != null) {
profiledduration.finish(true);
}
@@ -387,23 +459,217 @@
@@ -387,23 +459,218 @@
protected void forceDifficulty() {}
@ -484,6 +484,7 @@
+ }
+
+ this.server.enablePlugins(org.bukkit.plugin.PluginLoadOrder.POSTWORLD);
+ if (io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper != null) io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper.pluginsEnabled(); // Paper - Remap plugins
+ this.server.getPluginManager().callEvent(new ServerLoadEvent(ServerLoadEvent.LoadType.STARTUP));
+ this.connection.acceptConnections();
+ }
@ -500,7 +501,7 @@
if (!iworlddataserver.isInitialized()) {
try {
@@ -427,30 +693,8 @@
@@ -427,30 +694,8 @@
iworlddataserver.setInitialized(true);
}
@ -532,7 +533,7 @@
private static void setInitialSpawn(ServerLevel world, ServerLevelData worldProperties, boolean bonusChest, boolean debugWorld) {
if (debugWorld) {
@@ -458,6 +702,21 @@
@@ -458,6 +703,21 @@
} else {
ServerChunkCache chunkproviderserver = world.getChunkSource();
ChunkPos chunkcoordintpair = new ChunkPos(chunkproviderserver.randomState().sampler().findSpawnPosition());
@ -554,7 +555,7 @@
int i = chunkproviderserver.getGenerator().getSpawnHeight(world);
if (i < world.getMinY()) {
@@ -516,31 +775,36 @@
@@ -516,31 +776,36 @@
iworlddataserver.setGameType(GameType.SPECTATOR);
}
@ -602,7 +603,7 @@
ForcedChunksSavedData forcedchunk = (ForcedChunksSavedData) worldserver1.getDataStorage().get(ForcedChunksSavedData.factory(), "chunks");
if (forcedchunk != null) {
@@ -555,10 +819,17 @@
@@ -555,10 +820,17 @@
}
}
@ -624,7 +625,7 @@
}
public GameType getDefaultGameType() {
@@ -588,12 +859,16 @@
@@ -588,12 +860,16 @@
worldserver.save((ProgressListener) null, flush, worldserver.noSave && !force);
}
@ -643,7 +644,7 @@
if (flush) {
Iterator iterator1 = this.getAllLevels().iterator();
@@ -628,18 +903,40 @@
@@ -628,18 +904,41 @@
this.stopServer();
}
@ -674,6 +675,7 @@
+ this.server.disablePlugins();
+ }
+ // CraftBukkit end
+ if (io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper != null) io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper.shutdown(); // Paper - Plugin remapping
this.getConnection().stop();
this.isSaving = true;
if (this.playerList != null) {
@ -684,7 +686,7 @@
}
MinecraftServer.LOGGER.info("Saving worlds");
@@ -693,6 +990,15 @@
@@ -693,6 +992,15 @@
} catch (IOException ioexception1) {
MinecraftServer.LOGGER.error("Failed to unlock level {}", this.storageSource.getLevelId(), ioexception1);
}
@ -700,23 +702,21 @@
}
@@ -717,8 +1023,15 @@
MinecraftServer.LOGGER.error("Error while shutting down", interruptedexception);
}
}
+
+ }
@@ -720,6 +1028,13 @@
}
+ // Spigot Start
+ private static double calcTps(double avg, double exp, double tps)
+ {
+ return ( avg * exp ) + ( tps * ( 1 - exp ) );
}
+ }
+ // Spigot End
+
protected void runServer() {
try {
@@ -727,9 +1040,12 @@
if (!this.initServer()) {
@@ -727,9 +1042,12 @@
}
this.nextTickTimeNanos = Util.getNanos();
@ -730,7 +730,7 @@
while (this.running) {
long i;
@@ -744,11 +1060,23 @@
@@ -744,11 +1062,23 @@
if (j > MinecraftServer.OVERLOADED_THRESHOLD_NANOS + 20L * i && this.nextTickTimeNanos - this.lastOverloadWarningNanos >= MinecraftServer.OVERLOADED_WARNING_INTERVAL_NANOS + 100L * i) {
long k = j / i;
@ -754,7 +754,7 @@
boolean flag = i == 0L;
@@ -757,6 +1085,7 @@
@@ -757,6 +1087,7 @@
this.debugCommandProfiler = new MinecraftServer.TimeProfiler(Util.getNanos(), this.tickCount);
}
@ -762,7 +762,7 @@
this.nextTickTimeNanos += i;
try {
@@ -830,6 +1159,13 @@
@@ -830,6 +1161,13 @@
this.services.profileCache().clearExecutor();
}
@ -776,7 +776,7 @@
this.onServerExit();
}
@@ -889,9 +1225,16 @@
@@ -889,9 +1227,16 @@
}
private boolean haveTime() {
@ -794,7 +794,7 @@
public static boolean throwIfFatalException() {
RuntimeException runtimeexception = (RuntimeException) MinecraftServer.fatalException.get();
@@ -903,7 +1246,7 @@
@@ -903,7 +1248,7 @@
}
public static void setFatalException(RuntimeException exception) {
@ -803,7 +803,7 @@
}
@Override
@@ -977,7 +1320,7 @@
@@ -977,7 +1322,7 @@
}
}
@ -812,7 +812,7 @@
Profiler.get().incrementCounter("runTask");
super.doRunTask(ticktask);
}
@@ -1025,6 +1368,7 @@
@@ -1025,6 +1370,7 @@
}
public void tickServer(BooleanSupplier shouldKeepTicking) {
@ -820,7 +820,7 @@
long i = Util.getNanos();
int j = this.pauseWhileEmptySeconds() * 20;
@@ -1041,11 +1385,13 @@
@@ -1041,11 +1387,13 @@
this.autoSave();
}
@ -834,7 +834,7 @@
++this.tickCount;
this.tickRateManager.tick();
this.tickChildren(shouldKeepTicking);
@@ -1055,7 +1401,7 @@
@@ -1055,7 +1403,7 @@
}
--this.ticksUntilAutosave;
@ -843,7 +843,7 @@
this.autoSave();
}
@@ -1071,10 +1417,13 @@
@@ -1071,10 +1419,13 @@
this.smoothedTickTimeMillis = this.smoothedTickTimeMillis * 0.8F + (float) k / (float) TimeUtil.NANOSECONDS_PER_MILLISECOND * 0.19999999F;
this.logTickMethodTime(i);
gameprofilerfiller.pop();
@ -858,7 +858,7 @@
MinecraftServer.LOGGER.debug("Autosave started");
ProfilerFiller gameprofilerfiller = Profiler.get();
@@ -1082,6 +1431,7 @@
@@ -1082,6 +1433,7 @@
this.saveEverything(true, false, false);
gameprofilerfiller.pop();
MinecraftServer.LOGGER.debug("Autosave finished");
@ -866,7 +866,7 @@
}
private void logTickMethodTime(long tickStartTime) {
@@ -1123,7 +1473,7 @@
@@ -1123,7 +1475,7 @@
private ServerStatus buildServerStatus() {
ServerStatus.Players serverping_serverpingplayersample = this.buildPlayerStatus();
@ -875,7 +875,7 @@
}
private ServerStatus.Players buildPlayerStatus() {
@@ -1154,11 +1504,35 @@
@@ -1154,11 +1506,35 @@
this.getPlayerList().getPlayers().forEach((entityplayer) -> {
entityplayer.connection.suspendFlushing();
});
@ -911,7 +911,7 @@
while (iterator.hasNext()) {
ServerLevel worldserver = (ServerLevel) iterator.next();
@@ -1167,16 +1541,20 @@
@@ -1167,16 +1543,20 @@
return s + " " + String.valueOf(worldserver.dimension().location());
});
@ -932,7 +932,7 @@
} catch (Throwable throwable) {
CrashReport crashreport = CrashReport.forThrowable(throwable, "Exception ticking world");
@@ -1189,18 +1567,24 @@
@@ -1189,18 +1569,24 @@
}
gameprofilerfiller.popPush("connection");
@ -957,10 +957,12 @@
gameprofilerfiller.popPush("send chunks");
iterator = this.playerList.getPlayers().iterator();
@@ -1267,6 +1651,22 @@
@@ -1265,7 +1651,23 @@
@Nullable
public ServerLevel getLevel(ResourceKey<Level> key) {
return (ServerLevel) this.levels.get(key);
}
+ }
+
+ // CraftBukkit start
+ public void addLevel(ServerLevel level) {
+ Map<ResourceKey<Level>, ServerLevel> oldLevels = this.levels;
@ -974,13 +976,12 @@
+ Map<ResourceKey<Level>, ServerLevel> newLevels = Maps.newLinkedHashMap(oldLevels);
+ newLevels.remove(level.dimension());
+ this.levels = Collections.unmodifiableMap(newLevels);
+ }
}
+ // CraftBukkit end
+
public Set<ResourceKey<Level>> levelKeys() {
return this.levels.keySet();
}
@@ -1296,7 +1696,7 @@
@@ -1296,7 +1698,7 @@
@DontObfuscate
public String getServerModName() {
@ -989,7 +990,7 @@
}
public SystemReport fillSystemReport(SystemReport details) {
@@ -1347,7 +1747,7 @@
@@ -1347,7 +1749,7 @@
@Override
public void sendSystemMessage(Component message) {
@ -998,7 +999,7 @@
}
public KeyPair getKeyPair() {
@@ -1481,10 +1881,20 @@
@@ -1481,10 +1883,20 @@
@Override
public String getMotd() {
@ -1020,7 +1021,7 @@
this.motd = motd;
}
@@ -1507,7 +1917,7 @@
@@ -1507,7 +1919,7 @@
}
public ServerConnectionListener getConnection() {
@ -1029,7 +1030,7 @@
}
public boolean isReady() {
@@ -1634,11 +2044,11 @@
@@ -1634,11 +2046,11 @@
public CompletableFuture<Void> reloadResources(Collection<String> dataPacks) {
CompletableFuture<Void> completablefuture = CompletableFuture.supplyAsync(() -> {
@ -1043,7 +1044,7 @@
}, this).thenCompose((immutablelist) -> {
MultiPackResourceManager resourcemanager = new MultiPackResourceManager(PackType.SERVER_DATA, immutablelist);
List<Registry.PendingTags<?>> list = TagLoader.loadTagsForExistingRegistries(resourcemanager, this.registries.compositeAccess());
@@ -1654,6 +2064,7 @@
@@ -1654,6 +2066,7 @@
}).thenAcceptAsync((minecraftserver_reloadableresources) -> {
this.resources.close();
this.resources = minecraftserver_reloadableresources;
@ -1051,7 +1052,7 @@
this.packRepository.setSelected(dataPacks);
WorldDataConfiguration worlddataconfiguration = new WorldDataConfiguration(MinecraftServer.getSelectedPacks(this.packRepository, true), this.worldData.enabledFeatures());
@@ -1952,7 +2363,7 @@
@@ -1952,7 +2365,7 @@
final List<String> list = Lists.newArrayList();
final GameRules gamerules = this.getGameRules();
@ -1060,7 +1061,7 @@
@Override
public <T extends GameRules.Value<T>> void visit(GameRules.Key<T> key, GameRules.Type<T> type) {
list.add(String.format(Locale.ROOT, "%s=%s\n", key.getId(), gamerules.getRule(key)));
@@ -2058,7 +2469,7 @@
@@ -2058,7 +2471,7 @@
try {
label51:
{
@ -1069,7 +1070,7 @@
try {
arraylist = Lists.newArrayList(NativeModuleLister.listModules());
@@ -2105,9 +2516,25 @@
@@ -2105,9 +2518,25 @@
if (bufferedwriter != null) {
bufferedwriter.close();
}
@ -1095,7 +1096,7 @@
private ProfilerFiller createProfiler() {
if (this.willStartRecordingMetrics) {
this.metricsRecorder = ActiveMetricsRecorder.createStarted(new ServerMetricsSamplersProvider(Util.timeSource, this.isDedicatedServer()), Util.timeSource, Util.ioPool(), new MetricsPersister("server"), this.onMetricsRecordingStopped, (path) -> {
@@ -2225,18 +2652,24 @@
@@ -2225,18 +2654,24 @@
}
public void logChatMessage(Component message, ChatType.Bound params, @Nullable String prefix) {


@ -229,7 +229,20 @@
Thread thread1 = new Thread(new ServerWatchdog(this));
thread1.setUncaughtExceptionHandler(new DefaultUncaughtExceptionHandlerWithName(DedicatedServer.LOGGER));
@@ -293,6 +396,7 @@
@@ -215,6 +318,12 @@
}
}
+ // Paper start
+ public java.io.File getPluginsFolder() {
+ return (java.io.File) this.options.valueOf("plugins");
+ }
+ // Paper end
+
@Override
public boolean isSpawningMonsters() {
return this.settings.getProperties().spawnMonsters && super.isSpawningMonsters();
@@ -293,6 +402,7 @@
this.queryThreadGs4.stop();
}
@ -237,7 +250,7 @@
}
@Override
@@ -302,8 +406,8 @@
@@ -302,8 +412,8 @@
}
@Override
@ -248,7 +261,7 @@
}
public void handleConsoleInput(String command, CommandSourceStack commandSource) {
@@ -311,12 +415,22 @@
@@ -311,12 +421,22 @@
}
public void handleConsoleInputs() {
@ -272,7 +285,7 @@
}
@Override
@@ -383,7 +497,7 @@
@@ -383,7 +503,7 @@
@Override
public boolean isUnderSpawnProtection(ServerLevel world, BlockPos pos, Player player) {
@ -281,7 +294,7 @@
return false;
} else if (this.getPlayerList().getOps().isEmpty()) {
return false;
@@ -541,16 +655,52 @@
@@ -541,16 +661,52 @@
@Override
public String getPluginNames() {
@ -338,7 +351,7 @@
}
public void storeUsingWhiteList(boolean useWhitelist) {
@@ -660,4 +810,15 @@
@@ -660,4 +816,15 @@
}
}
}


@ -6,10 +6,12 @@ import io.papermc.paper.plugin.entrypoint.Entrypoint;
import io.papermc.paper.plugin.entrypoint.LaunchEntryPointHandler;
import io.papermc.paper.plugin.provider.PluginProvider;
import io.papermc.paper.plugin.provider.type.paper.PaperPluginParent;
import io.papermc.paper.pluginremap.PluginRemapper;
import java.util.function.Function;
import joptsimple.OptionSet;
import net.minecraft.server.dedicated.DedicatedServer;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.craftbukkit.CraftServer;
import org.bukkit.plugin.java.LibraryLoader;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.slf4j.Logger;
@ -25,10 +27,15 @@ public class PluginInitializerManager {
private static PluginInitializerManager impl;
private final Path pluginDirectory;
private final Path updateDirectory;
public final io.papermc.paper.pluginremap.@org.checkerframework.checker.nullness.qual.MonotonicNonNull PluginRemapper pluginRemapper; // Paper
PluginInitializerManager(final Path pluginDirectory, final Path updateDirectory) {
this.pluginDirectory = pluginDirectory;
this.updateDirectory = updateDirectory;
this.pluginRemapper = Boolean.getBoolean("paper.disablePluginRemapping")
? null
: PluginRemapper.create(pluginDirectory);
LibraryLoader.REMAPPER = this.pluginRemapper == null ? Function.identity() : this.pluginRemapper::remapLibraries;
}
private static PluginInitializerManager parse(@NotNull final OptionSet minecraftOptionSet) throws Exception {
@ -96,6 +103,7 @@ public class PluginInitializerManager {
public static void load(OptionSet optionSet) throws Exception {
// We have to load the Bukkit configuration in order to get the update folder location.
io.papermc.paper.plugin.PluginInitializerManager pluginSystem = io.papermc.paper.plugin.PluginInitializerManager.init(optionSet);
if (pluginSystem.pluginRemapper != null) pluginSystem.pluginRemapper.loadingPlugins();
// Register the default plugin directory
io.papermc.paper.plugin.util.EntrypointUtil.registerProvidersFromSource(io.papermc.paper.plugin.provider.source.DirectoryProviderSource.INSTANCE, pluginSystem.pluginDirectoryPath());
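
Given the constructor above, plugin remapping is skipped entirely when the paper.disablePluginRemapping system property is set, so a server owner could presumably opt out with a JVM flag along the lines of:

java -Dpaper.disablePluginRemapping=true -jar <server jar>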


@ -1,5 +1,6 @@
package io.papermc.paper.plugin.loader;
import io.papermc.paper.plugin.PluginInitializerManager;
import io.papermc.paper.plugin.bootstrap.PluginProviderContext;
import io.papermc.paper.plugin.loader.library.ClassPathLibrary;
import io.papermc.paper.plugin.loader.library.PaperLibraryStore;
@ -45,9 +46,12 @@ public class PaperClasspathBuilder implements PluginClasspathBuilder {
}
List<Path> paths = paperLibraryStore.getPaths();
if (PluginInitializerManager.instance().pluginRemapper != null) {
paths = PluginInitializerManager.instance().pluginRemapper.remapLibraries(paths);
}
URL[] urls = new URL[paths.size()];
for (int i = 0; i < paths.size(); i++) {
Path path = paperLibraryStore.getPaths().get(i);
Path path = paths.get(i);
try {
urls[i] = path.toUri().toURL();
} catch (MalformedURLException e) {


@ -17,7 +17,7 @@ import org.slf4j.Logger;
public class DirectoryProviderSource implements ProviderSource<Path, List<Path>> {
public static final DirectoryProviderSource INSTANCE = new DirectoryProviderSource();
private static final FileProviderSource FILE_PROVIDER_SOURCE = new FileProviderSource("Directory '%s'"::formatted);
private static final FileProviderSource FILE_PROVIDER_SOURCE = new FileProviderSource("Directory '%s'"::formatted, false); // Paper - Remap plugins
private static final Logger LOGGER = LogUtils.getClassLogger();
@Override
@ -37,6 +37,11 @@ public class DirectoryProviderSource implements ProviderSource<Path, List<Path>>
LOGGER.error("Error preparing plugin context: " + e.getMessage(), e);
}
});
// Paper start - Remap plugins
if (io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper != null) {
return io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper.rewritePluginDirectory(files);
}
// Paper end - Remap plugins
return files;
}


@ -24,9 +24,15 @@ import java.util.jar.JarFile;
public class FileProviderSource implements ProviderSource<Path, Path> {
private final Function<Path, String> contextChecker;
private final boolean applyRemap;
public FileProviderSource(Function<Path, String> contextChecker, boolean applyRemap) {
this.contextChecker = contextChecker;
this.applyRemap = applyRemap;
}
public FileProviderSource(Function<Path, String> contextChecker) {
this.contextChecker = contextChecker;
this(contextChecker, true);
}
@Override
@ -50,6 +56,11 @@ public class FileProviderSource implements ProviderSource<Path, Path> {
} catch (Exception exception) {
throw new RuntimeException(source + " failed to update!", exception);
}
// Paper start - Remap plugins
if (this.applyRemap && io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper != null) {
context = io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper.rewritePlugin(context);
}
// Paper end - Remap plugins
return context;
}


@ -14,7 +14,7 @@ import java.util.List;
public class PluginFlagProviderSource implements ProviderSource<List<Path>, List<Path>> {
public static final PluginFlagProviderSource INSTANCE = new PluginFlagProviderSource();
private static final FileProviderSource FILE_PROVIDER_SOURCE = new FileProviderSource("File '%s' specified through 'add-plugin' argument"::formatted);
private static final FileProviderSource FILE_PROVIDER_SOURCE = new FileProviderSource("File '%s' specified through 'add-plugin' argument"::formatted, false);
private static final Logger LOGGER = LogUtils.getClassLogger();
@Override
@ -27,6 +27,11 @@ public class PluginFlagProviderSource implements ProviderSource<List<Path>, List
LOGGER.error("Error preparing plugin context: " + e.getMessage(), e);
}
}
// Paper start - Remap plugins
if (io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper != null && !files.isEmpty()) {
return io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper.rewriteExtraPlugins(files);
}
// Paper end - Remap plugins
return files;
}


@ -22,9 +22,10 @@ import java.util.jar.JarFile;
*/
public abstract class PluginFileType<T, C extends PluginMeta> {
public static final String PAPER_PLUGIN_YML = "paper-plugin.yml";
private static final List<String> CONFIG_TYPES = new ArrayList<>();
public static final PluginFileType<PaperPluginParent, PaperPluginMeta> PAPER = new PluginFileType<>("paper-plugin.yml", PaperPluginParent.FACTORY) {
public static final PluginFileType<PaperPluginParent, PaperPluginMeta> PAPER = new PluginFileType<>(PAPER_PLUGIN_YML, PaperPluginParent.FACTORY) {
@Override
protected void register(EntrypointHandler entrypointHandler, PaperPluginParent parent) {
PaperPluginParent.PaperBootstrapProvider bootstrapPluginProvider = null;


@ -0,0 +1,63 @@
package io.papermc.paper.pluginremap;
import java.io.IOException;
import java.io.PrintWriter;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.function.Consumer;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.checkerframework.framework.qual.DefaultQualifier;
/**
* {@link PrintWriter}-backed logger implementation for use with {@link net.neoforged.art.api.Renamer} which
* only opens the backing writer and logs messages when {@link PluginRemapper#DEBUG_LOGGING} is enabled
* (i.e. when the Paper.PluginRemapperDebug system property is set to true).
*/
@DefaultQualifier(NonNull.class)
final class DebugLogger implements Consumer<String>, AutoCloseable {
private final @Nullable PrintWriter writer;
DebugLogger(final Path logFile) {
try {
this.writer = createWriter(logFile);
} catch (final IOException ex) {
throw new RuntimeException("Failed to initialize DebugLogger for file '" + logFile + "'", ex);
}
}
@Override
public void accept(final String line) {
this.useWriter(writer -> writer.println(line));
}
@Override
public void close() {
this.useWriter(PrintWriter::close);
}
private void useWriter(final Consumer<PrintWriter> op) {
final @Nullable PrintWriter writer = this.writer;
if (writer != null) {
op.accept(writer);
}
}
Consumer<String> debug() {
return line -> this.accept("[debug]: " + line);
}
static DebugLogger forOutputFile(final Path outputFile) {
return new DebugLogger(outputFile.resolveSibling(outputFile.getFileName() + ".log"));
}
private static @Nullable PrintWriter createWriter(final Path logFile) throws IOException {
if (!PluginRemapper.DEBUG_LOGGING) {
return null;
}
if (!Files.exists(logFile.getParent())) {
Files.createDirectories(logFile.getParent());
}
return new PrintWriter(logFile.toFile());
}
}


@ -0,0 +1,69 @@
package io.papermc.paper.pluginremap;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import java.util.jar.Attributes;
import java.util.jar.Manifest;
import net.neoforged.art.api.Transformer;
final class InsertManifestAttribute implements Transformer {
static final String PAPERWEIGHT_NAMESPACE_MANIFEST_KEY = "paperweight-mappings-namespace";
static final String MOJANG_NAMESPACE = "mojang";
static final String MOJANG_PLUS_YARN_NAMESPACE = "mojang+yarn";
static final String SPIGOT_NAMESPACE = "spigot";
static final Set<String> KNOWN_NAMESPACES = Set.of(MOJANG_NAMESPACE, MOJANG_PLUS_YARN_NAMESPACE, SPIGOT_NAMESPACE);
private final String mainAttributesKey;
private final String namespace;
private final boolean createIfMissing;
private volatile boolean visitedManifest = false;
static Transformer addNamespaceManifestAttribute(final String namespace) {
return new InsertManifestAttribute(PAPERWEIGHT_NAMESPACE_MANIFEST_KEY, namespace, true);
}
InsertManifestAttribute(
final String mainAttributesKey,
final String namespace,
final boolean createIfMissing
) {
this.mainAttributesKey = mainAttributesKey;
this.namespace = namespace;
this.createIfMissing = createIfMissing;
}
@Override
public ManifestEntry process(final ManifestEntry entry) {
this.visitedManifest = true;
try {
final Manifest manifest = new Manifest(new ByteArrayInputStream(entry.getData()));
manifest.getMainAttributes().putValue(this.mainAttributesKey, this.namespace);
final ByteArrayOutputStream out = new ByteArrayOutputStream();
manifest.write(out);
return ManifestEntry.create(Entry.STABLE_TIMESTAMP, out.toByteArray());
} catch (final IOException e) {
throw new RuntimeException("Failed to modify manifest", e);
}
}
@Override
public Collection<? extends Entry> getExtras() {
if (!this.visitedManifest && this.createIfMissing) {
final Manifest manifest = new Manifest();
manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0");
manifest.getMainAttributes().putValue(this.mainAttributesKey, this.namespace);
final ByteArrayOutputStream out = new ByteArrayOutputStream();
try {
manifest.write(out);
} catch (final IOException e) {
throw new RuntimeException("Failed to write manifest", e);
}
return List.of(ManifestEntry.create(Entry.STABLE_TIMESTAMP, out.toByteArray()));
}
return Transformer.super.getExtras();
}
}
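
As a rough illustration of what the transformer above produces, a hypothetical helper (not part of this commit) could read the attribute back out of a remapped jar; the jar path is a placeholder and the literal key mirrors PAPERWEIGHT_NAMESPACE_MANIFEST_KEY:

import java.io.IOException;
import java.util.jar.JarFile;
import java.util.jar.Manifest;

final class NamespaceCheckSketch {
    static String namespaceOf(final String jarPath) throws IOException {
        try (final JarFile jar = new JarFile(jarPath)) {
            final Manifest manifest = jar.getManifest();
            // Remapped jars carry "mojang+yarn"; plugin authors may declare "spigot" or "mojang" themselves.
            return manifest == null ? null : manifest.getMainAttributes().getValue("paperweight-mappings-namespace");
        }
    }
}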


@ -0,0 +1,438 @@
package io.papermc.paper.pluginremap;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.mojang.logging.LogUtils;
import io.papermc.paper.plugin.provider.type.PluginFileType;
import io.papermc.paper.util.AtomicFiles;
import io.papermc.paper.util.MappingEnvironment;
import io.papermc.paper.util.concurrent.ScalingThreadPool;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.function.Predicate;
import java.util.function.Supplier;
import java.util.jar.Manifest;
import java.util.stream.Stream;
import net.minecraft.DefaultUncaughtExceptionHandlerWithName;
import net.minecraft.util.ExceptionCollector;
import net.neoforged.art.api.Renamer;
import net.neoforged.art.api.SignatureStripperConfig;
import net.neoforged.art.api.Transformer;
import net.neoforged.srgutils.IMappingFile;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.checkerframework.framework.qual.DefaultQualifier;
import org.slf4j.Logger;
import static io.papermc.paper.pluginremap.InsertManifestAttribute.addNamespaceManifestAttribute;
@DefaultQualifier(NonNull.class)
public final class PluginRemapper {
public static final boolean DEBUG_LOGGING = Boolean.getBoolean("Paper.PluginRemapperDebug");
private static final String PAPER_REMAPPED = ".paper-remapped";
private static final String UNKNOWN_ORIGIN = "unknown-origin";
private static final String LIBRARIES = "libraries";
private static final String EXTRA_PLUGINS = "extra-plugins";
private static final String REMAP_CLASSPATH = "remap-classpath";
private static final String REVERSED_MAPPINGS = "mappings/reversed";
private static final Logger LOGGER = LogUtils.getClassLogger();
private final ExecutorService threadPool;
private final ReobfServer reobf;
private final RemappedPluginIndex remappedPlugins;
private final RemappedPluginIndex extraPlugins;
private final UnknownOriginRemappedPluginIndex unknownOrigin;
private final UnknownOriginRemappedPluginIndex libraries;
private @Nullable CompletableFuture<IMappingFile> reversedMappings;
public PluginRemapper(final Path pluginsDir) {
this.threadPool = createThreadPool();
final CompletableFuture<IMappingFile> mappings = CompletableFuture.supplyAsync(PluginRemapper::loadReobfMappings, this.threadPool);
final Path remappedPlugins = pluginsDir.resolve(PAPER_REMAPPED);
this.reversedMappings = this.reversedMappingsFuture(() -> mappings, remappedPlugins, this.threadPool);
this.reobf = new ReobfServer(remappedPlugins.resolve(REMAP_CLASSPATH), mappings, this.threadPool);
this.remappedPlugins = new RemappedPluginIndex(remappedPlugins, false);
this.extraPlugins = new RemappedPluginIndex(this.remappedPlugins.dir().resolve(EXTRA_PLUGINS), true);
this.unknownOrigin = new UnknownOriginRemappedPluginIndex(this.remappedPlugins.dir().resolve(UNKNOWN_ORIGIN));
this.libraries = new UnknownOriginRemappedPluginIndex(this.remappedPlugins.dir().resolve(LIBRARIES));
}
public static @Nullable PluginRemapper create(final Path pluginsDir) {
if (MappingEnvironment.reobf() || !MappingEnvironment.hasMappings()) {
return null;
}
return new PluginRemapper(pluginsDir);
}
public void shutdown() {
this.threadPool.shutdown();
this.save(true);
boolean didShutdown;
try {
didShutdown = this.threadPool.awaitTermination(3L, TimeUnit.SECONDS);
} catch (final InterruptedException ex) {
didShutdown = false;
}
if (!didShutdown) {
this.threadPool.shutdownNow();
}
}
public void save(final boolean clean) {
this.remappedPlugins.write();
this.extraPlugins.write();
this.unknownOrigin.write(clean);
this.libraries.write(clean);
}
// Called on startup and reload
public void loadingPlugins() {
if (this.reversedMappings == null) {
this.reversedMappings = this.reversedMappingsFuture(
() -> CompletableFuture.supplyAsync(PluginRemapper::loadReobfMappings, this.threadPool),
this.remappedPlugins.dir(),
this.threadPool
);
}
}
// Called after all plugins enabled during startup/reload
public void pluginsEnabled() {
this.reversedMappings = null;
this.save(false);
}
public List<Path> remapLibraries(final List<Path> libraries) {
final List<CompletableFuture<Path>> tasks = new ArrayList<>();
for (final Path lib : libraries) {
if (!lib.getFileName().toString().endsWith(".jar")) {
if (DEBUG_LOGGING) {
LOGGER.info("Library '{}' is not a jar.", lib);
}
tasks.add(CompletableFuture.completedFuture(lib));
continue;
}
final @Nullable Path cached = this.libraries.getIfPresent(lib);
if (cached != null) {
if (DEBUG_LOGGING) {
LOGGER.info("Library '{}' has not changed since last remap.", lib);
}
tasks.add(CompletableFuture.completedFuture(cached));
continue;
}
tasks.add(this.remapLibrary(this.libraries, lib));
}
return waitForAll(tasks);
}
public Path rewritePlugin(final Path plugin) {
// Already remapped
if (plugin.getParent().equals(this.remappedPlugins.dir())
|| plugin.getParent().equals(this.extraPlugins.dir())) {
return plugin;
}
final @Nullable Path cached = this.unknownOrigin.getIfPresent(plugin);
if (cached != null) {
if (DEBUG_LOGGING) {
LOGGER.info("Plugin '{}' has not changed since last remap.", plugin);
}
return cached;
}
return this.remapPlugin(this.unknownOrigin, plugin).join();
}
public List<Path> rewriteExtraPlugins(final List<Path> plugins) {
final @Nullable List<Path> allCached = this.extraPlugins.getAllIfPresent(plugins);
if (allCached != null) {
if (DEBUG_LOGGING) {
LOGGER.info("All extra plugins have a remapped variant cached.");
}
return allCached;
}
final List<CompletableFuture<Path>> tasks = new ArrayList<>();
for (final Path file : plugins) {
final @Nullable Path cached = this.extraPlugins.getIfPresent(file);
if (cached != null) {
if (DEBUG_LOGGING) {
LOGGER.info("Extra plugin '{}' has not changed since last remap.", file);
}
tasks.add(CompletableFuture.completedFuture(cached));
continue;
}
tasks.add(this.remapPlugin(this.extraPlugins, file));
}
return waitForAll(tasks);
}
public List<Path> rewritePluginDirectory(final List<Path> jars) {
final @Nullable List<Path> remappedJars = this.remappedPlugins.getAllIfPresent(jars);
if (remappedJars != null) {
if (DEBUG_LOGGING) {
LOGGER.info("All plugins have a remapped variant cached.");
}
return remappedJars;
}
final List<CompletableFuture<Path>> tasks = new ArrayList<>();
for (final Path file : jars) {
final @Nullable Path existingFile = this.remappedPlugins.getIfPresent(file);
if (existingFile != null) {
if (DEBUG_LOGGING) {
LOGGER.info("Plugin '{}' has not changed since last remap.", file);
}
tasks.add(CompletableFuture.completedFuture(existingFile));
continue;
}
tasks.add(this.remapPlugin(this.remappedPlugins, file));
}
return waitForAll(tasks);
}
private static IMappingFile reverse(final IMappingFile mappings) {
if (DEBUG_LOGGING) {
LOGGER.info("Reversing mappings...");
}
final long start = System.currentTimeMillis();
final IMappingFile reversed = mappings.reverse();
if (DEBUG_LOGGING) {
LOGGER.info("Done reversing mappings in {}ms.", System.currentTimeMillis() - start);
}
return reversed;
}
private CompletableFuture<IMappingFile> reversedMappingsFuture(
final Supplier<CompletableFuture<IMappingFile>> mappingsFuture,
final Path remappedPlugins,
final Executor executor
) {
return CompletableFuture.supplyAsync(() -> {
try {
final String mappingsHash = MappingEnvironment.mappingsHash();
final String fName = mappingsHash + ".tiny";
final Path reversedMappings1 = remappedPlugins.resolve(REVERSED_MAPPINGS);
final Path file = reversedMappings1.resolve(fName);
if (Files.isDirectory(reversedMappings1)) {
if (Files.isRegularFile(file)) {
return CompletableFuture.completedFuture(
loadMappings("Reversed", Files.newInputStream(file))
);
} else {
for (final Path oldFile : list(reversedMappings1, Files::isRegularFile)) {
Files.delete(oldFile);
}
}
} else {
Files.createDirectories(reversedMappings1);
}
return mappingsFuture.get().thenApply(loadedMappings -> {
final IMappingFile reversed = reverse(loadedMappings);
try {
AtomicFiles.atomicWrite(file, writeTo -> {
reversed.write(writeTo, IMappingFile.Format.TINY, false);
});
} catch (final IOException e) {
throw new RuntimeException("Failed to write reversed mappings", e);
}
return reversed;
});
} catch (final IOException e) {
throw new RuntimeException("Failed to load reversed mappings", e);
}
}, executor).thenCompose(f -> f);
}
private CompletableFuture<Path> remapPlugin(
final RemappedPluginIndex index,
final Path inputFile
) {
return this.remap(index, inputFile, false);
}
private CompletableFuture<Path> remapLibrary(
final RemappedPluginIndex index,
final Path inputFile
) {
return this.remap(index, inputFile, true);
}
/**
* Remaps the given plugin or library jar if necessary.
*
* @param index remapped plugin index
* @param inputFile input file
* @param library whether the input is a library rather than a plugin
* @return future completing with the remapped file, or with {@code inputFile} if no remapping was necessary
*/
private CompletableFuture<Path> remap(
final RemappedPluginIndex index,
final Path inputFile,
final boolean library
) {
final Path destination = index.input(inputFile);
try (final FileSystem fs = FileSystems.newFileSystem(inputFile, new HashMap<>())) {
// Leave dummy files if no remapping is required, so that we can check if they exist without copying the whole file
final Path manifestPath = fs.getPath("META-INF/MANIFEST.MF");
final @Nullable String ns;
if (Files.exists(manifestPath)) {
final Manifest manifest;
try (final InputStream in = new BufferedInputStream(Files.newInputStream(manifestPath))) {
manifest = new Manifest(in);
}
ns = manifest.getMainAttributes().getValue(InsertManifestAttribute.PAPERWEIGHT_NAMESPACE_MANIFEST_KEY);
} else {
ns = null;
}
if (ns != null && !InsertManifestAttribute.KNOWN_NAMESPACES.contains(ns)) {
throw new RuntimeException("Failed to remap plugin " + inputFile + " with unknown mapping namespace '" + ns + "'");
}
final boolean mojangMappedManifest = ns != null && (ns.equals(InsertManifestAttribute.MOJANG_NAMESPACE) || ns.equals(InsertManifestAttribute.MOJANG_PLUS_YARN_NAMESPACE));
if (library) {
if (mojangMappedManifest) {
if (DEBUG_LOGGING) {
LOGGER.info("Library '{}' is already Mojang mapped.", inputFile);
}
index.skip(inputFile);
return CompletableFuture.completedFuture(inputFile);
} else if (ns == null) {
if (DEBUG_LOGGING) {
LOGGER.info("Library '{}' does not specify a mappings namespace (not remapping).", inputFile);
}
index.skip(inputFile);
return CompletableFuture.completedFuture(inputFile);
}
} else {
if (mojangMappedManifest) {
if (DEBUG_LOGGING) {
LOGGER.info("Plugin '{}' is already Mojang mapped.", inputFile);
}
index.skip(inputFile);
return CompletableFuture.completedFuture(inputFile);
} else if (ns == null && Files.exists(fs.getPath(PluginFileType.PAPER_PLUGIN_YML))) {
if (DEBUG_LOGGING) {
LOGGER.info("Plugin '{}' is a Paper plugin with no namespace specified.", inputFile);
}
index.skip(inputFile);
return CompletableFuture.completedFuture(inputFile);
}
}
} catch (final IOException ex) {
return CompletableFuture.failedFuture(new RuntimeException("Failed to open plugin jar " + inputFile, ex));
}
return this.reobf.remapped().thenApplyAsync(reobfServer -> {
LOGGER.info("Remapping {} '{}'...", library ? "library" : "plugin", inputFile);
final long start = System.currentTimeMillis();
try (final DebugLogger logger = DebugLogger.forOutputFile(destination)) {
try (final Renamer renamer = Renamer.builder()
.add(Transformer.renamerFactory(this.mappings(), false))
.add(addNamespaceManifestAttribute(InsertManifestAttribute.MOJANG_PLUS_YARN_NAMESPACE))
.add(Transformer.signatureStripperFactory(SignatureStripperConfig.ALL))
.lib(reobfServer.toFile())
.threads(1)
.logger(logger)
.debug(logger.debug())
.build()) {
renamer.run(inputFile.toFile(), destination.toFile());
}
} catch (final Exception ex) {
throw new RuntimeException("Failed to remap plugin jar '" + inputFile + "'", ex);
}
LOGGER.info("Done remapping {} '{}' in {}ms.", library ? "library" : "plugin", inputFile, System.currentTimeMillis() - start);
return destination;
}, this.threadPool);
}
private IMappingFile mappings() {
final @Nullable CompletableFuture<IMappingFile> mappings = this.reversedMappings;
if (mappings == null) {
return this.reversedMappingsFuture(
() -> CompletableFuture.supplyAsync(PluginRemapper::loadReobfMappings, Runnable::run),
this.remappedPlugins.dir(),
Runnable::run
).join();
}
return mappings.join();
}
private static IMappingFile loadReobfMappings() {
return loadMappings("Reobf", MappingEnvironment.mappingsStream());
}
private static IMappingFile loadMappings(final String name, final InputStream stream) {
try (stream) {
if (DEBUG_LOGGING) {
LOGGER.info("Loading {} mappings...", name);
}
final long start = System.currentTimeMillis();
final IMappingFile load = IMappingFile.load(stream);
if (DEBUG_LOGGING) {
LOGGER.info("Done loading {} mappings in {}ms.", name, System.currentTimeMillis() - start);
}
return load;
} catch (final IOException ex) {
throw new RuntimeException("Failed to load " + name + " mappings", ex);
}
}
static List<Path> list(final Path dir, final Predicate<Path> filter) {
try (final Stream<Path> stream = Files.list(dir)) {
return stream.filter(filter).toList();
} catch (final IOException ex) {
throw new RuntimeException("Failed to list directory '" + dir + "'", ex);
}
}
private static List<Path> waitForAll(final List<CompletableFuture<Path>> tasks) {
final ExceptionCollector<Exception> collector = new ExceptionCollector<>();
final List<Path> ret = new ArrayList<>();
for (final CompletableFuture<Path> task : tasks) {
try {
ret.add(task.join());
} catch (final CompletionException ex) {
collector.add(ex);
}
}
try {
collector.throwIfPresent();
} catch (final Exception ex) {
// Don't hard fail during bootstrap/plugin loading. The plugin(s) in question will be skipped
LOGGER.error("Encountered exception remapping plugins", ex);
}
return ret;
}
private static ThreadPoolExecutor createThreadPool() {
return new ThreadPoolExecutor(
0,
4,
5L,
TimeUnit.SECONDS,
ScalingThreadPool.createUnboundedQueue(),
new ThreadFactoryBuilder()
.setNameFormat("Paper Plugin Remapper Thread - %1$d")
.setUncaughtExceptionHandler(new DefaultUncaughtExceptionHandlerWithName(LOGGER))
.build(),
ScalingThreadPool.defaultReEnqueuePolicy()
);
}
}
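
Reading the path constants and the constructor together, the on-disk cache layout under the plugins folder works out to roughly the following (hashes shown as placeholders):

plugins/.paper-remapped/
    index.json                              index for jars found directly in plugins/
    <remapped plugin jars>
    extra-plugins/index.json                plugins passed via the add-plugin argument
    unknown-origin/index.json               plugins loaded from arbitrary paths
    libraries/index.json                    remapped plugin libraries
    remap-classpath/<mappings hash>.jar     Spigot-mapped server jar used as the remap classpath
    mappings/reversed/<mappings hash>.tiny  cached reversed (Spigot -> Mojang) mappings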


@ -0,0 +1,212 @@
package io.papermc.paper.pluginremap;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.mojang.logging.LogUtils;
import io.papermc.paper.util.Hashing;
import io.papermc.paper.util.MappingEnvironment;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.checkerframework.framework.qual.DefaultQualifier;
import org.slf4j.Logger;
import org.spongepowered.configurate.loader.AtomicFiles;
@DefaultQualifier(NonNull.class)
class RemappedPluginIndex {
private static final Logger LOGGER = LogUtils.getLogger();
private static final Gson GSON = new GsonBuilder()
.setPrettyPrinting()
.create();
private static final String INDEX_FILE_NAME = "index.json";
protected final State state;
private final Path dir;
private final Path indexFile;
private final boolean handleDuplicateFileNames;
// todo maybe hash remapped variants to ensure they haven't changed? probably unneeded
static final class State {
final Map<String, String> hashes = new HashMap<>();
final Set<String> skippedHashes = new HashSet<>();
private final String mappingsHash = MappingEnvironment.mappingsHash();
}
RemappedPluginIndex(final Path dir, final boolean handleDuplicateFileNames) {
this.dir = dir;
this.handleDuplicateFileNames = handleDuplicateFileNames;
if (!Files.exists(this.dir)) {
try {
Files.createDirectories(this.dir);
} catch (final IOException ex) {
throw new RuntimeException(ex);
}
}
this.indexFile = dir.resolve(INDEX_FILE_NAME);
if (Files.isRegularFile(this.indexFile)) {
try {
this.state = this.readIndex();
} catch (final IOException e) {
throw new RuntimeException(e);
}
} else {
this.state = new State();
}
}
private State readIndex() throws IOException {
final State state;
try (final BufferedReader reader = Files.newBufferedReader(this.indexFile)) {
state = GSON.fromJson(reader, State.class);
}
// If mappings have changed, delete all cached files and create a new index
if (!state.mappingsHash.equals(MappingEnvironment.mappingsHash())) {
for (final String fileName : state.hashes.values()) {
Files.deleteIfExists(this.dir.resolve(fileName));
}
return new State();
}
return state;
}
Path dir() {
return this.dir;
}
/**
* Returns a list of cached paths if all of the input paths are present in the cache.
* The returned list may mix cached remapped paths with the original paths of files that were skipped from remapping.
*
* @param paths plugin jar paths to check
* @return null if any of the paths are not present in the cache, otherwise a list of the cached paths
*/
@Nullable List<Path> getAllIfPresent(final List<Path> paths) {
final Map<Path, String> hashCache = new HashMap<>();
final Function<Path, String> inputFileHash = path -> hashCache.computeIfAbsent(path, Hashing::sha256);
// Delete cached entries we no longer need
final Iterator<Map.Entry<String, String>> iterator = this.state.hashes.entrySet().iterator();
while (iterator.hasNext()) {
final Map.Entry<String, String> entry = iterator.next();
final String inputHash = entry.getKey();
final String fileName = entry.getValue();
if (paths.stream().anyMatch(path -> inputFileHash.apply(path).equals(inputHash))) {
// Hash is used, keep it
continue;
}
iterator.remove();
try {
Files.deleteIfExists(this.dir.resolve(fileName));
} catch (final IOException ex) {
throw new RuntimeException(ex);
}
}
// Also clear hashes of skipped files
this.state.skippedHashes.removeIf(hash -> paths.stream().noneMatch(path -> inputFileHash.apply(path).equals(hash)));
final List<Path> ret = new ArrayList<>();
for (final Path path : paths) {
final String inputHash = inputFileHash.apply(path);
if (this.state.skippedHashes.contains(inputHash)) {
// Add the original path
ret.add(path);
continue;
}
final @Nullable Path cached = this.getIfPresent(inputHash);
if (cached == null) {
// Missing the remapped file
return null;
}
ret.add(cached);
}
return ret;
}
private String createCachedFileName(final Path in) {
if (this.handleDuplicateFileNames) {
final String fileName = in.getFileName().toString();
final int i = fileName.lastIndexOf(".jar");
return fileName.substring(0, i) + "-" + System.currentTimeMillis() + ".jar";
}
return in.getFileName().toString();
}
/**
* Returns the given path if the file was previously skipped for remapping, otherwise the cached remapped path or null.
*
* @param in input file
* @return {@code in} if the file was skipped, the cached path if present, otherwise null
*/
@Nullable Path getIfPresent(final Path in) {
final String inHash = Hashing.sha256(in);
if (this.state.skippedHashes.contains(inHash)) {
return in;
}
return this.getIfPresent(inHash);
}
/**
* Returns the cached path if a remapped file is present for the given hash, otherwise null.
*
* @param inHash hash of the input file
* @return the cached path if present, otherwise null
* @see #getIfPresent(Path)
*/
protected @Nullable Path getIfPresent(final String inHash) {
final @Nullable String fileName = this.state.hashes.get(inHash);
if (fileName == null) {
return null;
}
final Path path = this.dir.resolve(fileName);
if (Files.exists(path)) {
return path;
}
return null;
}
Path input(final Path in) {
return this.input(in, Hashing.sha256(in));
}
/**
* Marks the given file as skipped for remapping.
*
* @param in input file
*/
void skip(final Path in) {
this.state.skippedHashes.add(Hashing.sha256(in));
}
protected Path input(final Path in, final String hashString) {
final String name = this.createCachedFileName(in);
this.state.hashes.put(hashString, name);
return this.dir.resolve(name);
}
void write() {
try (final BufferedWriter writer = AtomicFiles.atomicBufferedWriter(this.indexFile, StandardCharsets.UTF_8)) {
GSON.toJson(this.state, writer);
} catch (final IOException ex) {
LOGGER.warn("Failed to write index file '{}'", this.indexFile, ex);
}
}
}
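
A minimal usage sketch of the index's lookup/insert cycle, mirroring how PluginRemapper drives it above; the directory and jar paths are placeholders, and package-private access is assumed:

import java.nio.file.Path;

final class IndexUsageSketch {
    static void example() {
        final RemappedPluginIndex index = new RemappedPluginIndex(Path.of("plugins/.paper-remapped"), false);
        final Path jar = Path.of("plugins/ExamplePlugin.jar");
        final Path cached = index.getIfPresent(jar); // cached remapped jar, the jar itself if skipped, or null
        if (cached == null) {
            final Path destination = index.input(jar); // records the input hash and reserves an output file name
            // ... remap jar into destination (index.skip(jar) is used instead when no remapping is needed) ...
        }
        index.write(); // persists index.json
    }
}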


@ -0,0 +1,92 @@
package io.papermc.paper.pluginremap;
import com.mojang.logging.LogUtils;
import io.papermc.paper.util.AtomicFiles;
import io.papermc.paper.util.MappingEnvironment;
import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import net.neoforged.art.api.Renamer;
import net.neoforged.art.api.Transformer;
import net.neoforged.art.internal.RenamerImpl;
import net.neoforged.srgutils.IMappingFile;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.checkerframework.framework.qual.DefaultQualifier;
import org.slf4j.Logger;
import static io.papermc.paper.pluginremap.InsertManifestAttribute.addNamespaceManifestAttribute;
@DefaultQualifier(NonNull.class)
final class ReobfServer {
private static final Logger LOGGER = LogUtils.getClassLogger();
private final Path remapClasspathDir;
private final CompletableFuture<Void> load;
ReobfServer(final Path remapClasspathDir, final CompletableFuture<IMappingFile> mappings, final Executor executor) {
this.remapClasspathDir = remapClasspathDir;
if (this.mappingsChanged()) {
this.load = mappings.thenAcceptAsync(this::remap, executor);
} else {
if (PluginRemapper.DEBUG_LOGGING) {
LOGGER.info("Have cached reobf server for current mappings.");
}
this.load = CompletableFuture.completedFuture(null);
}
}
CompletableFuture<Path> remapped() {
return this.load.thenApply($ -> this.remappedPath());
}
private Path remappedPath() {
return this.remapClasspathDir.resolve(MappingEnvironment.mappingsHash() + ".jar");
}
private boolean mappingsChanged() {
return !Files.exists(this.remappedPath());
}
private void remap(final IMappingFile mappings) {
try {
if (!Files.exists(this.remapClasspathDir)) {
Files.createDirectories(this.remapClasspathDir);
}
for (final Path file : PluginRemapper.list(this.remapClasspathDir, Files::isRegularFile)) {
Files.delete(file);
}
} catch (final IOException ex) {
throw new RuntimeException(ex);
}
LOGGER.info("Remapping server...");
final long startRemap = System.currentTimeMillis();
try (final DebugLogger log = DebugLogger.forOutputFile(this.remappedPath())) {
AtomicFiles.atomicWrite(this.remappedPath(), writeTo -> {
try (final RenamerImpl renamer = (RenamerImpl) Renamer.builder()
.logger(log)
.debug(log.debug())
.threads(1)
.add(Transformer.renamerFactory(mappings, false))
.add(addNamespaceManifestAttribute(InsertManifestAttribute.SPIGOT_NAMESPACE))
.build()) {
renamer.run(serverJar().toFile(), writeTo.toFile(), true);
}
});
} catch (final Exception ex) {
throw new RuntimeException("Failed to remap server jar", ex);
}
LOGGER.info("Done remapping server in {}ms.", System.currentTimeMillis() - startRemap);
}
private static Path serverJar() {
try {
return Path.of(ReobfServer.class.getProtectionDomain().getCodeSource().getLocation().toURI());
} catch (final URISyntaxException ex) {
throw new RuntimeException(ex);
}
}
}


@ -0,0 +1,72 @@
package io.papermc.paper.pluginremap;
import com.mojang.logging.LogUtils;
import io.papermc.paper.util.Hashing;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.checkerframework.framework.qual.DefaultQualifier;
import org.slf4j.Logger;
@DefaultQualifier(NonNull.class)
final class UnknownOriginRemappedPluginIndex extends RemappedPluginIndex {
private static final Logger LOGGER = LogUtils.getLogger();
private final Set<String> used = new HashSet<>();
UnknownOriginRemappedPluginIndex(final Path dir) {
super(dir, true);
}
@Override
@Nullable Path getIfPresent(final Path in) {
final String hash = Hashing.sha256(in);
if (this.state.skippedHashes.contains(hash)) {
return in;
}
final @Nullable Path path = super.getIfPresent(hash);
if (path != null) {
this.used.add(hash);
}
return path;
}
@Override
Path input(final Path in) {
final String hash = Hashing.sha256(in);
this.used.add(hash);
return super.input(in, hash);
}
void write(final boolean clean) {
if (!clean) {
super.write();
return;
}
final Iterator<Map.Entry<String, String>> it = this.state.hashes.entrySet().iterator();
while (it.hasNext()) {
final Map.Entry<String, String> next = it.next();
if (this.used.contains(next.getKey())) {
continue;
}
// Remove unused mapped file
it.remove();
final Path file = this.dir().resolve(next.getValue());
try {
Files.deleteIfExists(file);
} catch (final IOException ex) {
LOGGER.warn("Failed to delete no longer needed cached jar '{}'", file, ex);
}
}
super.write();
}
}


@ -0,0 +1,96 @@
package io.papermc.paper.util;
import java.io.IOException;
import java.nio.file.AccessDeniedException;
import java.nio.file.AtomicMoveNotSupportedException;
import java.nio.file.CopyOption;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.concurrent.ThreadLocalRandom;
import java.util.function.Consumer;
import org.spongepowered.configurate.util.CheckedConsumer;
// Stripped down version of https://github.com/jpenilla/squaremap/blob/7d7994b4096e5fc61364ea2d87e9aa4e14edf5c6/common/src/main/java/xyz/jpenilla/squaremap/common/util/FileUtil.java
public final class AtomicFiles {
private AtomicFiles() {
}
public static void atomicWrite(final Path path, final CheckedConsumer<Path, IOException> op) throws IOException {
final Path tmp = tempFile(path);
try {
op.accept(tmp);
atomicMove(tmp, path, true);
} catch (final IOException ex) {
try {
Files.deleteIfExists(tmp);
} catch (final IOException ex1) {
ex.addSuppressed(ex1);
}
throw ex;
}
}
private static Path tempFile(final Path path) {
return path.resolveSibling("." + System.nanoTime() + "-" + ThreadLocalRandom.current().nextInt() + "-" + path.getFileName().toString() + ".tmp");
}
@SuppressWarnings("BusyWait") // not busy waiting
public static void atomicMove(final Path from, final Path to, final boolean replaceExisting) throws IOException {
final int maxRetries = 2;
try {
atomicMoveIfPossible(from, to, replaceExisting);
} catch (final AccessDeniedException ex) {
// Sometimes because of file locking this will fail... Let's just try again and hope for the best
// Thanks Windows!
int retries = 1;
while (true) {
try {
// Pause for a bit
Thread.sleep(10L * retries);
atomicMoveIfPossible(from, to, replaceExisting);
break; // success
} catch (final AccessDeniedException ex1) {
ex.addSuppressed(ex1);
if (retries == maxRetries) {
throw ex;
}
} catch (final InterruptedException interruptedException) {
ex.addSuppressed(interruptedException);
Thread.currentThread().interrupt();
throw ex;
}
++retries;
}
}
}
private static void atomicMoveIfPossible(final Path from, final Path to, final boolean replaceExisting) throws IOException {
final CopyOption[] options = replaceExisting
? new CopyOption[]{StandardCopyOption.ATOMIC_MOVE, StandardCopyOption.REPLACE_EXISTING}
: new CopyOption[]{StandardCopyOption.ATOMIC_MOVE};
try {
Files.move(from, to, options);
} catch (final AtomicMoveNotSupportedException ex) {
Files.move(from, to, replaceExisting ? new CopyOption[]{StandardCopyOption.REPLACE_EXISTING} : new CopyOption[]{});
}
}
private static <T, X extends Throwable> Consumer<T> sneaky(final CheckedConsumer<T, X> consumer) {
return t -> {
try {
consumer.accept(t);
} catch (final Throwable thr) {
rethrow(thr);
}
};
}
@SuppressWarnings("unchecked")
private static <X extends Throwable> RuntimeException rethrow(final Throwable t) throws X {
throw (X) t;
}
}
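
A short usage sketch for atomicWrite with a placeholder path; the consumer receives a temp file that is then atomically moved over the target:

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;

final class AtomicWriteSketch {
    static void example() throws IOException {
        AtomicFiles.atomicWrite(Path.of("plugins/.paper-remapped/example.txt"),
            tmp -> Files.writeString(tmp, "hello", StandardCharsets.UTF_8)); // write to tmp, then move into place
    }
}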


@ -0,0 +1,50 @@
package io.papermc.paper.util;
import com.google.common.hash.HashCode;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Locale;
import org.apache.commons.io.IOUtils;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.checkerframework.framework.qual.DefaultQualifier;
@DefaultQualifier(NonNull.class)
public final class Hashing {
private Hashing() {
}
/**
* Hash the provided {@link InputStream} using SHA-256. The stream will be closed.
*
* @param stream input stream
* @return SHA-256 hash string
*/
public static String sha256(final InputStream stream) {
try (stream) {
return com.google.common.hash.Hashing.sha256().hashBytes(IOUtils.toByteArray(stream)).toString().toUpperCase(Locale.ROOT);
} catch (final IOException ex) {
throw new RuntimeException("Failed to take hash of InputStream", ex);
}
}
/**
* Hash the provided file using SHA-256.
*
* @param file file
* @return SHA-256 hash string
*/
public static String sha256(final Path file) {
if (!Files.isRegularFile(file)) {
throw new IllegalArgumentException("'" + file + "' is not a regular file!");
}
final HashCode hash;
try {
hash = com.google.common.io.Files.asByteSource(file.toFile()).hash(com.google.common.hash.Hashing.sha256());
} catch (final IOException ex) {
throw new RuntimeException("Failed to take hash of file '" + file + "'", ex);
}
return hash.toString().toUpperCase(Locale.ROOT);
}
}
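
For illustration, hashing a jar (placeholder path) produces the upper-case SHA-256 hex string that RemappedPluginIndex uses as its cache key:

import java.nio.file.Path;

final class HashSketch {
    public static void main(final String[] args) {
        System.out.println(Hashing.sha256(Path.of("plugins/ExamplePlugin.jar"))); // upper-case hex digest
    }
}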


@ -0,0 +1,65 @@
package io.papermc.paper.util;
import java.io.InputStream;
import java.util.Objects;
import java.util.jar.Manifest;
import net.minecraft.world.entity.MobCategory;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.checkerframework.framework.qual.DefaultQualifier;
@DefaultQualifier(NonNull.class)
public final class MappingEnvironment {
private static final @Nullable String MAPPINGS_HASH = readMappingsHash();
private static final boolean REOBF = checkReobf();
private MappingEnvironment() {
}
public static boolean reobf() {
return REOBF;
}
public static boolean hasMappings() {
return MAPPINGS_HASH != null;
}
public static InputStream mappingsStream() {
return Objects.requireNonNull(mappingsStreamIfPresent(), "Missing mappings!");
}
public static @Nullable InputStream mappingsStreamIfPresent() {
return MappingEnvironment.class.getClassLoader().getResourceAsStream("META-INF/mappings/reobf.tiny");
}
public static String mappingsHash() {
return Objects.requireNonNull(MAPPINGS_HASH, "MAPPINGS_HASH");
}
private static @Nullable String readMappingsHash() {
final @Nullable Manifest manifest = JarManifests.manifest(MappingEnvironment.class);
if (manifest != null) {
final Object hash = manifest.getMainAttributes().getValue("Included-Mappings-Hash");
if (hash != null) {
return hash.toString();
}
}
final @Nullable InputStream stream = mappingsStreamIfPresent();
if (stream == null) {
return null;
}
return Hashing.sha256(stream);
}
@SuppressWarnings("ConstantConditions")
private static boolean checkReobf() {
final Class<?> clazz = MobCategory.class;
if (clazz.getSimpleName().equals("MobCategory")) {
return false;
} else if (clazz.getSimpleName().equals("EnumCreatureType")) {
return true;
}
throw new IllegalStateException();
}
}


@@ -80,10 +80,10 @@ public enum ObfHelper {
}
private static @Nullable Set<ClassMapping> loadMappingsIfPresent() {
- try (final @Nullable InputStream mappingsInputStream = ObfHelper.class.getClassLoader().getResourceAsStream("META-INF/mappings/reobf.tiny")) {
- if (mappingsInputStream == null) {
- return null;
- }
+ if (!MappingEnvironment.hasMappings()) {
+ return null;
+ }
+ try (final InputStream mappingsInputStream = MappingEnvironment.mappingsStream()) {
final IMappingFile mappings = IMappingFile.load(mappingsInputStream); // Mappings are mojang->spigot
final Set<ClassMapping> classes = new HashSet<>();

View file

@@ -0,0 +1,85 @@
package io.papermc.paper.util.concurrent;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Utilities for building "scale first" thread pools: pools that grow to their maximum
* thread count before queueing tasks, instead of the default queue-first behavior.
*
* @see <a href="https://medium.com/@uditharosha/java-scale-first-executorservice-4245a63222df">Java Scale First ExecutorService A myth or a reality</a>
*/
public final class ScalingThreadPool {
private ScalingThreadPool() {
}
public static RejectedExecutionHandler defaultReEnqueuePolicy() {
return reEnqueuePolicy(new ThreadPoolExecutor.AbortPolicy());
}
public static RejectedExecutionHandler reEnqueuePolicy(final RejectedExecutionHandler original) {
return new ReEnqueuePolicy(original);
}
public static <E> BlockingQueue<E> createUnboundedQueue() {
return new Queue<>();
}
public static <E> BlockingQueue<E> createQueue(final int capacity) {
return new Queue<>(capacity);
}
private static final class Queue<E> extends LinkedBlockingQueue<E> {
private final AtomicInteger idleThreads = new AtomicInteger(0);
private Queue() {
super();
}
private Queue(final int capacity) {
super(capacity);
}
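// Only accept the task if a worker is currently idle; otherwise report failure so the
// pool spawns a new thread (up to its maximum size) instead of queueing.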
@Override
public boolean offer(final E e) {
return this.idleThreads.get() > 0 && super.offer(e);
}
@Override
public E take() throws InterruptedException {
this.idleThreads.incrementAndGet();
try {
return super.take();
} finally {
this.idleThreads.decrementAndGet();
}
}
@Override
public E poll(final long timeout, final TimeUnit unit) throws InterruptedException {
this.idleThreads.incrementAndGet();
try {
return super.poll(timeout, unit);
} finally {
this.idleThreads.decrementAndGet();
}
}
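// Bypasses the idle-worker check so the re-enqueue policy below can force a rejected
// task into the queue once the pool has reached its maximum size.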
@Override
public boolean add(final E e) {
return super.offer(e);
}
}
private record ReEnqueuePolicy(RejectedExecutionHandler originalHandler) implements RejectedExecutionHandler {
@Override
public void rejectedExecution(final Runnable r, final ThreadPoolExecutor executor) {
if (!executor.getQueue().add(r)) {
this.originalHandler.rejectedExecution(r, executor);
}
}
}
}
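A minimal sketch of wiring these helpers into a ThreadPoolExecutor; the pool sizes and timeout are example values, not taken from this commit (imports as in the file above):

    // Scale-first pool: grows to 8 threads before queueing; rejected tasks are re-queued.
    ThreadPoolExecutor executor = new ThreadPoolExecutor(
            1, 8,                                      // core / maximum pool size
            30L, TimeUnit.SECONDS,                     // excess idle threads time out
            ScalingThreadPool.createUnboundedQueue(),  // offer() fails while no worker is idle
            ScalingThreadPool.defaultReEnqueuePolicy() // at max size, fall back to queueing
    );
    executor.execute(() -> System.out.println("hello from the scaling pool"));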

View file

@@ -0,0 +1,308 @@
/*
* Forge Auto Renaming Tool
* Copyright (c) 2021
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation version 2.1
* of the License.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package net.neoforged.art.internal;
import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import java.util.zip.ZipOutputStream;
import net.neoforged.cliutils.JarUtils;
import net.neoforged.cliutils.progress.ProgressReporter;
import org.objectweb.asm.Opcodes;
import net.neoforged.art.api.ClassProvider;
import net.neoforged.art.api.Renamer;
import net.neoforged.art.api.Transformer;
import net.neoforged.art.api.Transformer.ClassEntry;
import net.neoforged.art.api.Transformer.Entry;
import net.neoforged.art.api.Transformer.ManifestEntry;
import net.neoforged.art.api.Transformer.ResourceEntry;
public class RenamerImpl implements Renamer { // Paper - public
private static final ProgressReporter PROGRESS = ProgressReporter.getDefault();
static final int MAX_ASM_VERSION = Opcodes.ASM9;
private static final String MANIFEST_NAME = "META-INF/MANIFEST.MF";
private final List<File> libraries;
private final List<Transformer> transformers;
private final SortedClassProvider sortedClassProvider;
private final List<ClassProvider> classProviders;
private final int threads;
private final Consumer<String> logger;
private final Consumer<String> debug;
private boolean setup = false;
private ClassProvider libraryClasses;
RenamerImpl(List<File> libraries, List<Transformer> transformers, SortedClassProvider sortedClassProvider, List<ClassProvider> classProviders,
int threads, Consumer<String> logger, Consumer<String> debug) {
this.libraries = libraries;
this.transformers = transformers;
this.sortedClassProvider = sortedClassProvider;
this.classProviders = Collections.unmodifiableList(classProviders);
this.threads = threads;
this.logger = logger;
this.debug = debug;
}
private void setup() {
if (this.setup)
return;
this.setup = true;
ClassProvider.Builder libraryClassesBuilder = ClassProvider.builder().shouldCacheAll(true);
this.logger.accept("Adding Libraries to Inheritance");
this.libraries.forEach(f -> libraryClassesBuilder.addLibrary(f.toPath()));
this.libraryClasses = libraryClassesBuilder.build();
}
@Override
public void run(File input, File output) {
// Paper start - Add remappingSelf
this.run(input, output, false);
}
public void run(File input, File output, boolean remappingSelf) {
// Paper end
if (!this.setup)
this.setup();
if (Boolean.getBoolean(ProgressReporter.ENABLED_PROPERTY)) {
try {
PROGRESS.setMaxProgress(JarUtils.getFileCountInZip(input));
} catch (IOException e) {
logger.accept("Failed to read zip file count: " + e);
}
}
input = Objects.requireNonNull(input).getAbsoluteFile();
output = Objects.requireNonNull(output).getAbsoluteFile();
if (!input.exists())
throw new IllegalArgumentException("Input file not found: " + input.getAbsolutePath());
logger.accept("Reading Input: " + input.getAbsolutePath());
PROGRESS.setStep("Reading input jar");
// Read everything from the input jar!
List<Entry> oldEntries = new ArrayList<>();
try (ZipFile in = new ZipFile(input)) {
int amount = 0;
for (Enumeration<? extends ZipEntry> entries = in.entries(); entries.hasMoreElements();) {
final ZipEntry e = entries.nextElement();
if (e.isDirectory())
continue;
String name = e.getName();
byte[] data;
try (InputStream entryInput = in.getInputStream(e)) {
data = entryInput.readAllBytes(); // Paper - Use readAllBytes
}
if (name.endsWith(".class") && !name.contains("META-INF/")) // Paper - Skip META-INF entries
oldEntries.add(ClassEntry.create(name, e.getTime(), data));
else if (name.equals(MANIFEST_NAME))
oldEntries.add(ManifestEntry.create(e.getTime(), data));
else if (name.equals("javadoctor.json"))
oldEntries.add(Transformer.JavadoctorEntry.create(e.getTime(), data));
else
oldEntries.add(ResourceEntry.create(name, e.getTime(), data));
if ((++amount) % 10 == 0) {
PROGRESS.setProgress(amount);
}
}
} catch (IOException e) {
throw new RuntimeException("Could not parse input: " + input.getAbsolutePath(), e);
}
this.sortedClassProvider.clearCache();
ArrayList<ClassProvider> classProviders = new ArrayList<>(this.classProviders);
classProviders.add(0, this.libraryClasses);
this.sortedClassProvider.classProviders = classProviders;
AsyncHelper async = new AsyncHelper(threads);
try {
/* Disabled until we do something with it
// Gather original file Hashes, so that we can detect changes and update the manifest if necessary
log("Gathering original hashes");
Map<String, String> oldHashes = async.invokeAll(oldEntries,
e -> new Pair<>(e.getName(), HashFunction.SHA256.hash(e.getData()))
).stream().collect(Collectors.toMap(Pair::getLeft, Pair::getRight));
*/
PROGRESS.setProgress(0);
PROGRESS.setIndeterminate(true);
PROGRESS.setStep("Processing entries");
List<ClassEntry> ourClasses = oldEntries.stream()
.filter(e -> e instanceof ClassEntry && !e.getName().startsWith("META-INF/"))
.map(ClassEntry.class::cast)
.collect(Collectors.toList());
// Add the original classes to the inheritance map, TODO: Multi-Release somehow?
logger.accept("Adding input to inheritance map");
ClassProvider.Builder inputClassesBuilder = ClassProvider.builder();
async.consumeAll(ourClasses, ClassEntry::getClassName, c ->
inputClassesBuilder.addClass(c.getName().substring(0, c.getName().length() - 6), c.getData())
);
classProviders.add(0, inputClassesBuilder.build());
// Process everything
logger.accept("Processing entries");
List<Entry> newEntries = async.invokeAll(oldEntries, Entry::getName, this::processEntry);
logger.accept("Adding extras");
// Paper start - I'm pretty sure the duplicates are because the input is already on the classpath
List<Entry> finalNewEntries = newEntries;
transformers.forEach(t -> finalNewEntries.addAll(t.getExtras()));
Set<String> seen = new HashSet<>();
if (remappingSelf) {
// deduplicate
List<Entry> n = new ArrayList<>();
for (final Entry e : newEntries) {
if (seen.add(e.getName())) {
n.add(e);
}
}
newEntries = n;
} else {
String dupes = newEntries.stream().map(Entry::getName)
.filter(n -> !seen.add(n))
.sorted()
.collect(Collectors.joining(", "));
if (!dupes.isEmpty())
throw new IllegalStateException("Duplicate entries detected: " + dupes);
}
// Paper end
// We care about stable output, so sort, and single thread write.
logger.accept("Sorting");
Collections.sort(newEntries, this::compare);
if (!output.getParentFile().exists())
output.getParentFile().mkdirs();
seen.clear();
PROGRESS.setMaxProgress(newEntries.size());
PROGRESS.setStep("Writing output");
logger.accept("Writing Output: " + output.getAbsolutePath());
try (OutputStream fos = new BufferedOutputStream(Files.newOutputStream(output.toPath()));
ZipOutputStream zos = new ZipOutputStream(fos)) {
int amount = 0;
for (Entry e : newEntries) {
String name = e.getName();
int idx = name.lastIndexOf('/');
if (idx != -1)
addDirectory(zos, seen, name.substring(0, idx));
logger.accept(" " + name);
ZipEntry entry = new ZipEntry(name);
entry.setTime(e.getTime());
zos.putNextEntry(entry);
zos.write(e.getData());
zos.closeEntry();
if ((++amount) % 10 == 0) {
PROGRESS.setProgress(amount);
}
}
PROGRESS.setProgress(amount);
}
} catch (final IOException e) {
throw new RuntimeException("Could not write to file " + output.getAbsolutePath(), e);
} finally {
async.shutdown();
}
}
private byte[] readAllBytes(InputStream in, long size) throws IOException {
// This program will crash if size exceeds MAX_INT anyway since arrays are limited to 32-bit indices
ByteArrayOutputStream tmp = new ByteArrayOutputStream(size >= 0 ? (int) size : 0);
byte[] buffer = new byte[8192];
int read;
while ((read = in.read(buffer)) != -1) {
tmp.write(buffer, 0, read);
}
return tmp.toByteArray();
}
// Though directory entries are not strictly necessary, we add them because some bad implementations of Zip extractors
// attempt to extract files without making sure the parents exist.
private void addDirectory(ZipOutputStream zos, Set<String> seen, String path) throws IOException {
if (!seen.add(path))
return;
int idx = path.lastIndexOf('/');
if (idx != -1)
addDirectory(zos, seen, path.substring(0, idx));
logger.accept(" " + path + '/');
ZipEntry dir = new ZipEntry(path + '/');
dir.setTime(Entry.STABLE_TIMESTAMP);
zos.putNextEntry(dir);
zos.closeEntry();
}
private Entry processEntry(final Entry start) {
Entry entry = start;
for (Transformer transformer : RenamerImpl.this.transformers) {
entry = entry.process(transformer);
if (entry == null)
return null;
}
return entry;
}
private int compare(Entry o1, Entry o2) {
// In order for JarInputStream to work, MANIFEST has to be the first entry, so make it first!
if (MANIFEST_NAME.equals(o1.getName()))
return MANIFEST_NAME.equals(o2.getName()) ? 0 : -1;
if (MANIFEST_NAME.equals(o2.getName()))
return MANIFEST_NAME.equals(o1.getName()) ? 0 : 1;
return o1.getName().compareTo(o2.getName());
}
@Override
public void close() throws IOException {
this.sortedClassProvider.close();
}
}
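A minimal sketch of driving the Paper-added overload; how the RenamerImpl is configured (libraries, mapping transformer) is outside this diff and assumed to happen elsewhere:

    // Hypothetical helper; 'renamer' is assumed to already be configured with the
    // plugin's libraries and a mojang->spigot renaming transformer.
    static void remapPlugin(final RenamerImpl renamer, final File pluginJar, final File outputJar) throws IOException {
        try {
            renamer.run(pluginJar, outputJar, false); // false: not remapping the server itself, so duplicate entries fail hard
        } finally {
            renamer.close();
        }
    }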

View file

@@ -1005,6 +1005,7 @@ public final class CraftServer implements Server {
this.loadPlugins();
this.enablePlugins(PluginLoadOrder.STARTUP);
this.enablePlugins(PluginLoadOrder.POSTWORLD);
+ if (io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper != null) io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper.pluginsEnabled(); // Paper - Remap plugins
this.getPluginManager().callEvent(new ServerLoadEvent(ServerLoadEvent.LoadType.RELOAD));
}