Re-add MPDB migrator loading on redis listener

Use Libby to download SQL dependencies at runtime
feat/data-edit-commands
Authored by William 3 years ago, committed by HarvelsX
parent e1d2aec483
commit 41bad141c6
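In short, this commit drops the proxy plugins' declarative dependency loading (the libraries: section in bungee.yml, removed further down) in favour of Libby, which downloads the MySQL and SQLite JDBC drivers from Maven Central and injects them into the plugin's classpath at runtime. A minimal sketch of that pattern on BungeeCord, mirroring the fetchDependencies() method added in this diff (the plugin class name here is illustrative, not part of the commit):

import net.byteflux.libby.BungeeLibraryManager;
import net.byteflux.libby.Library;
import net.md_5.bungee.api.plugin.Plugin;

public final class ExamplePlugin extends Plugin {

    @Override
    public void onLoad() {
        // Resolve the JDBC drivers from Maven Central at runtime and add them to the classpath
        BungeeLibraryManager manager = new BungeeLibraryManager(this);
        manager.addMavenCentral();
        manager.loadLibrary(Library.builder()
                .groupId("mysql")
                .artifactId("mysql-connector-java")
                .version("8.0.25")
                .build());
        manager.loadLibrary(Library.builder()
                .groupId("org.xerial")
                .artifactId("sqlite-jdbc")
                .version("3.36.0.3")
                .build());
    }
}

On Velocity the same pattern is applied through a VelocityLibraryManager, as shown in the HuskSyncVelocity hunks below.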

@@ -34,6 +34,7 @@ subprojects {
mavenCentral()
maven { url 'https://hub.spigotmc.org/nexus/content/repositories/snapshots/' }
maven { url 'https://repo.velocitypowered.com/snapshots/' }
maven { url 'https://repo.alessiodp.com/releases/' }
maven { url 'https://repo.minebench.de/' }
maven { url 'https://repo.codemc.org/repository/maven-public' }
maven { url 'https://jitpack.io' }

@@ -5,6 +5,7 @@ dependencies {
compileOnly 'redis.clients:jedis:3.7.0'
implementation 'org.bstats:bstats-bungeecord:2.2.1'
implementation 'de.themoep:minedown:1.7.1-SNAPSHOT'
implementation 'net.byteflux:libby-bungee:1.1.4'
compileOnly 'net.md-5:bungeecord-api:1.16-R0.5-SNAPSHOT'
}
@@ -13,6 +14,7 @@ shadowJar {
relocate 'com.zaxxer', 'me.William278.husksync.libraries.hikari'
relocate 'org.bstats', 'me.William278.husksync.libraries.plan'
relocate 'de.themoep', 'me.William278.husksync.libraries.minedown'
relocate 'net.byteflux.libby', 'me.William278.husksync.libraries.libby.bungee'
}
tasks.register('prepareKotlinBuildScriptModel'){}

@@ -11,6 +11,8 @@ import me.william278.husksync.bungeecord.util.BungeeLogger;
import me.william278.husksync.bungeecord.util.BungeeUpdateChecker;
import me.william278.husksync.redis.RedisMessage;
import me.william278.husksync.util.Logger;
import net.byteflux.libby.BungeeLibraryManager;
import net.byteflux.libby.Library;
import net.md_5.bungee.api.ProxyServer;
import net.md_5.bungee.api.plugin.Plugin;
import org.bstats.bungeecord.Metrics;
@@ -56,6 +58,7 @@ public final class HuskSyncBungeeCord extends Plugin {
public void onLoad() {
instance = this;
logger = new BungeeLogger(getLogger());
fetchDependencies();
}
@Override
@@ -146,4 +149,24 @@ public final class HuskSyncBungeeCord extends Plugin {
getBungeeLogger().info("Disabled HuskSync (" + getProxy().getName() + ") v" + getDescription().getVersion());
}
// Load dependencies
private void fetchDependencies() {
BungeeLibraryManager manager = new BungeeLibraryManager(getInstance());
Library mySqlLib = Library.builder()
.groupId("mysql")
.artifactId("mysql-connector-java")
.version("8.0.25")
.build();
Library sqLiteLib = Library.builder()
.groupId("org.xerial")
.artifactId("sqlite-jdbc")
.version("3.36.0.3")
.build();
manager.addMavenCentral();
manager.loadLibrary(mySqlLib);
manager.loadLibrary(sqLiteLib);
}
}

@@ -3,6 +3,3 @@ version: @version@
main: me.william278.husksync.HuskSyncBungeeCord
author: William278
description: 'A modern, cross-server player data synchronization system'
libraries:
- mysql:mysql-connector-java:8.0.25
- org.xerial:sqlite-jdbc:3.36.0.3

@@ -6,6 +6,7 @@ dependencies {
implementation 'org.bstats:bstats-velocity:2.2.1'
implementation 'com.zaxxer:HikariCP:5.0.0'
implementation 'de.themoep:minedown-adventure:1.7.1-SNAPSHOT'
implementation 'net.byteflux:libby-velocity:1.1.4'
compileOnly 'com.velocitypowered:velocity-api:3.1.0'
annotationProcessor 'com.velocitypowered:velocity-api:3.1.0'
@@ -15,6 +16,7 @@ shadowJar {
relocate 'com.zaxxer', 'me.William278.husksync.libraries.hikari'
relocate 'org.bstats', 'me.William278.husksync.libraries.plan'
relocate 'de.themoep', 'me.William278.husksync.libraries.minedown'
relocate 'net.byteflux.libby', 'me.William278.husksync.libraries.libby.velocity'
}
tasks.register('prepareKotlinBuildScriptModel'){}

@@ -1,6 +1,7 @@
package me.william278.husksync;
import com.google.inject.Inject;
import com.google.inject.Provides;
import com.velocitypowered.api.command.CommandManager;
import com.velocitypowered.api.command.CommandMeta;
import com.velocitypowered.api.event.Subscribe;
@@ -19,6 +20,8 @@ import me.william278.husksync.velocity.config.ConfigManager;
import me.william278.husksync.velocity.listener.VelocityEventListener;
import me.william278.husksync.velocity.listener.VelocityRedisListener;
import me.william278.husksync.velocity.util.VelocityLogger;
import net.byteflux.libby.Library;
import net.byteflux.libby.VelocityLibraryManager;
import org.bstats.velocity.Metrics;
import org.slf4j.Logger;
@@ -72,6 +75,8 @@ public class HuskSyncVelocity {
private final ProxyServer server;
private final Path dataDirectory;
private final VelocityLibraryManager<HuskSyncVelocity> manager;
// Get the data folder
public File getDataFolder() {
return dataDirectory.toFile();
@@ -90,11 +95,13 @@ public class HuskSyncVelocity {
}
@Inject
public HuskSyncVelocity(ProxyServer server, Logger logger, @DataDirectory Path dataDirectory, Metrics.Factory metricsFactory) {
public HuskSyncVelocity(ProxyServer server, Logger logger, @DataDirectory Path dataDirectory, Metrics.Factory metricsFactory, VelocityLibraryManager<HuskSyncVelocity> manager) {
this.server = server;
this.logger = logger;
this.dataDirectory = dataDirectory;
this.metricsFactory = metricsFactory;
this.manager = manager;
fetchDependencies();
}
@Subscribe
@@ -105,6 +112,9 @@ public class HuskSyncVelocity {
// Setup logger
velocityLogger = new VelocityLogger(logger);
// Prepare synchronised servers tracker
synchronisedServers = new HashSet<>();
// Load config
ConfigManager.loadConfig();
@@ -125,6 +135,12 @@ public class HuskSyncVelocity {
// Setup data manager
dataManager = new DataManager(getVelocityLogger(), getDataFolder());
// Ensure the data manager initialized correctly
if (dataManager.hasFailedInitialization) {
getVelocityLogger().severe("Failed to initialize the HuskSync database(s).\n" +
"HuskSync will now abort loading itself (Velocity) v" + VERSION);
}
// Setup player data cache
for (Settings.SynchronisationCluster cluster : Settings.clusters) {
dataManager.playerDataCache.put(cluster, new DataManager.PlayerDataCache());
@@ -186,4 +202,23 @@ public class HuskSyncVelocity {
// Log to console
getVelocityLogger().info("Disabled HuskSync (Velocity) v" + VERSION);
}
// Load dependencies
private void fetchDependencies() {
Library mySqlLib = Library.builder()
.groupId("mysql")
.artifactId("mysql-connector-java")
.version("8.0.25")
.build();
Library sqLiteLib = Library.builder()
.groupId("org.xerial")
.artifactId("sqlite-jdbc")
.version("3.36.0.3")
.build();
manager.addMavenCentral();
manager.loadLibrary(mySqlLib);
manager.loadLibrary(sqLiteLib);
}
}

@@ -71,7 +71,7 @@ public class VelocityCommand implements SimpleCommand, HuskSyncCommand {
}
sender.sendMessage(new MineDown("[•](white) [Download links:](#00fb9a) [[⏩ Spigot]](gray open_url=https://www.spigotmc.org/resources/husktowns.92672/updates) [•](#262626) [[⏩ Polymart]](gray open_url=https://polymart.org/resource/husktowns.1056/updates)").toComponent());
}
});
}).schedule();
}
case "invsee", "openinv", "inventory" -> {
if (!player.hasPermission("husksync.command.inventory")) {
@@ -294,7 +294,7 @@ public class VelocityCommand implements SimpleCommand, HuskSyncCommand {
HuskSyncVelocity.synchronisedServers)) {
plugin.getProxyServer().getScheduler().buildTask(plugin, () ->
HuskSyncVelocity.mpdbMigrator.executeMigrationOperations(HuskSyncVelocity.dataManager,
HuskSyncVelocity.synchronisedServers));
HuskSyncVelocity.synchronisedServers)).schedule();
}
}
default -> sender.sendMessage(new MineDown("Error: Invalid argument for migration. Use \"husksync migrate\" to start the process").toComponent());
@@ -338,7 +338,7 @@ public class VelocityCommand implements SimpleCommand, HuskSyncCommand {
return;
}
viewer.sendMessage(new MineDown(MessageManager.getMessage("error_invalid_cluster")).toComponent());
});
}).schedule();
}
// View the ender chest of a player specified by their name
@@ -372,7 +372,7 @@ public class VelocityCommand implements SimpleCommand, HuskSyncCommand {
return;
}
viewer.sendMessage(new MineDown(MessageManager.getMessage("error_invalid_cluster")).toComponent());
});
}).schedule();
}
/**
@@ -406,6 +406,11 @@ public class VelocityCommand implements SimpleCommand, HuskSyncCommand {
}
subCommands.add(subCommand.command());
}
// Return list of subcommands
if (args[0].length() == 0) {
return subCommands;
}
// Automatically filter the sub commands' order in tab completion by what the player has typed
return subCommands.stream().filter(val -> val.startsWith(args[0]))
.sorted().collect(Collectors.toList());

@@ -78,7 +78,7 @@ public class ConfigLoader {
// Read cluster data
ConfigurationNode clusterSection = config.getNode("clusters");
final String settingDatabaseName = Settings.mySQLDatabase != null ? Settings.mySQLDatabase : "HuskSync";
for (ConfigurationNode cluster : clusterSection.getChildrenList()) {
for (ConfigurationNode cluster : clusterSection.getChildrenMap().values()) {
final String clusterId = (String) cluster.getKey();
final String playerTableName = getConfigString(config, "husksync_players", "clusters", clusterId, "player_table");
final String dataTableName = getConfigString(config, "husksync_data", "clusters", clusterId, "data_table");
@@ -89,7 +89,7 @@ public class ConfigLoader {
public static void loadMessageStrings(ConfigurationNode config) {
final HashMap<String, String> messages = new HashMap<>();
for (ConfigurationNode message : config.getChildrenList()) {
for (ConfigurationNode message : config.getChildrenMap().values()) {
final String messageId = (String) message.getKey();
messages.put(messageId, getConfigString(config, "", messageId));
}

@@ -82,6 +82,8 @@ public class ConfigManager {
File configFile = new File(plugin.getDataFolder(), "messages_" + Settings.language + ".yml");
return YAMLConfigurationLoader.builder()
.setPath(configFile.toPath())
.setFlowStyle(DumperOptions.FlowStyle.BLOCK)
.setIndent(2)
.build()
.load();
} catch (IOException e) {

@@ -41,6 +41,6 @@ public class VelocityEventListener {
plugin.getVelocityLogger().log(Level.SEVERE, "Failed to serialize request data on join message data");
e.printStackTrace();
}
});
}).schedule();
}
}

@@ -6,6 +6,7 @@ import me.william278.husksync.HuskSyncVelocity;
import me.william278.husksync.PlayerData;
import me.william278.husksync.Server;
import me.william278.husksync.Settings;
import me.william278.husksync.migrator.MPDBMigrator;
import me.william278.husksync.redis.RedisListener;
import me.william278.husksync.redis.RedisMessage;
import me.william278.husksync.util.MessageManager;
@@ -84,7 +85,7 @@ public class VelocityRedisListener extends RedisListener {
log(Level.SEVERE, "Failed to serialize data when replying to a data request");
e.printStackTrace();
}
});
}).schedule();
}
case PLAYER_DATA_UPDATE -> {
// Deserialize the PlayerData received
@@ -174,18 +175,20 @@ public class VelocityRedisListener extends RedisListener {
return;
}
//todo Migrator
/*// Add the incoming data to the data to be saved
MPDBMigrator.incomingPlayerData.put(playerData, playerName);
// Get the MPDB migrator
MPDBMigrator migrator = HuskSyncVelocity.mpdbMigrator;
// Add the incoming data to the data to be saved
migrator.incomingPlayerData.put(playerData, playerName);
// Increment players migrated
MPDBMigrator.playersMigrated++;
plugin.getBungeeLogger().log(Level.INFO, "Migrated " + MPDBMigrator.playersMigrated + "/" + MPDBMigrator.migratedDataSent + " players.");
migrator.playersMigrated++;
plugin.getVelocityLogger().log(Level.INFO, "Migrated " + migrator.playersMigrated + "/" + migrator.migratedDataSent + " players.");
// When all the data has been received, save it
if (MPDBMigrator.migratedDataSent == MPDBMigrator.playersMigrated) {
MPDBMigrator.loadIncomingData(MPDBMigrator.incomingPlayerData);
}*/
if (migrator.migratedDataSent == migrator.playersMigrated) {
migrator.loadIncomingData(migrator.incomingPlayerData, HuskSyncVelocity.dataManager);
}
}
}
}
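The listener hunk above restores the final step of the MPDB migration: each migrated player's data is queued on the migrator, a counter is incremented, and once the count matches the number of records the source reported as sent, everything is written through the DataManager. A stripped-down sketch of that hand-off (the field names follow the diff; the simplified class and types are hypothetical stand-ins, not HuskSync's actual API):

import java.util.HashMap;
import java.util.Map;

// Hypothetical, simplified stand-in for HuskSync's MPDBMigrator
final class MigrationSketch {
    final Map<Object, String> incomingPlayerData = new HashMap<>(); // received data mapped to the player's name
    int migratedDataSent;  // number of records the source said it sent
    int playersMigrated;   // number of records received so far

    void onMigratedDataReceived(Object playerData, String playerName, Runnable loadIncomingData) {
        incomingPlayerData.put(playerData, playerName);
        playersMigrated++;
        // Flush to the database only once every announced record has arrived
        if (playersMigrated == migratedDataSent) {
            loadIncomingData.run();
        }
    }
}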
