Start work on Velocity support

feat/data-edit-commands
William 3 years ago committed by HarvelsX
parent 48441cf9fd
commit c6451a6ac6

@ -11,7 +11,7 @@ plugins {
allprojects { allprojects {
group 'me.William278' group 'me.William278'
version '1.1.3' version '1.2-dev'
compileJava { options.encoding = 'UTF-8' } compileJava { options.encoding = 'UTF-8' }
tasks.withType(JavaCompile) { options.encoding = 'UTF-8' } tasks.withType(JavaCompile) { options.encoding = 'UTF-8' }
@ -33,6 +33,7 @@ subprojects {
mavenLocal() mavenLocal()
mavenCentral() mavenCentral()
maven { url 'https://hub.spigotmc.org/nexus/content/repositories/snapshots/' } maven { url 'https://hub.spigotmc.org/nexus/content/repositories/snapshots/' }
maven { url 'https://repo.velocitypowered.com/snapshots/' }
maven { url 'https://repo.minebench.de/' } maven { url 'https://repo.minebench.de/' }
maven { url 'https://repo.codemc.org/repository/maven-public' } maven { url 'https://repo.codemc.org/repository/maven-public' }
maven { url 'https://jitpack.io' } maven { url 'https://jitpack.io' }

@ -53,7 +53,7 @@ public final class HuskSyncBukkit extends JavaPlugin {
} }
try { try {
new RedisMessage(RedisMessage.MessageType.CONNECTION_HANDSHAKE, new RedisMessage(RedisMessage.MessageType.CONNECTION_HANDSHAKE,
new RedisMessage.MessageTarget(Settings.ServerType.BUNGEECORD, null, Settings.cluster), new RedisMessage.MessageTarget(Settings.ServerType.PROXY, null, Settings.cluster),
serverUUID.toString(), serverUUID.toString(),
Boolean.toString(isMySqlPlayerDataBridgeInstalled), Boolean.toString(isMySqlPlayerDataBridgeInstalled),
Bukkit.getName(), Bukkit.getName(),
@ -73,7 +73,7 @@ public final class HuskSyncBukkit extends JavaPlugin {
if (!handshakeCompleted) return; if (!handshakeCompleted) return;
try { try {
new RedisMessage(RedisMessage.MessageType.TERMINATE_HANDSHAKE, new RedisMessage(RedisMessage.MessageType.TERMINATE_HANDSHAKE,
new RedisMessage.MessageTarget(Settings.ServerType.BUNGEECORD, null, Settings.cluster), new RedisMessage.MessageTarget(Settings.ServerType.PROXY, null, Settings.cluster),
serverUUID.toString(), serverUUID.toString(),
Bukkit.getName()).send(); Bukkit.getName()).send();
} catch (IOException e) { } catch (IOException e) {

@ -57,7 +57,7 @@ public class DataViewer {
// Send a redis message with the updated data after the viewing // Send a redis message with the updated data after the viewing
new RedisMessage(RedisMessage.MessageType.PLAYER_DATA_UPDATE, new RedisMessage(RedisMessage.MessageType.PLAYER_DATA_UPDATE,
new RedisMessage.MessageTarget(Settings.ServerType.BUNGEECORD, null, Settings.cluster), new RedisMessage.MessageTarget(Settings.ServerType.PROXY, null, Settings.cluster),
RedisMessage.serialize(playerData)) RedisMessage.serialize(playerData))
.send(); .send();
} }

@ -8,6 +8,8 @@ import me.william278.husksync.Settings;
import me.william278.husksync.bukkit.config.ConfigLoader; import me.william278.husksync.bukkit.config.ConfigLoader;
import me.william278.husksync.bukkit.data.DataViewer; import me.william278.husksync.bukkit.data.DataViewer;
import me.william278.husksync.bukkit.util.PlayerSetter; import me.william278.husksync.bukkit.util.PlayerSetter;
import me.william278.husksync.bukkit.migrator.MPDBDeserializer;
import me.william278.husksync.migrator.MPDBPlayerData;
import me.william278.husksync.redis.RedisListener; import me.william278.husksync.redis.RedisListener;
import me.william278.husksync.redis.RedisMessage; import me.william278.husksync.redis.RedisMessage;
import org.bukkit.Bukkit; import org.bukkit.Bukkit;
@ -87,6 +89,24 @@ public class BukkitRedisListener extends RedisListener {
Bukkit.getScheduler().runTaskLaterAsynchronously(plugin, HuskSyncBukkit::establishRedisHandshake, 20); Bukkit.getScheduler().runTaskLaterAsynchronously(plugin, HuskSyncBukkit::establishRedisHandshake, 20);
} }
} }
case DECODE_MPDB_DATA -> {
UUID serverUUID = UUID.fromString(message.getMessageDataElements()[0]);
String encodedData = message.getMessageDataElements()[1];
Bukkit.getScheduler().runTaskAsynchronously(plugin, () -> {
if (serverUUID.equals(HuskSyncBukkit.serverUUID)) {
try {
MPDBPlayerData data = (MPDBPlayerData) RedisMessage.deserialize(encodedData);
new RedisMessage(RedisMessage.MessageType.DECODED_MPDB_DATA_SET,
new RedisMessage.MessageTarget(Settings.ServerType.PROXY, null, Settings.cluster),
RedisMessage.serialize(MPDBDeserializer.convertMPDBData(data)),
data.playerName)
.send();
} catch (IOException | ClassNotFoundException e) {
log(Level.SEVERE, "Failed to serialize encoded MPDB data");
}
}
});
}
case RELOAD_CONFIG -> { case RELOAD_CONFIG -> {
plugin.reloadConfig(); plugin.reloadConfig();
ConfigLoader.loadSettings(plugin.getConfig()); ConfigLoader.loadSettings(plugin.getConfig());

@ -104,7 +104,7 @@ public class PlayerSetter {
try { try {
final String serializedPlayerData = getNewSerializedPlayerData(player); final String serializedPlayerData = getNewSerializedPlayerData(player);
new RedisMessage(RedisMessage.MessageType.PLAYER_DATA_UPDATE, new RedisMessage(RedisMessage.MessageType.PLAYER_DATA_UPDATE,
new RedisMessage.MessageTarget(Settings.ServerType.BUNGEECORD, null, Settings.cluster), new RedisMessage.MessageTarget(Settings.ServerType.PROXY, null, Settings.cluster),
serializedPlayerData).send(); serializedPlayerData).send();
} catch (IOException e) { } catch (IOException e) {
plugin.getLogger().log(Level.SEVERE, "Failed to send a PlayerData update to the proxy", e); plugin.getLogger().log(Level.SEVERE, "Failed to send a PlayerData update to the proxy", e);
@ -123,7 +123,7 @@ public class PlayerSetter {
*/ */
public static void requestPlayerData(UUID playerUUID) throws IOException { public static void requestPlayerData(UUID playerUUID) throws IOException {
new RedisMessage(RedisMessage.MessageType.PLAYER_DATA_REQUEST, new RedisMessage(RedisMessage.MessageType.PLAYER_DATA_REQUEST,
new RedisMessage.MessageTarget(Settings.ServerType.BUNGEECORD, null, Settings.cluster), new RedisMessage.MessageTarget(Settings.ServerType.PROXY, null, Settings.cluster),
playerUUID.toString()).send(); playerUUID.toString()).send();
} }

@ -4,7 +4,6 @@ dependencies {
compileOnly 'redis.clients:jedis:3.7.0' compileOnly 'redis.clients:jedis:3.7.0'
implementation 'org.bstats:bstats-bungeecord:2.2.1' implementation 'org.bstats:bstats-bungeecord:2.2.1'
implementation 'com.zaxxer:HikariCP:5.0.0'
implementation 'de.themoep:minedown:1.7.1-SNAPSHOT' implementation 'de.themoep:minedown:1.7.1-SNAPSHOT'
compileOnly 'net.md-5:bungeecord-api:1.16-R0.5-SNAPSHOT' compileOnly 'net.md-5:bungeecord-api:1.16-R0.5-SNAPSHOT'

@ -3,25 +3,21 @@ package me.william278.husksync;
import me.william278.husksync.bungeecord.command.HuskSyncCommand; import me.william278.husksync.bungeecord.command.HuskSyncCommand;
import me.william278.husksync.bungeecord.config.ConfigLoader; import me.william278.husksync.bungeecord.config.ConfigLoader;
import me.william278.husksync.bungeecord.config.ConfigManager; import me.william278.husksync.bungeecord.config.ConfigManager;
import me.william278.husksync.bungeecord.data.DataManager; import me.william278.husksync.proxy.data.DataManager;
import me.william278.husksync.bungeecord.data.sql.Database;
import me.william278.husksync.bungeecord.data.sql.MySQL;
import me.william278.husksync.bungeecord.data.sql.SQLite;
import me.william278.husksync.bungeecord.listener.BungeeEventListener; import me.william278.husksync.bungeecord.listener.BungeeEventListener;
import me.william278.husksync.bungeecord.listener.BungeeRedisListener; import me.william278.husksync.bungeecord.listener.BungeeRedisListener;
import me.william278.husksync.bungeecord.migrator.MPDBMigrator;
import me.william278.husksync.bungeecord.util.BungeeLogger;
import me.william278.husksync.bungeecord.util.BungeeUpdateChecker; import me.william278.husksync.bungeecord.util.BungeeUpdateChecker;
import me.william278.husksync.redis.RedisMessage; import me.william278.husksync.redis.RedisMessage;
import me.william278.husksync.util.Logger;
import net.md_5.bungee.api.ProxyServer; import net.md_5.bungee.api.ProxyServer;
import net.md_5.bungee.api.plugin.Plugin; import net.md_5.bungee.api.plugin.Plugin;
import org.bstats.bungeecord.Metrics; import org.bstats.bungeecord.Metrics;
import java.io.IOException; import java.io.IOException;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.Objects; import java.util.Objects;
import java.util.UUID;
import java.util.logging.Level; import java.util.logging.Level;
public final class HuskSyncBungeeCord extends Plugin { public final class HuskSyncBungeeCord extends Plugin {
@ -46,15 +42,20 @@ public final class HuskSyncBungeeCord extends Plugin {
*/ */
public static HashSet<Server> synchronisedServers; public static HashSet<Server> synchronisedServers;
private static HashMap<String, Database> clusterDatabases; public static DataManager dataManager;
public static Connection getConnection(String clusterId) throws SQLException { public static MPDBMigrator mpdbMigrator;
return clusterDatabases.get(clusterId).getConnection();
private Logger logger;
public Logger getBungeeLogger() {
return logger;
} }
@Override @Override
public void onLoad() { public void onLoad() {
instance = this; instance = this;
logger = new BungeeLogger(getLogger());
} }
@Override @Override
@ -79,35 +80,23 @@ public final class HuskSyncBungeeCord extends Plugin {
new BungeeUpdateChecker(getDescription().getVersion()).logToConsole(); new BungeeUpdateChecker(getDescription().getVersion()).logToConsole();
} }
// Initialize the database // Setup data manager
clusterDatabases = new HashMap<>(); dataManager = new DataManager(getBungeeLogger(), getDataFolder());
for (Settings.SynchronisationCluster cluster : Settings.clusters) {
Database clusterDatabase = switch (Settings.dataStorageType) {
case SQLITE -> new SQLite(this, cluster);
case MYSQL -> new MySQL(this, cluster);
};
clusterDatabase.load();
clusterDatabase.createTables();
clusterDatabases.put(cluster.clusterId(), clusterDatabase);
}
// Abort loading if the database failed to initialize // Ensure the data manager initialized correctly
for (Database database : clusterDatabases.values()) { if (dataManager.hasFailedInitialization) {
if (database.isInactive()) { getBungeeLogger().severe("Failed to initialize the HuskSync database(s).\n" +
getLogger().severe("Failed to initialize the database(s); HuskSync will now abort loading itself (" + getProxy().getName() + ") v" + getDescription().getVersion()); "HuskSync will now abort loading itself (" + getProxy().getName() + ") v" + getDescription().getVersion());
return;
}
} }
// Setup player data cache // Setup player data cache
for (Settings.SynchronisationCluster cluster : Settings.clusters) { for (Settings.SynchronisationCluster cluster : Settings.clusters) {
DataManager.playerDataCache.put(cluster, new DataManager.PlayerDataCache()); dataManager.playerDataCache.put(cluster, new DataManager.PlayerDataCache());
} }
// Initialize the redis listener // Initialize the redis listener
if (!new BungeeRedisListener().isActiveAndEnabled) { if (!new BungeeRedisListener().isActiveAndEnabled) {
getLogger().severe("Failed to initialize Redis; HuskSync will now abort loading itself (" + getProxy().getName() + ") v" + getDescription().getVersion()); getBungeeLogger().severe("Failed to initialize Redis; HuskSync will now abort loading itself (" + getProxy().getName() + ") v" + getDescription().getVersion());
return; return;
} }
@ -117,15 +106,18 @@ public final class HuskSyncBungeeCord extends Plugin {
// Register command // Register command
getProxy().getPluginManager().registerCommand(this, new HuskSyncCommand()); getProxy().getPluginManager().registerCommand(this, new HuskSyncCommand());
// Prepare the migrator for use if needed
mpdbMigrator = new MPDBMigrator();
// Initialize bStats metrics // Initialize bStats metrics
try { try {
new Metrics(this, METRICS_ID); new Metrics(this, METRICS_ID);
} catch (Exception e) { } catch (Exception e) {
getLogger().info("Skipped metrics initialization"); getBungeeLogger().info("Skipped metrics initialization");
} }
// Log to console // Log to console
getLogger().info("Enabled HuskSync (" + getProxy().getName() + ") v" + getDescription().getVersion()); getBungeeLogger().info("Enabled HuskSync (" + getProxy().getName() + ") v" + getDescription().getVersion());
// Mark as ready for redis message processing // Mark as ready for redis message processing
readyForRedis = true; readyForRedis = true;
@ -144,23 +136,14 @@ public final class HuskSyncBungeeCord extends Plugin {
server.serverUUID().toString(), server.serverUUID().toString(),
ProxyServer.getInstance().getName()).send(); ProxyServer.getInstance().getName()).send();
} catch (IOException e) { } catch (IOException e) {
getInstance().getLogger().log(Level.SEVERE, "Failed to serialize Redis message for handshake termination", e); getBungeeLogger().log(Level.SEVERE, "Failed to serialize Redis message for handshake termination", e);
} }
} }
// Close the database dataManager.closeDatabases();
for (Database database : clusterDatabases.values()) {
database.close();
}
// Log to console // Log to console
getLogger().info("Disabled HuskSync (" + getProxy().getName() + ") v" + getDescription().getVersion()); getBungeeLogger().info("Disabled HuskSync (" + getProxy().getName() + ") v" + getDescription().getVersion());
} }
/**
* A record representing a server synchronised on the network and whether it has MySqlPlayerDataBridge installed
*/
public record Server(UUID serverUUID, boolean hasMySqlPlayerDataBridge, String huskSyncVersion, String serverBrand,
String clusterId) {
}
} }

@ -2,13 +2,14 @@ package me.william278.husksync.bungeecord.command;
import de.themoep.minedown.MineDown; import de.themoep.minedown.MineDown;
import me.william278.husksync.HuskSyncBungeeCord; import me.william278.husksync.HuskSyncBungeeCord;
import me.william278.husksync.Server;
import me.william278.husksync.bungeecord.util.BungeeUpdateChecker; import me.william278.husksync.bungeecord.util.BungeeUpdateChecker;
import me.william278.husksync.util.MessageManager; import me.william278.husksync.util.MessageManager;
import me.william278.husksync.PlayerData; import me.william278.husksync.PlayerData;
import me.william278.husksync.Settings; import me.william278.husksync.Settings;
import me.william278.husksync.bungeecord.config.ConfigLoader; import me.william278.husksync.bungeecord.config.ConfigLoader;
import me.william278.husksync.bungeecord.config.ConfigManager; import me.william278.husksync.bungeecord.config.ConfigManager;
import me.william278.husksync.bungeecord.data.DataManager; import me.william278.husksync.bungeecord.migrator.MPDBMigrator;
import me.william278.husksync.redis.RedisMessage; import me.william278.husksync.redis.RedisMessage;
import net.md_5.bungee.api.CommandSender; import net.md_5.bungee.api.CommandSender;
import net.md_5.bungee.api.ProxyServer; import net.md_5.bungee.api.ProxyServer;
@ -59,7 +60,7 @@ public class HuskSyncCommand extends Command implements TabExecutor {
int updatesNeeded = 0; int updatesNeeded = 0;
String bukkitBrand = "Spigot"; String bukkitBrand = "Spigot";
String bukkitVersion = "1.0"; String bukkitVersion = "1.0";
for (HuskSyncBungeeCord.Server server : HuskSyncBungeeCord.synchronisedServers) { for (Server server : HuskSyncBungeeCord.synchronisedServers) {
BungeeUpdateChecker updateChecker = new BungeeUpdateChecker(server.huskSyncVersion()); BungeeUpdateChecker updateChecker = new BungeeUpdateChecker(server.huskSyncVersion());
if (!updateChecker.isUpToDate()) { if (!updateChecker.isUpToDate()) {
updatesNeeded++; updatesNeeded++;
@ -155,7 +156,7 @@ public class HuskSyncCommand extends Command implements TabExecutor {
} }
int playerDataSize = 0; int playerDataSize = 0;
for (Settings.SynchronisationCluster cluster : Settings.clusters) { for (Settings.SynchronisationCluster cluster : Settings.clusters) {
playerDataSize += DataManager.playerDataCache.get(cluster).playerData.size(); playerDataSize += HuskSyncBungeeCord.dataManager.playerDataCache.get(cluster).playerData.size();
} }
sender.sendMessage(new MineDown(MessageManager.PLUGIN_STATUS.toString() sender.sendMessage(new MineDown(MessageManager.PLUGIN_STATUS.toString()
.replaceAll("%1%", String.valueOf(HuskSyncBungeeCord.synchronisedServers.size())) .replaceAll("%1%", String.valueOf(HuskSyncBungeeCord.synchronisedServers.size()))
@ -179,7 +180,7 @@ public class HuskSyncCommand extends Command implements TabExecutor {
"reload") "reload")
.send(); .send();
} catch (IOException e) { } catch (IOException e) {
plugin.getLogger().log(Level.WARNING, "Failed to serialize reload notification message data"); plugin.getBungeeLogger().log(Level.WARNING, "Failed to serialize reload notification message data");
} }
sender.sendMessage(new MineDown(MessageManager.getMessage("reload_complete")).toComponent()); sender.sendMessage(new MineDown(MessageManager.getMessage("reload_complete")).toComponent());
@ -190,6 +191,123 @@ public class HuskSyncCommand extends Command implements TabExecutor {
} else { } else {
sendAboutInformation(player); sendAboutInformation(player);
} }
} else {
// Database migration wizard
if (args.length >= 1) {
if (args[0].equalsIgnoreCase("migrate")) {
if (args.length == 1) {
sender.sendMessage(new MineDown(
"""
=== MySQLPlayerDataBridge Migration Wizard ==========
This will migrate data from the MySQLPlayerDataBridge
plugin to HuskSync.
Data that will be migrated:
- Inventories
- Ender Chests
- Experience points
Other non-vital data, such as current health, hunger
& potion effects will not be migrated to ensure that
migration does not take an excessive amount of time.
To do this, you need to have MySqlPlayerDataBridge
and HuskSync installed on one Spigot server as well
as HuskSync installed on the proxy (which you have)
>To proceed, type: husksync migrate setup""").toComponent());
} else {
switch (args[1].toLowerCase()) {
case "setup" -> sender.sendMessage(new MineDown(
"""
=== MySQLPlayerDataBridge Migration Wizard ==========
The following database settings will be used.
Please make sure they match the correct settings to
access your MySQLPlayerDataBridge Data
sourceHost: %1%
sourcePort: %2%
sourceDatabase: %3%
sourceUsername: %4%
sourcePassword: %5%
sourceInventoryTableName: %6%
sourceEnderChestTableName: %7%
sourceExperienceTableName: %8%
targetCluster: %9%
To change a setting, type:
husksync migrate setting <settingName> <value>
Please ensure no players are logged in to the network
and that at least one Spigot server is online with
both HuskSync AND MySqlPlayerDataBridge installed AND
that the server has been configured with the correct
Redis credentials.
Warning: Data will be saved to your configured data
source, which is currently a %10% database.
Please make sure you are happy with this, or stop
the proxy server and edit this in config.yml
Warning: Migration will overwrite any current data
saved by HuskSync. It will not, however, delete any
data from the source MySQLPlayerDataBridge database.
>When done, type: husksync migrate start"""
.replaceAll("%1%", MPDBMigrator.migrationSettings.sourceHost)
.replaceAll("%2%", String.valueOf(MPDBMigrator.migrationSettings.sourcePort))
.replaceAll("%3%", MPDBMigrator.migrationSettings.sourceDatabase)
.replaceAll("%4%", MPDBMigrator.migrationSettings.sourceUsername)
.replaceAll("%5%", MPDBMigrator.migrationSettings.sourcePassword)
.replaceAll("%6%", MPDBMigrator.migrationSettings.inventoryDataTable)
.replaceAll("%7%", MPDBMigrator.migrationSettings.enderChestDataTable)
.replaceAll("%8%", MPDBMigrator.migrationSettings.expDataTable)
.replaceAll("%9%", MPDBMigrator.migrationSettings.targetCluster)
.replaceAll("%10%", Settings.dataStorageType.toString())
).toComponent());
case "setting" -> {
if (args.length == 4) {
String value = args[3];
switch (args[2]) {
case "sourceHost", "host" -> MPDBMigrator.migrationSettings.sourceHost = value;
case "sourcePort", "port" -> {
try {
MPDBMigrator.migrationSettings.sourcePort = Integer.parseInt(value);
} catch (NumberFormatException e) {
sender.sendMessage(new MineDown("Error: Invalid value; port must be a number").toComponent());
return;
}
}
case "sourceDatabase", "database" -> MPDBMigrator.migrationSettings.sourceDatabase = value;
case "sourceUsername", "username" -> MPDBMigrator.migrationSettings.sourceUsername = value;
case "sourcePassword", "password" -> MPDBMigrator.migrationSettings.sourcePassword = value;
case "sourceInventoryTableName", "inventoryTableName", "inventoryTable" -> MPDBMigrator.migrationSettings.inventoryDataTable = value;
case "sourceEnderChestTableName", "enderChestTableName", "enderChestTable" -> MPDBMigrator.migrationSettings.enderChestDataTable = value;
case "sourceExperienceTableName", "experienceTableName", "experienceTable" -> MPDBMigrator.migrationSettings.expDataTable = value;
case "targetCluster", "cluster" -> MPDBMigrator.migrationSettings.targetCluster = value;
default -> {
sender.sendMessage(new MineDown("Error: Invalid setting; please use \"husksync migrate setup\" to view a list").toComponent());
return;
}
}
sender.sendMessage(new MineDown("Successfully updated setting: \"" + args[2] + "\" --> \"" + value + "\"").toComponent());
} else {
sender.sendMessage(new MineDown("Error: Invalid usage. Syntax: husksync migrate setting <settingName> <value>").toComponent());
}
}
case "start" -> {
sender.sendMessage(new MineDown("Starting MySQLPlayerDataBridge migration!...").toComponent());
HuskSyncBungeeCord.mpdbMigrator.start();
}
default -> sender.sendMessage(new MineDown("Error: Invalid argument for migration. Use \"husksync migrate\" to start the process").toComponent());
}
}
return;
}
}
sender.sendMessage(new MineDown("Error: Invalid syntax. Usage: husksync migrate <args>").toComponent());
} }
} }
@ -206,7 +324,7 @@ public class HuskSyncCommand extends Command implements TabExecutor {
ProxyServer.getInstance().getScheduler().runAsync(plugin, () -> { ProxyServer.getInstance().getScheduler().runAsync(plugin, () -> {
for (Settings.SynchronisationCluster cluster : Settings.clusters) { for (Settings.SynchronisationCluster cluster : Settings.clusters) {
if (!cluster.clusterId().equals(clusterId)) continue; if (!cluster.clusterId().equals(clusterId)) continue;
PlayerData playerData = DataManager.getPlayerDataByName(targetPlayerName, cluster.clusterId()); PlayerData playerData = HuskSyncBungeeCord.dataManager.getPlayerDataByName(targetPlayerName, cluster.clusterId());
if (playerData == null) { if (playerData == null) {
viewer.sendMessage(new MineDown(MessageManager.getMessage("error_invalid_player")).toComponent()); viewer.sendMessage(new MineDown(MessageManager.getMessage("error_invalid_player")).toComponent());
return; return;
@ -219,7 +337,7 @@ public class HuskSyncCommand extends Command implements TabExecutor {
viewer.sendMessage(new MineDown(MessageManager.getMessage("viewing_inventory_of").replaceAll("%1%", viewer.sendMessage(new MineDown(MessageManager.getMessage("viewing_inventory_of").replaceAll("%1%",
targetPlayerName)).toComponent()); targetPlayerName)).toComponent());
} catch (IOException e) { } catch (IOException e) {
plugin.getLogger().log(Level.WARNING, "Failed to serialize inventory-see player data", e); plugin.getBungeeLogger().log(Level.WARNING, "Failed to serialize inventory-see player data", e);
} }
return; return;
} }
@ -240,7 +358,7 @@ public class HuskSyncCommand extends Command implements TabExecutor {
ProxyServer.getInstance().getScheduler().runAsync(plugin, () -> { ProxyServer.getInstance().getScheduler().runAsync(plugin, () -> {
for (Settings.SynchronisationCluster cluster : Settings.clusters) { for (Settings.SynchronisationCluster cluster : Settings.clusters) {
if (!cluster.clusterId().equals(clusterId)) continue; if (!cluster.clusterId().equals(clusterId)) continue;
PlayerData playerData = DataManager.getPlayerDataByName(targetPlayerName, cluster.clusterId()); PlayerData playerData = HuskSyncBungeeCord.dataManager.getPlayerDataByName(targetPlayerName, cluster.clusterId());
if (playerData == null) { if (playerData == null) {
viewer.sendMessage(new MineDown(MessageManager.getMessage("error_invalid_player")).toComponent()); viewer.sendMessage(new MineDown(MessageManager.getMessage("error_invalid_player")).toComponent());
return; return;
@ -253,7 +371,7 @@ public class HuskSyncCommand extends Command implements TabExecutor {
viewer.sendMessage(new MineDown(MessageManager.getMessage("viewing_ender_chest_of").replaceAll("%1%", viewer.sendMessage(new MineDown(MessageManager.getMessage("viewing_ender_chest_of").replaceAll("%1%",
targetPlayerName)).toComponent()); targetPlayerName)).toComponent());
} catch (IOException e) { } catch (IOException e) {
plugin.getLogger().log(Level.WARNING, "Failed to serialize inventory-see player data", e); plugin.getBungeeLogger().log(Level.WARNING, "Failed to serialize inventory-see player data", e);
} }
return; return;
} }
@ -272,7 +390,7 @@ public class HuskSyncCommand extends Command implements TabExecutor {
new RedisMessage.MessageTarget(Settings.ServerType.BUKKIT, player.getUniqueId(), null), new RedisMessage.MessageTarget(Settings.ServerType.BUKKIT, player.getUniqueId(), null),
plugin.getProxy().getName(), plugin.getDescription().getVersion()).send(); plugin.getProxy().getName(), plugin.getDescription().getVersion()).send();
} catch (IOException e) { } catch (IOException e) {
plugin.getLogger().log(Level.WARNING, "Failed to serialize plugin information to send", e); plugin.getBungeeLogger().log(Level.WARNING, "Failed to serialize plugin information to send", e);
} }
} }

@ -37,7 +37,7 @@ public class ConfigLoader {
Settings.language = config.getString("language", "en-gb"); Settings.language = config.getString("language", "en-gb");
Settings.serverType = Settings.ServerType.BUNGEECORD; Settings.serverType = Settings.ServerType.PROXY;
Settings.automaticUpdateChecks = config.getBoolean("check_for_updates", true); Settings.automaticUpdateChecks = config.getBoolean("check_for_updates", true);
Settings.redisHost = config.getString("redis_settings.host", "localhost"); Settings.redisHost = config.getString("redis_settings.host", "localhost");
Settings.redisPort = config.getInt("redis_settings.port", 6379); Settings.redisPort = config.getInt("redis_settings.port", 6379);

@ -19,16 +19,16 @@ public class ConfigManager {
try { try {
if (!plugin.getDataFolder().exists()) { if (!plugin.getDataFolder().exists()) {
if (plugin.getDataFolder().mkdir()) { if (plugin.getDataFolder().mkdir()) {
plugin.getLogger().info("Created HuskSync data folder"); plugin.getBungeeLogger().info("Created HuskSync data folder");
} }
} }
File configFile = new File(plugin.getDataFolder(), "config.yml"); File configFile = new File(plugin.getDataFolder(), "config.yml");
if (!configFile.exists()) { if (!configFile.exists()) {
Files.copy(plugin.getResourceAsStream("bungee-config.yml"), configFile.toPath()); Files.copy(plugin.getResourceAsStream("proxy-config.yml"), configFile.toPath());
plugin.getLogger().info("Created HuskSync config file"); plugin.getBungeeLogger().info("Created HuskSync config file");
} }
} catch (Exception e) { } catch (Exception e) {
plugin.getLogger().log(Level.CONFIG, "An exception occurred loading the configuration file", e); plugin.getBungeeLogger().log(Level.CONFIG, "An exception occurred loading the configuration file", e);
} }
} }
@ -36,7 +36,7 @@ public class ConfigManager {
try { try {
ConfigurationProvider.getProvider(YamlConfiguration.class).save(config, new File(plugin.getDataFolder(), "config.yml")); ConfigurationProvider.getProvider(YamlConfiguration.class).save(config, new File(plugin.getDataFolder(), "config.yml"));
} catch (IOException e) { } catch (IOException e) {
plugin.getLogger().log(Level.CONFIG, "An exception occurred loading the configuration file", e); plugin.getBungeeLogger().log(Level.CONFIG, "An exception occurred loading the configuration file", e);
} }
} }
@ -44,16 +44,16 @@ public class ConfigManager {
try { try {
if (!plugin.getDataFolder().exists()) { if (!plugin.getDataFolder().exists()) {
if (plugin.getDataFolder().mkdir()) { if (plugin.getDataFolder().mkdir()) {
plugin.getLogger().info("Created HuskSync data folder"); plugin.getBungeeLogger().info("Created HuskSync data folder");
} }
} }
File messagesFile = new File(plugin.getDataFolder(), "messages_" + Settings.language + ".yml"); File messagesFile = new File(plugin.getDataFolder(), "messages_" + Settings.language + ".yml");
if (!messagesFile.exists()) { if (!messagesFile.exists()) {
Files.copy(plugin.getResourceAsStream("languages/" + Settings.language + ".yml"), messagesFile.toPath()); Files.copy(plugin.getResourceAsStream("languages/" + Settings.language + ".yml"), messagesFile.toPath());
plugin.getLogger().info("Created HuskSync messages file"); plugin.getBungeeLogger().info("Created HuskSync messages file");
} }
} catch (Exception e) { } catch (Exception e) {
plugin.getLogger().log(Level.CONFIG, "An exception occurred loading the messages file", e); plugin.getBungeeLogger().log(Level.CONFIG, "An exception occurred loading the messages file", e);
} }
} }
@ -62,7 +62,7 @@ public class ConfigManager {
File configFile = new File(plugin.getDataFolder(), "config.yml"); File configFile = new File(plugin.getDataFolder(), "config.yml");
return ConfigurationProvider.getProvider(YamlConfiguration.class).load(configFile); return ConfigurationProvider.getProvider(YamlConfiguration.class).load(configFile);
} catch (IOException e) { } catch (IOException e) {
plugin.getLogger().log(Level.CONFIG, "An IOException occurred fetching the configuration file", e); plugin.getBungeeLogger().log(Level.CONFIG, "An IOException occurred fetching the configuration file", e);
return null; return null;
} }
} }
@ -72,7 +72,7 @@ public class ConfigManager {
File configFile = new File(plugin.getDataFolder(), "messages_" + Settings.language + ".yml"); File configFile = new File(plugin.getDataFolder(), "messages_" + Settings.language + ".yml");
return ConfigurationProvider.getProvider(YamlConfiguration.class).load(configFile); return ConfigurationProvider.getProvider(YamlConfiguration.class).load(configFile);
} catch (IOException e) { } catch (IOException e) {
plugin.getLogger().log(Level.CONFIG, "An IOException occurred fetching the messages file", e); plugin.getBungeeLogger().log(Level.CONFIG, "An IOException occurred fetching the messages file", e);
return null; return null;
} }
} }

@ -2,7 +2,6 @@ package me.william278.husksync.bungeecord.listener;
import me.william278.husksync.HuskSyncBungeeCord; import me.william278.husksync.HuskSyncBungeeCord;
import me.william278.husksync.PlayerData; import me.william278.husksync.PlayerData;
import me.william278.husksync.bungeecord.data.DataManager;
import me.william278.husksync.Settings; import me.william278.husksync.Settings;
import me.william278.husksync.redis.RedisMessage; import me.william278.husksync.redis.RedisMessage;
import net.md_5.bungee.api.ProxyServer; import net.md_5.bungee.api.ProxyServer;
@ -24,15 +23,15 @@ public class BungeeEventListener implements Listener {
final ProxiedPlayer player = event.getPlayer(); final ProxiedPlayer player = event.getPlayer();
ProxyServer.getInstance().getScheduler().runAsync(plugin, () -> { ProxyServer.getInstance().getScheduler().runAsync(plugin, () -> {
// Ensure the player has data on SQL and that it is up-to-date // Ensure the player has data on SQL and that it is up-to-date
DataManager.ensurePlayerExists(player.getUniqueId(), player.getName()); HuskSyncBungeeCord.dataManager.ensurePlayerExists(player.getUniqueId(), player.getName());
// Get the player's data from SQL // Get the player's data from SQL
final Map<Settings.SynchronisationCluster,PlayerData> data = DataManager.getPlayerData(player.getUniqueId()); final Map<Settings.SynchronisationCluster,PlayerData> data = HuskSyncBungeeCord.dataManager.getPlayerData(player.getUniqueId());
// Update the player's data from SQL onto the cache // Update the player's data from SQL onto the cache
assert data != null; assert data != null;
for (Settings.SynchronisationCluster cluster : data.keySet()) { for (Settings.SynchronisationCluster cluster : data.keySet()) {
DataManager.playerDataCache.get(cluster).updatePlayer(data.get(cluster)); HuskSyncBungeeCord.dataManager.playerDataCache.get(cluster).updatePlayer(data.get(cluster));
} }
// Send a message asking the bukkit to request data on join // Send a message asking the bukkit to request data on join
@ -41,7 +40,7 @@ public class BungeeEventListener implements Listener {
new RedisMessage.MessageTarget(Settings.ServerType.BUKKIT, null, null), new RedisMessage.MessageTarget(Settings.ServerType.BUKKIT, null, null),
RedisMessage.RequestOnJoinUpdateType.ADD_REQUESTER.toString(), player.getUniqueId().toString()).send(); RedisMessage.RequestOnJoinUpdateType.ADD_REQUESTER.toString(), player.getUniqueId().toString()).send();
} catch (IOException e) { } catch (IOException e) {
plugin.getLogger().log(Level.SEVERE, "Failed to serialize request data on join message data"); plugin.getBungeeLogger().log(Level.SEVERE, "Failed to serialize request data on join message data");
e.printStackTrace(); e.printStackTrace();
} }
}); });

@ -2,10 +2,11 @@ package me.william278.husksync.bungeecord.listener;
import de.themoep.minedown.MineDown; import de.themoep.minedown.MineDown;
import me.william278.husksync.HuskSyncBungeeCord; import me.william278.husksync.HuskSyncBungeeCord;
import me.william278.husksync.Server;
import me.william278.husksync.util.MessageManager; import me.william278.husksync.util.MessageManager;
import me.william278.husksync.PlayerData; import me.william278.husksync.PlayerData;
import me.william278.husksync.Settings; import me.william278.husksync.Settings;
import me.william278.husksync.bungeecord.data.DataManager; import me.william278.husksync.bungeecord.migrator.MPDBMigrator;
import me.william278.husksync.redis.RedisListener; import me.william278.husksync.redis.RedisListener;
import me.william278.husksync.redis.RedisMessage; import me.william278.husksync.redis.RedisMessage;
import net.md_5.bungee.api.ChatMessageType; import net.md_5.bungee.api.ChatMessageType;
@ -31,13 +32,13 @@ public class BungeeRedisListener extends RedisListener {
for (Settings.SynchronisationCluster cluster : Settings.clusters) { for (Settings.SynchronisationCluster cluster : Settings.clusters) {
if (cluster.clusterId().equals(clusterId)) { if (cluster.clusterId().equals(clusterId)) {
// Get the player data from the cache // Get the player data from the cache
PlayerData cachedData = DataManager.playerDataCache.get(cluster).getPlayer(uuid); PlayerData cachedData = HuskSyncBungeeCord.dataManager.playerDataCache.get(cluster).getPlayer(uuid);
if (cachedData != null) { if (cachedData != null) {
return cachedData; return cachedData;
} }
data = Objects.requireNonNull(DataManager.getPlayerData(uuid)).get(cluster); // Get their player data from MySQL data = Objects.requireNonNull(HuskSyncBungeeCord.dataManager.getPlayerData(uuid)).get(cluster); // Get their player data from MySQL
DataManager.playerDataCache.get(cluster).updatePlayer(data); // Update the cache HuskSyncBungeeCord.dataManager.playerDataCache.get(cluster).updatePlayer(data); // Update the cache
break; break;
} }
} }
@ -52,7 +53,7 @@ public class BungeeRedisListener extends RedisListener {
@Override @Override
public void handleMessage(RedisMessage message) { public void handleMessage(RedisMessage message) {
// Ignore messages destined for Bukkit servers // Ignore messages destined for Bukkit servers
if (message.getMessageTarget().targetServerType() != Settings.ServerType.BUNGEECORD) { if (message.getMessageTarget().targetServerType() != Settings.ServerType.PROXY) {
return; return;
} }
// Only process redis messages when ready // Only process redis messages when ready
@ -106,7 +107,7 @@ public class BungeeRedisListener extends RedisListener {
// Update the data in the cache and SQL // Update the data in the cache and SQL
for (Settings.SynchronisationCluster cluster : Settings.clusters) { for (Settings.SynchronisationCluster cluster : Settings.clusters) {
if (cluster.clusterId().equals(message.getMessageTarget().targetClusterId())) { if (cluster.clusterId().equals(message.getMessageTarget().targetClusterId())) {
DataManager.updatePlayerData(playerData, cluster); HuskSyncBungeeCord.dataManager.updatePlayerData(playerData, cluster);
break; break;
} }
} }
@ -143,7 +144,7 @@ public class BungeeRedisListener extends RedisListener {
serverUUID.toString(), plugin.getProxy().getName()) serverUUID.toString(), plugin.getProxy().getName())
.send(); .send();
HuskSyncBungeeCord.synchronisedServers.add( HuskSyncBungeeCord.synchronisedServers.add(
new HuskSyncBungeeCord.Server(serverUUID, hasMySqlPlayerDataBridge, new Server(serverUUID, hasMySqlPlayerDataBridge,
huskSyncVersion, bukkitBrand, message.getMessageTarget().targetClusterId())); huskSyncVersion, bukkitBrand, message.getMessageTarget().targetClusterId()));
log(Level.INFO, "Completed handshake with " + bukkitBrand + " server (" + serverUUID + ")"); log(Level.INFO, "Completed handshake with " + bukkitBrand + " server (" + serverUUID + ")");
} catch (IOException e) { } catch (IOException e) {
@ -157,8 +158,8 @@ public class BungeeRedisListener extends RedisListener {
final String bukkitBrand = message.getMessageDataElements()[1]; final String bukkitBrand = message.getMessageDataElements()[1];
// Remove a server from the synchronised server list // Remove a server from the synchronised server list
HuskSyncBungeeCord.Server serverToRemove = null; Server serverToRemove = null;
for (HuskSyncBungeeCord.Server server : HuskSyncBungeeCord.synchronisedServers) { for (Server server : HuskSyncBungeeCord.synchronisedServers) {
if (server.serverUUID().equals(serverUUID)) { if (server.serverUUID().equals(serverUUID)) {
serverToRemove = server; serverToRemove = server;
break; break;
@ -167,6 +168,31 @@ public class BungeeRedisListener extends RedisListener {
HuskSyncBungeeCord.synchronisedServers.remove(serverToRemove); HuskSyncBungeeCord.synchronisedServers.remove(serverToRemove);
log(Level.INFO, "Terminated the handshake with " + bukkitBrand + " server (" + serverUUID + ")"); log(Level.INFO, "Terminated the handshake with " + bukkitBrand + " server (" + serverUUID + ")");
} }
case DECODED_MPDB_DATA_SET -> {
// Deserialize the PlayerData received
PlayerData playerData;
final String serializedPlayerData = message.getMessageDataElements()[0];
final String playerName = message.getMessageDataElements()[1];
try {
playerData = (PlayerData) RedisMessage.deserialize(serializedPlayerData);
} catch (IOException | ClassNotFoundException e) {
log(Level.SEVERE, "Failed to deserialize PlayerData when handling incoming decoded MPDB data");
e.printStackTrace();
return;
}
// Add the incoming data to the data to be saved
MPDBMigrator.incomingPlayerData.put(playerData, playerName);
// Increment players migrated
MPDBMigrator.playersMigrated++;
plugin.getBungeeLogger().log(Level.INFO, "Migrated " + MPDBMigrator.playersMigrated + "/" + MPDBMigrator.migratedDataSent + " players.");
// When all the data has been received, save it
if (MPDBMigrator.migratedDataSent == MPDBMigrator.playersMigrated) {
MPDBMigrator.loadIncomingData(MPDBMigrator.incomingPlayerData);
}
}
} }
} }
@ -178,6 +204,6 @@ public class BungeeRedisListener extends RedisListener {
*/ */
@Override @Override
public void log(Level level, String message) { public void log(Level level, String message) {
plugin.getLogger().log(level, message); plugin.getBungeeLogger().log(level, message);
} }
} }

@ -0,0 +1,302 @@
package me.william278.husksync.bungeecord.migrator;
import me.william278.husksync.HuskSyncBungeeCord;
import me.william278.husksync.PlayerData;
import me.william278.husksync.Server;
import me.william278.husksync.Settings;
import me.william278.husksync.migrator.MPDBPlayerData;
import me.william278.husksync.proxy.data.sql.Database;
import me.william278.husksync.proxy.data.sql.MySQL;
import me.william278.husksync.redis.RedisMessage;
import net.md_5.bungee.api.ProxyServer;
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.UUID;
import java.util.logging.Level;
/**
 * Class to handle migration of data from MySQLPlayerDataBridge (MPDB)
 * <p>
 * The migrator accesses and decodes MPDB's format directly,
 * by communicating with a Spigot server that has both HuskSync and
 * MySqlPlayerDataBridge installed
 */
public class MPDBMigrator {

    // Progress counters: data is "sent" when dispatched to a Bukkit server for
    // decoding, and a player is "migrated" once their decoded data arrives back
    public static int migratedDataSent = 0;
    public static int playersMigrated = 0;

    private static final HuskSyncBungeeCord plugin = HuskSyncBungeeCord.getInstance();

    // Decoded PlayerData mapped to the player's username, filled as Bukkit servers reply
    public static HashMap<PlayerData, String> incomingPlayerData;

    // Connection and table settings used for the next migration run
    public static MigrationSettings migrationSettings = new MigrationSettings();

    private static Settings.SynchronisationCluster targetCluster;
    private static Database sourceDatabase;

    private static HashSet<MPDBPlayerData> mpdbPlayerData;

    /**
     * Starts a migration run.
     * <p>
     * Validates the preconditions (empty network, at least one online Spigot
     * server with MySqlPlayerDataBridge, a resolvable target cluster), connects
     * to the source database, then asynchronously reads the MPDB tables and
     * dispatches the raw data to a Bukkit server for decoding.
     */
    public void start() {
        // Migration mutates player tables wholesale; refuse to run with players online
        if (!ProxyServer.getInstance().getPlayers().isEmpty()) {
            plugin.getBungeeLogger().log(Level.WARNING, "Failed to start migration because there are players online. " +
                    "Your network has to be empty to migrate data for safety reasons.");
            return;
        }

        // At least one server able to decode MPDB's serialization format must be online
        int synchronisedServersWithMpdb = 0;
        for (Server server : HuskSyncBungeeCord.synchronisedServers) {
            if (server.hasMySqlPlayerDataBridge()) {
                synchronisedServersWithMpdb++;
            }
        }
        if (synchronisedServersWithMpdb < 1) {
            plugin.getBungeeLogger().log(Level.WARNING, "Failed to start migration because at least one Spigot server with both HuskSync and MySqlPlayerDataBridge installed is not online. " +
                    "Please start one Spigot server with HuskSync installed to begin migration.");
            return;
        }

        // Resolve the configured target cluster. Reset the static field first so a
        // value left over from a previous run cannot be reused when the configured
        // cluster ID no longer matches anything.
        targetCluster = null;
        for (Settings.SynchronisationCluster cluster : Settings.clusters) {
            if (migrationSettings.targetCluster.equals(cluster.clusterId())) {
                targetCluster = cluster;
                break;
            }
        }
        if (targetCluster == null) {
            plugin.getBungeeLogger().log(Level.WARNING, "Failed to start migration because the target cluster could not be found. " +
                    "Please ensure the target cluster is correct, configured in the proxy config file, then try again");
            return;
        }

        // Reset per-run state
        migratedDataSent = 0;
        playersMigrated = 0;
        mpdbPlayerData = new HashSet<>();
        incomingPlayerData = new HashMap<>();
        final MigrationSettings settings = migrationSettings;

        // Get connection to source database
        sourceDatabase = new MigratorMySQL(plugin, settings.sourceHost, settings.sourcePort,
                settings.sourceDatabase, settings.sourceUsername, settings.sourcePassword, targetCluster);
        sourceDatabase.load();
        if (sourceDatabase.isInactive()) {
            plugin.getBungeeLogger().log(Level.WARNING, "Failed to establish connection to the origin MySQL database. " +
                    "Please check you have input the correct connection details and try again.");
            return;
        }

        // Carry out the migration off the proxy's main thread
        ProxyServer.getInstance().getScheduler().runAsync(plugin, () -> {
            prepareTargetDatabase();
            getInventoryData();
            getEnderChestData();
            getExperienceData();
            sendEncodedData();
        });
    }

    /**
     * Clears the target cluster's player and data tables so the migrated data
     * starts from a clean slate.
     */
    private void prepareTargetDatabase() {
        plugin.getBungeeLogger().log(Level.INFO, "Preparing target database...");
        try (Connection connection = HuskSyncBungeeCord.dataManager.getConnection(targetCluster.clusterId())) {
            try (PreparedStatement statement = connection.prepareStatement("DELETE FROM " + targetCluster.playerTableName() + ";")) {
                statement.executeUpdate();
            }
            try (PreparedStatement statement = connection.prepareStatement("DELETE FROM " + targetCluster.dataTableName() + ";")) {
                statement.executeUpdate();
            }
        } catch (SQLException e) {
            plugin.getBungeeLogger().log(Level.SEVERE, "An exception occurred preparing the target database", e);
        } finally {
            plugin.getBungeeLogger().log(Level.INFO, "Finished preparing target database!");
        }
    }

    /**
     * Reads every row of MPDB's inventory table, creating one
     * {@link MPDBPlayerData} entry per player with their inventory and armour.
     */
    private void getInventoryData() {
        plugin.getBungeeLogger().log(Level.INFO, "Getting inventory data from MySQLPlayerDataBridge...");
        try (Connection connection = sourceDatabase.getConnection()) {
            try (PreparedStatement statement = connection.prepareStatement("SELECT * FROM " + migrationSettings.inventoryDataTable + ";")) {
                ResultSet resultSet = statement.executeQuery();
                while (resultSet.next()) {
                    final UUID playerUUID = UUID.fromString(resultSet.getString("player_uuid"));
                    final String playerName = resultSet.getString("player_name");

                    MPDBPlayerData data = new MPDBPlayerData(playerUUID, playerName);
                    data.inventoryData = resultSet.getString("inventory");
                    data.armorData = resultSet.getString("armor");

                    mpdbPlayerData.add(data);
                }
            }
        } catch (SQLException e) {
            plugin.getBungeeLogger().log(Level.SEVERE, "An exception occurred getting inventory data", e);
        } finally {
            plugin.getBungeeLogger().log(Level.INFO, "Finished getting inventory data from MySQLPlayerDataBridge");
        }
    }

    /**
     * Reads MPDB's ender chest table and attaches the serialized chest contents
     * to the matching {@link MPDBPlayerData} entries collected earlier.
     */
    private void getEnderChestData() {
        plugin.getBungeeLogger().log(Level.INFO, "Getting ender chest data from MySQLPlayerDataBridge...");
        try (Connection connection = sourceDatabase.getConnection()) {
            try (PreparedStatement statement = connection.prepareStatement("SELECT * FROM " + migrationSettings.enderChestDataTable + ";")) {
                ResultSet resultSet = statement.executeQuery();
                while (resultSet.next()) {
                    final UUID playerUUID = UUID.fromString(resultSet.getString("player_uuid"));
                    for (MPDBPlayerData data : mpdbPlayerData) {
                        if (data.playerUUID.equals(playerUUID)) {
                            data.enderChestData = resultSet.getString("enderchest");
                            break;
                        }
                    }
                }
            }
        } catch (SQLException e) {
            plugin.getBungeeLogger().log(Level.SEVERE, "An exception occurred getting ender chest data", e);
        } finally {
            plugin.getBungeeLogger().log(Level.INFO, "Finished getting ender chest data from MySQLPlayerDataBridge");
        }
    }

    /**
     * Reads MPDB's experience table and attaches level, progress and total XP
     * to the matching {@link MPDBPlayerData} entries collected earlier.
     */
    private void getExperienceData() {
        plugin.getBungeeLogger().log(Level.INFO, "Getting experience data from MySQLPlayerDataBridge...");
        try (Connection connection = sourceDatabase.getConnection()) {
            try (PreparedStatement statement = connection.prepareStatement("SELECT * FROM " + migrationSettings.expDataTable + ";")) {
                ResultSet resultSet = statement.executeQuery();
                while (resultSet.next()) {
                    final UUID playerUUID = UUID.fromString(resultSet.getString("player_uuid"));
                    for (MPDBPlayerData data : mpdbPlayerData) {
                        if (data.playerUUID.equals(playerUUID)) {
                            data.expLevel = resultSet.getInt("exp_lvl");
                            data.expProgress = resultSet.getFloat("exp");
                            data.totalExperience = resultSet.getInt("total_exp");
                            break;
                        }
                    }
                }
            }
        } catch (SQLException e) {
            plugin.getBungeeLogger().log(Level.SEVERE, "An exception occurred getting experience data", e);
        } finally {
            plugin.getBungeeLogger().log(Level.INFO, "Finished getting experience data from MySQLPlayerDataBridge");
        }
    }

    /**
     * Dispatches every collected {@link MPDBPlayerData} entry to the first
     * synchronised server that has MySqlPlayerDataBridge installed, for decoding.
     * <p>
     * Only one decoding server is needed, so the method returns after the first
     * MPDB-capable server has been used. (Previously the return was unconditional
     * on the first iterated server, so migration silently sent nothing when a
     * non-MPDB server happened to be iterated first.)
     */
    private void sendEncodedData() {
        for (Server processingServer : HuskSyncBungeeCord.synchronisedServers) {
            if (processingServer.hasMySqlPlayerDataBridge()) {
                for (MPDBPlayerData data : mpdbPlayerData) {
                    try {
                        new RedisMessage(RedisMessage.MessageType.DECODE_MPDB_DATA,
                                new RedisMessage.MessageTarget(Settings.ServerType.BUKKIT, null, null),
                                processingServer.serverUUID().toString(),
                                RedisMessage.serialize(data))
                                .send();
                        migratedDataSent++;
                    } catch (IOException e) {
                        plugin.getBungeeLogger().log(Level.SEVERE, "Failed to serialize encoded MPDB data", e);
                    }
                }
                plugin.getBungeeLogger().log(Level.INFO, "Finished dispatching encoded data for " + migratedDataSent + " players; please wait for conversion to finish");
                // One decoding server suffices; stop after the first MPDB-capable one
                return;
            }
        }
    }

    /**
     * Loads all incoming decoded MPDB data to the cache and database
     *
     * @param dataToLoad HashMap of the {@link PlayerData} to player Usernames that will be loaded
     */
    public static void loadIncomingData(HashMap<PlayerData, String> dataToLoad) {
        ProxyServer.getInstance().getScheduler().runAsync(plugin, () -> {
            int playersSaved = 0;
            plugin.getBungeeLogger().log(Level.INFO, "Saving data for " + playersMigrated + " players...");

            for (PlayerData playerData : dataToLoad.keySet()) {
                String playerName = dataToLoad.get(playerData);

                // Add the player to the MySQL table
                HuskSyncBungeeCord.dataManager.ensurePlayerExists(playerData.getPlayerUUID(), playerName);

                // Update the data in the cache and SQL
                // NOTE(review): the break means only the FIRST configured cluster is
                // written — presumably the target cluster; confirm this is intended
                for (Settings.SynchronisationCluster cluster : Settings.clusters) {
                    HuskSyncBungeeCord.dataManager.updatePlayerData(playerData, cluster);
                    break;
                }

                playersSaved++;
                plugin.getBungeeLogger().log(Level.INFO, "Saved data for " + playersSaved + "/" + playersMigrated + " players");
            }

            // Mark as done when done
            plugin.getBungeeLogger().log(Level.INFO, """
                    === MySQLPlayerDataBridge Migration Wizard ==========
                    Migration complete!
                    Successfully migrated data for %1%/%2% players.
                    You should now uninstall MySQLPlayerDataBridge from
                    the rest of the Spigot servers, then restart them.
                    """.replaceAll("%1%", Integer.toString(MPDBMigrator.playersMigrated))
                    .replaceAll("%2%", Integer.toString(MPDBMigrator.migratedDataSent)));
            sourceDatabase.close(); // Close source database
        });
    }

    /**
     * Class used to hold settings for the MPDB migration
     */
    public static class MigrationSettings {
        public String sourceHost;          // Hostname of the source MySQL server
        public int sourcePort;             // Port of the source MySQL server
        public String sourceDatabase;      // Name of the source database
        public String sourceUsername;      // Credentials for the source database
        public String sourcePassword;

        public String inventoryDataTable;  // MPDB inventory table name
        public String enderChestDataTable; // MPDB ender chest table name
        public String expDataTable;        // MPDB experience table name

        public String targetCluster;       // HuskSync cluster ID to migrate into

        /** Initialises the settings with MPDB's default table names and placeholder credentials. */
        public MigrationSettings() {
            sourceHost = "localhost";
            sourcePort = 3306;
            sourceDatabase = "mpdb";
            sourceUsername = "root";
            sourcePassword = "pa55w0rd";

            targetCluster = "main";

            inventoryDataTable = "mpdb_inventory";
            enderChestDataTable = "mpdb_enderchest";
            expDataTable = "mpdb_experience";
        }
    }

    /**
     * MySQL class used for importing data from MPDB
     */
    public static class MigratorMySQL extends MySQL {
        /**
         * Builds a MySQL connector pointed at the MPDB source database.
         *
         * @param instance the proxy plugin instance (supplies the logger)
         * @param host     source database host
         * @param port     source database port
         * @param database source database name
         * @param username source database username
         * @param password source database password
         * @param cluster  cluster whose settings the base class is initialised with
         */
        public MigratorMySQL(HuskSyncBungeeCord instance, String host, int port, String database, String username, String password, Settings.SynchronisationCluster cluster) {
            super(cluster, instance.getBungeeLogger());
            super.host = host;
            super.port = port;
            super.database = database;
            super.username = username;
            super.password = password;
            super.params = "?useSSL=false";
            // Distinct pool name so the migrator's pool does not clash with the main one
            super.dataPoolName = super.dataPoolName + "Migrator";
        }
    }

}

@ -0,0 +1,33 @@
package me.william278.husksync.bungeecord.util;
import me.william278.husksync.util.Logger;
import java.util.logging.Level;
/**
 * BungeeCord implementation of the plugin's {@code Logger} interface that
 * delegates every call straight to the proxy's {@code java.util.logging.Logger}.
 *
 * @param parent the underlying java.util.logging logger receiving all output
 */
public record BungeeLogger(java.util.logging.Logger parent) implements Logger {

    /** Logs a message and an accompanying exception at the given level. */
    @Override
    public void log(Level level, String message, Exception e) {
        parent.log(level, message, e);
    }

    /** Logs a plain message at the given level. */
    @Override
    public void log(Level level, String message) {
        parent.log(level, message);
    }

    /** Shorthand for logging at {@link Level#INFO}. */
    @Override
    public void info(String message) {
        parent.log(Level.INFO, message);
    }

    /** Shorthand for logging at {@link Level#SEVERE}. */
    @Override
    public void severe(String message) {
        parent.log(Level.SEVERE, message);
    }

    /** Shorthand for logging at {@link Level#CONFIG}. */
    @Override
    public void config(String message) {
        parent.log(Level.CONFIG, message);
    }
}

@ -15,6 +15,6 @@ public class BungeeUpdateChecker extends UpdateChecker {
@Override @Override
public void log(Level level, String message) { public void log(Level level, String message) {
plugin.getLogger().log(level, message); plugin.getBungeeLogger().log(level, message);
} }
} }

@ -1,5 +1,6 @@
dependencies { dependencies {
implementation 'redis.clients:jedis:3.7.0' implementation 'redis.clients:jedis:3.7.0'
implementation 'com.zaxxer:HikariCP:5.0.0'
} }
import org.apache.tools.ant.filters.ReplaceTokens import org.apache.tools.ant.filters.ReplaceTokens

@ -0,0 +1,10 @@
package me.william278.husksync;
import java.util.UUID;
/**
 * A record representing a server synchronised on the network and whether it has MySqlPlayerDataBridge installed
 *
 * @param serverUUID               unique ID identifying this server instance on the network
 * @param hasMySqlPlayerDataBridge whether the server reported MySqlPlayerDataBridge as installed during the handshake
 * @param huskSyncVersion          the HuskSync version the server is running
 * @param serverBrand              the server software brand reported during the handshake
 * @param clusterId                ID of the synchronisation cluster this server belongs to
 */
public record Server(UUID serverUUID, boolean hasMySqlPlayerDataBridge, String huskSyncVersion, String serverBrand,
                     String clusterId) {
}

@ -79,7 +79,7 @@ public class Settings {
public enum ServerType { public enum ServerType {
BUKKIT, BUKKIT,
BUNGEECORD PROXY,
} }
public enum DataStorageType { public enum DataStorageType {

@ -1,10 +1,13 @@
package me.william278.husksync.bungeecord.data; package me.william278.husksync.proxy.data;
import me.william278.husksync.PlayerData; import me.william278.husksync.PlayerData;
import me.william278.husksync.HuskSyncBungeeCord;
import me.william278.husksync.Settings; import me.william278.husksync.Settings;
import me.william278.husksync.bungeecord.data.sql.Database; import me.william278.husksync.proxy.data.sql.Database;
import me.william278.husksync.proxy.data.sql.MySQL;
import me.william278.husksync.proxy.data.sql.SQLite;
import me.william278.husksync.util.Logger;
import java.io.File;
import java.sql.*; import java.sql.*;
import java.time.Instant; import java.time.Instant;
import java.util.*; import java.util.*;
@ -12,12 +15,65 @@ import java.util.logging.Level;
public class DataManager { public class DataManager {
private static final HuskSyncBungeeCord plugin = HuskSyncBungeeCord.getInstance();
/** /**
* The player data cache for each cluster ID * The player data cache for each cluster ID
*/ */
public static HashMap<Settings.SynchronisationCluster, PlayerDataCache> playerDataCache = new HashMap<>(); public HashMap<Settings.SynchronisationCluster, PlayerDataCache> playerDataCache = new HashMap<>();
/**
* Map of the database assigned for each cluster
*/
private final HashMap<String, Database> clusterDatabases;
// Retrieve database connection for a cluster
public Connection getConnection(String clusterId) throws SQLException {
return clusterDatabases.get(clusterId).getConnection();
}
// Console logger for errors
private final Logger logger;
// Plugin data folder
private final File dataFolder;
// Flag variable identifying if the data manager failed to initialize
public boolean hasFailedInitialization = false;
public DataManager(Logger logger, File dataFolder) {
this.logger = logger;
this.dataFolder = dataFolder;
clusterDatabases = new HashMap<>();
initializeDatabases();
}
private void initializeDatabases() {
for (Settings.SynchronisationCluster cluster : Settings.clusters) {
Database clusterDatabase = switch (Settings.dataStorageType) {
case SQLITE -> new SQLite(cluster, dataFolder, logger);
case MYSQL -> new MySQL(cluster, logger);
};
clusterDatabase.load();
clusterDatabase.createTables();
clusterDatabases.put(cluster.clusterId(), clusterDatabase);
}
// Abort loading if the database failed to initialize
for (Database database : clusterDatabases.values()) {
if (database.isInactive()) {
hasFailedInitialization = true;
return;
}
}
}
/**
* Close the database connections
*/
public void closeDatabases() {
for (Database database : clusterDatabases.values()) {
database.close();
}
}
/** /**
* Checks if the player is registered on the database. * Checks if the player is registered on the database.
@ -26,7 +82,7 @@ public class DataManager {
* *
* @param playerUUID The UUID of the player to register * @param playerUUID The UUID of the player to register
*/ */
public static void ensurePlayerExists(UUID playerUUID, String playerName) { public void ensurePlayerExists(UUID playerUUID, String playerName) {
for (Settings.SynchronisationCluster cluster : Settings.clusters) { for (Settings.SynchronisationCluster cluster : Settings.clusters) {
if (!playerExists(playerUUID, cluster)) { if (!playerExists(playerUUID, cluster)) {
createPlayerEntry(playerUUID, playerName, cluster); createPlayerEntry(playerUUID, playerName, cluster);
@ -42,8 +98,8 @@ public class DataManager {
* @param playerUUID The UUID of the player * @param playerUUID The UUID of the player
* @return {@code true} if the player is on the player table * @return {@code true} if the player is on the player table
*/ */
private static boolean playerExists(UUID playerUUID, Settings.SynchronisationCluster cluster) { private boolean playerExists(UUID playerUUID, Settings.SynchronisationCluster cluster) {
try (Connection connection = HuskSyncBungeeCord.getConnection(cluster.clusterId())) { try (Connection connection = getConnection(cluster.clusterId())) {
try (PreparedStatement statement = connection.prepareStatement( try (PreparedStatement statement = connection.prepareStatement(
"SELECT * FROM " + cluster.playerTableName() + " WHERE `uuid`=?;")) { "SELECT * FROM " + cluster.playerTableName() + " WHERE `uuid`=?;")) {
statement.setString(1, playerUUID.toString()); statement.setString(1, playerUUID.toString());
@ -51,13 +107,13 @@ public class DataManager {
return resultSet.next(); return resultSet.next();
} }
} catch (SQLException e) { } catch (SQLException e) {
plugin.getLogger().log(Level.SEVERE, "An SQL exception occurred", e); logger.log(Level.SEVERE, "An SQL exception occurred", e);
return false; return false;
} }
} }
private static void createPlayerEntry(UUID playerUUID, String playerName, Settings.SynchronisationCluster cluster) { private void createPlayerEntry(UUID playerUUID, String playerName, Settings.SynchronisationCluster cluster) {
try (Connection connection = HuskSyncBungeeCord.getConnection(cluster.clusterId())) { try (Connection connection = getConnection(cluster.clusterId())) {
try (PreparedStatement statement = connection.prepareStatement( try (PreparedStatement statement = connection.prepareStatement(
"INSERT INTO " + cluster.playerTableName() + " (`uuid`,`username`) VALUES(?,?);")) { "INSERT INTO " + cluster.playerTableName() + " (`uuid`,`username`) VALUES(?,?);")) {
statement.setString(1, playerUUID.toString()); statement.setString(1, playerUUID.toString());
@ -65,12 +121,12 @@ public class DataManager {
statement.executeUpdate(); statement.executeUpdate();
} }
} catch (SQLException e) { } catch (SQLException e) {
plugin.getLogger().log(Level.SEVERE, "An SQL exception occurred", e); logger.log(Level.SEVERE, "An SQL exception occurred", e);
} }
} }
public static void updatePlayerName(UUID playerUUID, String playerName, Settings.SynchronisationCluster cluster) { public void updatePlayerName(UUID playerUUID, String playerName, Settings.SynchronisationCluster cluster) {
try (Connection connection = HuskSyncBungeeCord.getConnection(cluster.clusterId())) { try (Connection connection = getConnection(cluster.clusterId())) {
try (PreparedStatement statement = connection.prepareStatement( try (PreparedStatement statement = connection.prepareStatement(
"UPDATE " + cluster.playerTableName() + " SET `username`=? WHERE `uuid`=?;")) { "UPDATE " + cluster.playerTableName() + " SET `username`=? WHERE `uuid`=?;")) {
statement.setString(1, playerName); statement.setString(1, playerName);
@ -78,7 +134,7 @@ public class DataManager {
statement.executeUpdate(); statement.executeUpdate();
} }
} catch (SQLException e) { } catch (SQLException e) {
plugin.getLogger().log(Level.SEVERE, "An SQL exception occurred", e); logger.log(Level.SEVERE, "An SQL exception occurred", e);
} }
} }
@ -88,11 +144,11 @@ public class DataManager {
* @param playerName The PlayerName of the data to get * @param playerName The PlayerName of the data to get
* @return Their {@link PlayerData}; or {@code null} if the player does not exist * @return Their {@link PlayerData}; or {@code null} if the player does not exist
*/ */
public static PlayerData getPlayerDataByName(String playerName, String clusterId) { public PlayerData getPlayerDataByName(String playerName, String clusterId) {
PlayerData playerData = null; PlayerData playerData = null;
for (Settings.SynchronisationCluster cluster : Settings.clusters) { for (Settings.SynchronisationCluster cluster : Settings.clusters) {
if (cluster.clusterId().equals(clusterId)) { if (cluster.clusterId().equals(clusterId)) {
try (Connection connection = HuskSyncBungeeCord.getConnection(clusterId)) { try (Connection connection = getConnection(clusterId)) {
try (PreparedStatement statement = connection.prepareStatement( try (PreparedStatement statement = connection.prepareStatement(
"SELECT * FROM " + cluster.playerTableName() + " WHERE `username`=? LIMIT 1;")) { "SELECT * FROM " + cluster.playerTableName() + " WHERE `username`=? LIMIT 1;")) {
statement.setString(1, playerName); statement.setString(1, playerName);
@ -110,7 +166,7 @@ public class DataManager {
} }
} }
} catch (SQLException e) { } catch (SQLException e) {
plugin.getLogger().log(Level.SEVERE, "An SQL exception occurred", e); logger.log(Level.SEVERE, "An SQL exception occurred", e);
} }
break; break;
} }
@ -119,10 +175,10 @@ public class DataManager {
return playerData; return playerData;
} }
public static Map<Settings.SynchronisationCluster, PlayerData> getPlayerData(UUID playerUUID) { public Map<Settings.SynchronisationCluster, PlayerData> getPlayerData(UUID playerUUID) {
HashMap<Settings.SynchronisationCluster, PlayerData> data = new HashMap<>(); HashMap<Settings.SynchronisationCluster, PlayerData> data = new HashMap<>();
for (Settings.SynchronisationCluster cluster : Settings.clusters) { for (Settings.SynchronisationCluster cluster : Settings.clusters) {
try (Connection connection = HuskSyncBungeeCord.getConnection(cluster.clusterId())) { try (Connection connection = getConnection(cluster.clusterId())) {
try (PreparedStatement statement = connection.prepareStatement( try (PreparedStatement statement = connection.prepareStatement(
"SELECT * FROM " + cluster.dataTableName() + " WHERE `player_id`=(SELECT `id` FROM " + cluster.playerTableName() + " WHERE `uuid`=?);")) { "SELECT * FROM " + cluster.dataTableName() + " WHERE `player_id`=(SELECT `id` FROM " + cluster.playerTableName() + " WHERE `uuid`=?);")) {
statement.setString(1, playerUUID.toString()); statement.setString(1, playerUUID.toString());
@ -158,14 +214,14 @@ public class DataManager {
} }
} }
} catch (SQLException e) { } catch (SQLException e) {
plugin.getLogger().log(Level.SEVERE, "An SQL exception occurred", e); logger.log(Level.SEVERE, "An SQL exception occurred", e);
return null; return null;
} }
} }
return data; return data;
} }
public static void updatePlayerData(PlayerData playerData, Settings.SynchronisationCluster cluster) { public void updatePlayerData(PlayerData playerData, Settings.SynchronisationCluster cluster) {
// Ignore if the Spigot server didn't properly sync the previous data // Ignore if the Spigot server didn't properly sync the previous data
// Add the new player data to the cache // Add the new player data to the cache
@ -179,8 +235,8 @@ public class DataManager {
} }
} }
private static void updatePlayerSQLData(PlayerData playerData, Settings.SynchronisationCluster cluster) { private void updatePlayerSQLData(PlayerData playerData, Settings.SynchronisationCluster cluster) {
try (Connection connection = HuskSyncBungeeCord.getConnection(cluster.clusterId())) { try (Connection connection = getConnection(cluster.clusterId())) {
try (PreparedStatement statement = connection.prepareStatement( try (PreparedStatement statement = connection.prepareStatement(
"UPDATE " + cluster.dataTableName() + " SET `version_uuid`=?, `timestamp`=?, `inventory`=?, `ender_chest`=?, `health`=?, `max_health`=?, `health_scale`=?, `hunger`=?, `saturation`=?, `saturation_exhaustion`=?, `selected_slot`=?, `status_effects`=?, `total_experience`=?, `exp_level`=?, `exp_progress`=?, `game_mode`=?, `statistics`=?, `is_flying`=?, `advancements`=?, `location`=? WHERE `player_id`=(SELECT `id` FROM " + cluster.playerTableName() + " WHERE `uuid`=?);")) { "UPDATE " + cluster.dataTableName() + " SET `version_uuid`=?, `timestamp`=?, `inventory`=?, `ender_chest`=?, `health`=?, `max_health`=?, `health_scale`=?, `hunger`=?, `saturation`=?, `saturation_exhaustion`=?, `selected_slot`=?, `status_effects`=?, `total_experience`=?, `exp_level`=?, `exp_progress`=?, `game_mode`=?, `statistics`=?, `is_flying`=?, `advancements`=?, `location`=? WHERE `player_id`=(SELECT `id` FROM " + cluster.playerTableName() + " WHERE `uuid`=?);")) {
statement.setString(1, playerData.getDataVersionUUID().toString()); statement.setString(1, playerData.getDataVersionUUID().toString());
@ -208,12 +264,12 @@ public class DataManager {
statement.executeUpdate(); statement.executeUpdate();
} }
} catch (SQLException e) { } catch (SQLException e) {
plugin.getLogger().log(Level.SEVERE, "An SQL exception occurred", e); logger.log(Level.SEVERE, "An SQL exception occurred", e);
} }
} }
private static void insertPlayerData(PlayerData playerData, Settings.SynchronisationCluster cluster) { private void insertPlayerData(PlayerData playerData, Settings.SynchronisationCluster cluster) {
try (Connection connection = HuskSyncBungeeCord.getConnection(cluster.clusterId())) { try (Connection connection = getConnection(cluster.clusterId())) {
try (PreparedStatement statement = connection.prepareStatement( try (PreparedStatement statement = connection.prepareStatement(
"INSERT INTO " + cluster.dataTableName() + " (`player_id`,`version_uuid`,`timestamp`,`inventory`,`ender_chest`,`health`,`max_health`,`health_scale`,`hunger`,`saturation`,`saturation_exhaustion`,`selected_slot`,`status_effects`,`total_experience`,`exp_level`,`exp_progress`,`game_mode`,`statistics`,`is_flying`,`advancements`,`location`) VALUES((SELECT `id` FROM " + cluster.playerTableName() + " WHERE `uuid`=?),?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);")) { "INSERT INTO " + cluster.dataTableName() + " (`player_id`,`version_uuid`,`timestamp`,`inventory`,`ender_chest`,`health`,`max_health`,`health_scale`,`hunger`,`saturation`,`saturation_exhaustion`,`selected_slot`,`status_effects`,`total_experience`,`exp_level`,`exp_progress`,`game_mode`,`statistics`,`is_flying`,`advancements`,`location`) VALUES((SELECT `id` FROM " + cluster.playerTableName() + " WHERE `uuid`=?),?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);")) {
statement.setString(1, playerData.getPlayerUUID().toString()); statement.setString(1, playerData.getPlayerUUID().toString());
@ -241,7 +297,7 @@ public class DataManager {
statement.executeUpdate(); statement.executeUpdate();
} }
} catch (SQLException e) { } catch (SQLException e) {
plugin.getLogger().log(Level.SEVERE, "An SQL exception occurred", e); logger.log(Level.SEVERE, "An SQL exception occurred", e);
} }
} }
@ -251,8 +307,8 @@ public class DataManager {
* @param playerUUID The UUID of the player * @param playerUUID The UUID of the player
* @return {@code true} if the player has an entry in the data table * @return {@code true} if the player has an entry in the data table
*/ */
private static boolean playerHasCachedData(UUID playerUUID, Settings.SynchronisationCluster cluster) { private boolean playerHasCachedData(UUID playerUUID, Settings.SynchronisationCluster cluster) {
try (Connection connection = HuskSyncBungeeCord.getConnection(cluster.clusterId())) { try (Connection connection = getConnection(cluster.clusterId())) {
try (PreparedStatement statement = connection.prepareStatement( try (PreparedStatement statement = connection.prepareStatement(
"SELECT * FROM " + cluster.dataTableName() + " WHERE `player_id`=(SELECT `id` FROM " + cluster.playerTableName() + " WHERE `uuid`=?);")) { "SELECT * FROM " + cluster.dataTableName() + " WHERE `player_id`=(SELECT `id` FROM " + cluster.playerTableName() + " WHERE `uuid`=?);")) {
statement.setString(1, playerUUID.toString()); statement.setString(1, playerUUID.toString());
@ -260,7 +316,7 @@ public class DataManager {
return resultSet.next(); return resultSet.next();
} }
} catch (SQLException e) { } catch (SQLException e) {
plugin.getLogger().log(Level.SEVERE, "An SQL exception occurred", e); logger.log(Level.SEVERE, "An SQL exception occurred", e);
return false; return false;
} }
} }

@ -1,21 +1,21 @@
package me.william278.husksync.bungeecord.data.sql; package me.william278.husksync.proxy.data.sql;
import me.william278.husksync.HuskSyncBungeeCord;
import me.william278.husksync.Settings; import me.william278.husksync.Settings;
import me.william278.husksync.util.Logger;
import java.sql.Connection; import java.sql.Connection;
import java.sql.SQLException; import java.sql.SQLException;
public abstract class Database { public abstract class Database {
protected HuskSyncBungeeCord plugin;
public String dataPoolName; public String dataPoolName;
public Settings.SynchronisationCluster cluster; public Settings.SynchronisationCluster cluster;
public final Logger logger;
public Database(HuskSyncBungeeCord instance, Settings.SynchronisationCluster cluster) { public Database(Settings.SynchronisationCluster cluster, Logger logger) {
this.plugin = instance;
this.cluster = cluster; this.cluster = cluster;
this.dataPoolName = cluster != null ? "HuskSyncHikariPool-" + cluster.clusterId() : "HuskSyncMigratorPool"; this.dataPoolName = cluster != null ? "HuskSyncHikariPool-" + cluster.clusterId() : "HuskSyncMigratorPool";
this.logger = logger;
} }
public abstract Connection getConnection() throws SQLException; public abstract Connection getConnection() throws SQLException;

@ -1,8 +1,8 @@
package me.william278.husksync.bungeecord.data.sql; package me.william278.husksync.proxy.data.sql;
import com.zaxxer.hikari.HikariDataSource; import com.zaxxer.hikari.HikariDataSource;
import me.william278.husksync.HuskSyncBungeeCord;
import me.william278.husksync.Settings; import me.william278.husksync.Settings;
import me.william278.husksync.util.Logger;
import java.sql.Connection; import java.sql.Connection;
import java.sql.SQLException; import java.sql.SQLException;
@ -58,8 +58,8 @@ public class MySQL extends Database {
private HikariDataSource dataSource; private HikariDataSource dataSource;
public MySQL(HuskSyncBungeeCord instance, Settings.SynchronisationCluster cluster) { public MySQL(Settings.SynchronisationCluster cluster, Logger logger) {
super(instance, cluster); super(cluster, logger);
} }
@Override @Override
@ -96,7 +96,7 @@ public class MySQL extends Database {
} }
} }
} catch (SQLException e) { } catch (SQLException e) {
plugin.getLogger().log(Level.SEVERE, "An error occurred creating tables on the MySQL database: ", e); logger.log(Level.SEVERE, "An error occurred creating tables on the MySQL database: ", e);
} }
} }

@ -1,8 +1,8 @@
package me.william278.husksync.bungeecord.data.sql; package me.william278.husksync.proxy.data.sql;
import com.zaxxer.hikari.HikariDataSource; import com.zaxxer.hikari.HikariDataSource;
import me.william278.husksync.HuskSyncBungeeCord;
import me.william278.husksync.Settings; import me.william278.husksync.Settings;
import me.william278.husksync.util.Logger;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
@ -54,22 +54,25 @@ public class SQLite extends Database {
return cluster.databaseName() + "Data"; return cluster.databaseName() + "Data";
} }
private final File dataFolder;
private HikariDataSource dataSource; private HikariDataSource dataSource;
public SQLite(HuskSyncBungeeCord instance, Settings.SynchronisationCluster cluster) { public SQLite(Settings.SynchronisationCluster cluster, File dataFolder, Logger logger) {
super(instance, cluster); super(cluster, logger);
this.dataFolder = dataFolder;
} }
// Create the database file if it does not exist yet // Create the database file if it does not exist yet
private void createDatabaseFileIfNotExist() { private void createDatabaseFileIfNotExist() {
File databaseFile = new File(plugin.getDataFolder(), getDatabaseName() + ".db"); File databaseFile = new File(dataFolder, getDatabaseName() + ".db");
if (!databaseFile.exists()) { if (!databaseFile.exists()) {
try { try {
if (!databaseFile.createNewFile()) { if (!databaseFile.createNewFile()) {
plugin.getLogger().log(Level.SEVERE, "Failed to write new file: " + getDatabaseName() + ".db (file already exists)"); logger.log(Level.SEVERE, "Failed to write new file: " + getDatabaseName() + ".db (file already exists)");
} }
} catch (IOException e) { } catch (IOException e) {
plugin.getLogger().log(Level.SEVERE, "An error occurred writing a file: " + getDatabaseName() + ".db (" + e.getCause() + ")"); logger.log(Level.SEVERE, "An error occurred writing a file: " + getDatabaseName() + ".db (" + e.getCause() + ")", e);
} }
} }
} }
@ -85,7 +88,7 @@ public class SQLite extends Database {
createDatabaseFileIfNotExist(); createDatabaseFileIfNotExist();
// Create new HikariCP data source // Create new HikariCP data source
final String jdbcUrl = "jdbc:sqlite:" + plugin.getDataFolder().getAbsolutePath() + File.separator + getDatabaseName() + ".db"; final String jdbcUrl = "jdbc:sqlite:" + dataFolder.getAbsolutePath() + File.separator + getDatabaseName() + ".db";
dataSource = new HikariDataSource(); dataSource = new HikariDataSource();
dataSource.setDataSourceClassName("org.sqlite.SQLiteDataSource"); dataSource.setDataSourceClassName("org.sqlite.SQLiteDataSource");
dataSource.addDataSourceProperty("url", jdbcUrl); dataSource.addDataSourceProperty("url", jdbcUrl);
@ -109,7 +112,7 @@ public class SQLite extends Database {
} }
} }
} catch (SQLException e) { } catch (SQLException e) {
plugin.getLogger().log(Level.SEVERE, "An error occurred creating tables on the SQLite database: ", e); logger.log(Level.SEVERE, "An error occurred creating tables on the SQLite database", e);
} }
} }

@ -0,0 +1,19 @@
package me.william278.husksync.util;
import java.util.logging.Level;
/**
 * Logger interface to allow for implementation of different logger platforms used by Bungee and Velocity
 */
public interface Logger {

    /**
     * Log a message together with an accompanying exception
     *
     * @param level   the severity {@link Level} to log at
     * @param message the message to log
     * @param e       the exception to record alongside the message
     */
    void log(Level level, String message, Exception e);

    /**
     * Log a message at the given severity {@link Level}
     *
     * @param level   the severity {@link Level} to log at
     * @param message the message to log
     */
    void log(Level level, String message);

    /**
     * Log a message at {@code INFO} level
     *
     * @param message the message to log
     */
    void info(String message);

    /**
     * Log a message at {@code SEVERE} level
     *
     * @param message the message to log
     */
    void severe(String message);

    /**
     * Log a message at {@code CONFIG} level
     *
     * @param message the message to log
     */
    void config(String message);
}

@ -3,6 +3,7 @@ dependencies {
implementation project(path: ":api", configuration: 'shadow') implementation project(path: ":api", configuration: 'shadow')
implementation project(path: ":bukkit", configuration: 'shadow') implementation project(path: ":bukkit", configuration: 'shadow')
implementation project(path: ":bungeecord", configuration: 'shadow') implementation project(path: ":bungeecord", configuration: 'shadow')
implementation project(path: ":velocity", configuration: 'shadow')
} }
shadowJar { shadowJar {

@ -10,4 +10,5 @@ include 'common'
include 'api' include 'api'
include 'bukkit' include 'bukkit'
include 'bungeecord' include 'bungeecord'
include 'velocity'
include 'plugin' include 'plugin'

@ -0,0 +1,20 @@
dependencies {
    compileOnly project(':common')
    implementation project(path: ':common', configuration: 'shadow')

    compileOnly 'redis.clients:jedis:3.7.0'
    implementation 'org.bstats:bstats-velocity:2.2.1'
    implementation 'com.zaxxer:HikariCP:5.0.0'
    implementation 'de.themoep:minedown-adventure:1.7.1-SNAPSHOT'

    compileOnly 'com.velocitypowered:velocity-api:3.1.0'
    annotationProcessor 'com.velocitypowered:velocity-api:3.1.0'
}

shadowJar {
    relocate 'com.zaxxer', 'me.William278.husksync.libraries.hikari'
    // Fix: relocate bStats under a "bstats" package; the previous target ("libraries.plan")
    // was a copy-paste slip from another plugin's build script
    relocate 'org.bstats', 'me.William278.husksync.libraries.bstats'
    relocate 'de.themoep', 'me.William278.husksync.libraries.minedown'
}

tasks.register('prepareKotlinBuildScriptModel'){}

@ -0,0 +1,187 @@
package me.william278.husksync;
import com.google.inject.Inject;
import com.velocitypowered.api.command.CommandManager;
import com.velocitypowered.api.command.CommandMeta;
import com.velocitypowered.api.event.Subscribe;
import com.velocitypowered.api.event.proxy.ProxyInitializeEvent;
import com.velocitypowered.api.event.proxy.ProxyShutdownEvent;
import com.velocitypowered.api.plugin.Plugin;
import com.velocitypowered.api.plugin.annotation.DataDirectory;
import com.velocitypowered.api.proxy.ProxyServer;
import me.william278.husksync.proxy.data.DataManager;
import me.william278.husksync.redis.RedisMessage;
import me.william278.husksync.velocity.VelocityUpdateChecker;
import me.william278.husksync.velocity.command.HuskSyncCommand;
import me.william278.husksync.velocity.config.ConfigLoader;
import me.william278.husksync.velocity.config.ConfigManager;
import me.william278.husksync.velocity.listener.VelocityEventListener;
import me.william278.husksync.velocity.listener.VelocityRedisListener;
import me.william278.husksync.velocity.util.VelocityLogger;
import org.bstats.velocity.Metrics;
import org.slf4j.Logger;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.util.HashSet;
import java.util.Objects;
import java.util.logging.Level;
import static me.william278.husksync.HuskSyncVelocity.VERSION;
@Plugin(
        // NOTE(review): id "velocity" looks like a placeholder — "husksync" seems intended. Changing
        // the id also moves the @DataDirectory folder, so confirm before renaming.
        id = "velocity",
        name = "HuskSync",
        version = VERSION,
        description = "HuskSync for velocity",
        authors = {"William278"}
)
public class HuskSyncVelocity {

    // Plugin version
    public static final String VERSION = "1.2-dev";

    // Velocity bStats ID (different from Bukkit and BungeeCord)
    private static final int METRICS_ID = 13489;
    private final Metrics.Factory metricsFactory;

    private static HuskSyncVelocity instance;

    public static HuskSyncVelocity getInstance() {
        return instance;
    }

    // Whether the plugin is ready to accept redis messages
    public static boolean readyForRedis = false;

    // Whether the plugin is in the process of disabling and should skip responding to handshake confirmations
    public static boolean isDisabling = false;

    /**
     * Set of all the {@link Server}s that have completed the synchronisation handshake with HuskSync on the proxy
     */
    public static HashSet<Server> synchronisedServers;

    public static DataManager dataManager;

    //public static MPDBMigrator mpdbMigrator;

    private final Logger logger;
    private final ProxyServer server;
    private final Path dataDirectory;

    // Get the data folder
    public File getDataFolder() {
        return dataDirectory.toFile();
    }

    // Get the proxy server
    public ProxyServer getProxyServer() {
        return server;
    }

    // Velocity logger handling
    private VelocityLogger velocityLogger;

    public VelocityLogger getVelocityLogger() {
        return velocityLogger;
    }

    /**
     * Injected constructor; Velocity supplies the proxy, logger, data directory and bStats factory.
     */
    @Inject
    public HuskSyncVelocity(ProxyServer server, Logger logger, @DataDirectory Path dataDirectory, Metrics.Factory metricsFactory) {
        this.server = server;
        this.logger = logger;
        this.dataDirectory = dataDirectory;
        this.metricsFactory = metricsFactory;
    }

    /**
     * Plugin startup: load configuration, connect to the database and Redis, then register
     * listeners and commands. Aborts (without registering) if Redis fails to initialise.
     */
    @Subscribe
    public void onProxyInitialization(ProxyInitializeEvent event) {
        // Set instance
        instance = this;

        // Setup logger
        velocityLogger = new VelocityLogger(logger);

        // Fix: initialise the synchronised server set; it was previously left null, causing an
        // NPE on the first server handshake and again in onProxyShutdown
        synchronisedServers = new HashSet<>();

        // Load config
        ConfigManager.loadConfig();

        // Load settings from config
        ConfigLoader.loadSettings(ConfigManager.getConfig());

        // Load messages
        ConfigManager.loadMessages();

        // Load locales from messages
        ConfigLoader.loadMessageStrings(ConfigManager.getMessages());

        // Do update checker
        if (Settings.automaticUpdateChecks) {
            new VelocityUpdateChecker(VERSION).logToConsole();
        }

        // Setup data manager
        dataManager = new DataManager(getVelocityLogger(), getDataFolder());

        // Setup player data cache
        for (Settings.SynchronisationCluster cluster : Settings.clusters) {
            dataManager.playerDataCache.put(cluster, new DataManager.PlayerDataCache());
        }

        // Initialize the redis listener; abort loading if the connection could not be established
        if (!new VelocityRedisListener().isActiveAndEnabled) {
            getVelocityLogger().severe("Failed to initialize Redis; HuskSync will now abort loading itself (Velocity) v" + VERSION);
            return;
        }

        // Register listener
        server.getEventManager().register(this, new VelocityEventListener());

        // Register command
        CommandManager commandManager = getProxyServer().getCommandManager();
        CommandMeta meta = commandManager.metaBuilder("husksync")
                .aliases("hs")
                .build();
        commandManager.register(meta, new HuskSyncCommand());

        // Prepare the migrator for use if needed
        //todo migrator

        // Initialize bStats metrics (best-effort; metrics failure must not break startup)
        try {
            metricsFactory.make(this, METRICS_ID);
        } catch (Exception e) {
            getVelocityLogger().info("Skipped metrics initialization");
        }

        // Log to console
        getVelocityLogger().info("Enabled HuskSync (Velocity) v" + VERSION);

        // Mark as ready for redis message processing
        readyForRedis = true;
    }

    /**
     * Plugin shutdown: notify each synchronised Bukkit server that the handshake is terminated,
     * then close database connections.
     */
    @Subscribe
    public void onProxyShutdown(ProxyShutdownEvent event) {
        // Plugin shutdown logic
        isDisabling = true;

        // Send terminating handshake message
        for (Server server : synchronisedServers) {
            try {
                new RedisMessage(RedisMessage.MessageType.TERMINATE_HANDSHAKE,
                        new RedisMessage.MessageTarget(Settings.ServerType.BUKKIT, null, server.clusterId()),
                        server.serverUUID().toString(),
                        "Velocity").send();
            } catch (IOException e) {
                getVelocityLogger().log(Level.SEVERE, "Failed to serialize Redis message for handshake termination", e);
            }
        }

        dataManager.closeDatabases();

        // Log to console
        getVelocityLogger().info("Disabled HuskSync (Velocity) v" + VERSION);
    }
}

@ -0,0 +1,20 @@
package me.william278.husksync.velocity;
import me.william278.husksync.HuskSyncVelocity;
import me.william278.husksync.util.UpdateChecker;
import java.util.logging.Level;
public class VelocityUpdateChecker extends UpdateChecker {

    private static final HuskSyncVelocity plugin = HuskSyncVelocity.getInstance();

    /**
     * Create an update checker for the currently-running plugin version.
     *
     * @param versionToCheck the plugin version to compare against the latest published release
     */
    public VelocityUpdateChecker(String versionToCheck) {
        super(versionToCheck);
    }

    /**
     * Route update-checker output through the plugin's Velocity logger wrapper.
     *
     * @param level   severity to log at
     * @param message message text to emit
     */
    @Override
    public void log(Level level, String message) {
        final var consoleLogger = plugin.getVelocityLogger();
        consoleLogger.log(level, message);
    }
}

@ -0,0 +1,34 @@
package me.william278.husksync.velocity.command;
import com.velocitypowered.api.command.CommandSource;
import com.velocitypowered.api.command.SimpleCommand;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
public class HuskSyncCommand implements SimpleCommand {

    /**
     * Executes the /husksync command for the specified invocation.
     * <p>
     * Currently a work-in-progress stub for the Velocity port: it intentionally does nothing.
     * (Fix: removed the unused {@code args}/{@code source} dead stores the stub declared.)
     *
     * @param invocation the invocation context
     */
    @Override
    public void execute(Invocation invocation) {
        // TODO: implement subcommand handling (see the BungeeCord HuskSyncCommand for the intended
        // behaviour); arguments via invocation.arguments(), sender via invocation.source()
    }

    /**
     * Provides tab complete suggestions for the specified invocation.
     *
     * @param invocation the invocation context
     * @return the tab complete suggestions (currently always empty)
     */
    @Override
    public List<String> suggest(Invocation invocation) {
        return new ArrayList<>();
    }
}

@ -0,0 +1,97 @@
package me.william278.husksync.velocity.config;
import me.william278.husksync.HuskSyncVelocity;
import me.william278.husksync.Settings;
import me.william278.husksync.util.MessageManager;
import ninja.leaping.configurate.ConfigurationNode;
import java.util.HashMap;
public class ConfigLoader {

    /**
     * Upgrade older config files in place (adding keys introduced since the file was written),
     * bump {@code config_file_version}, save the result and return it.
     *
     * @param configRoot the loaded configuration root node
     * @return the (possibly updated) configuration root node
     */
    private static ConfigurationNode copyDefaults(ConfigurationNode configRoot) {
        // Get the config version and update if needed
        String configVersion = getConfigString(configRoot, "1.0", "config_file_version");
        if (configVersion.contains("-dev")) {
            // Fix: plain replace(), not replaceAll() — "-dev" is a literal, not a regex
            configVersion = configVersion.replace("-dev", "");
        }
        if (!configVersion.equals(HuskSyncVelocity.VERSION)) {
            if (configVersion.equalsIgnoreCase("1.0")) {
                configRoot.getNode("check_for_updates").setValue(true);
            }
            if (configVersion.equalsIgnoreCase("1.0") || configVersion.equalsIgnoreCase("1.0.1") || configVersion.equalsIgnoreCase("1.0.2") || configVersion.equalsIgnoreCase("1.0.3")) {
                // Fix: configurate's getNode() treats a dotted string as a single key; path
                // elements must be passed as varargs to address nested nodes
                configRoot.getNode("clusters", "main", "player_table").setValue("husksync_players");
                configRoot.getNode("clusters", "main", "data_table").setValue("husksync_data");
            }
            configRoot.getNode("config_file_version").setValue(HuskSyncVelocity.VERSION);
        }

        // Save the config back
        ConfigManager.saveConfig(configRoot);
        return configRoot;
    }

    // Read a string at the given node path, falling back to defaultValue if the node is absent
    private static String getConfigString(ConfigurationNode rootNode, String defaultValue, String... nodePath) {
        return !rootNode.getNode(nodePath).isVirtual() ? rootNode.getNode(nodePath).getString() : defaultValue;
    }

    // Read a boolean at the given node path, falling back to defaultValue if the node is absent
    private static boolean getConfigBoolean(ConfigurationNode rootNode, boolean defaultValue, String... nodePath) {
        return !rootNode.getNode(nodePath).isVirtual() ? rootNode.getNode(nodePath).getBoolean() : defaultValue;
    }

    // Read an int at the given node path, falling back to defaultValue if the node is absent
    private static int getConfigInt(ConfigurationNode rootNode, int defaultValue, String... nodePath) {
        return !rootNode.getNode(nodePath).isVirtual() ? rootNode.getNode(nodePath).getInt() : defaultValue;
    }

    // Read a long at the given node path, falling back to defaultValue if the node is absent
    private static long getConfigLong(ConfigurationNode rootNode, long defaultValue, String... nodePath) {
        return !rootNode.getNode(nodePath).isVirtual() ? rootNode.getNode(nodePath).getLong() : defaultValue;
    }

    /**
     * Populate the static {@link Settings} fields from the loaded proxy config file.
     *
     * @param loadedConfig the configuration root node returned by {@code ConfigManager.getConfig()}
     * @throws IllegalArgumentException if an enum-valued setting holds an unrecognised value
     */
    public static void loadSettings(ConfigurationNode loadedConfig) throws IllegalArgumentException {
        ConfigurationNode config = copyDefaults(loadedConfig);

        Settings.language = getConfigString(config, "en-gb", "language");
        Settings.serverType = Settings.ServerType.PROXY;
        Settings.automaticUpdateChecks = getConfigBoolean(config, true, "check_for_updates");
        Settings.redisHost = getConfigString(config, "localhost", "redis_settings", "host");
        Settings.redisPort = getConfigInt(config, 6379, "redis_settings", "port");
        Settings.redisPassword = getConfigString(config, "", "redis_settings", "password");

        Settings.dataStorageType = Settings.DataStorageType.valueOf(getConfigString(config, "sqlite", "data_storage_settings", "database_type").toUpperCase());
        if (Settings.dataStorageType == Settings.DataStorageType.MYSQL) {
            Settings.mySQLHost = getConfigString(config, "localhost", "data_storage_settings", "mysql_settings", "host");
            Settings.mySQLPort = getConfigInt(config, 3306, "data_storage_settings", "mysql_settings", "port");
            Settings.mySQLDatabase = getConfigString(config, "HuskSync", "data_storage_settings", "mysql_settings", "database");
            Settings.mySQLUsername = getConfigString(config, "root", "data_storage_settings", "mysql_settings", "username");
            Settings.mySQLPassword = getConfigString(config, "pa55w0rd", "data_storage_settings", "mysql_settings", "password");
            Settings.mySQLParams = getConfigString(config, "?autoReconnect=true&useSSL=false", "data_storage_settings", "mysql_settings", "params");
        }

        Settings.hikariMaximumPoolSize = getConfigInt(config, 10, "data_storage_settings", "hikari_pool_settings", "maximum_pool_size");
        Settings.hikariMinimumIdle = getConfigInt(config, 10, "data_storage_settings", "hikari_pool_settings", "minimum_idle");
        Settings.hikariMaximumLifetime = getConfigLong(config, 1800000, "data_storage_settings", "hikari_pool_settings", "maximum_lifetime");
        Settings.hikariKeepAliveTime = getConfigLong(config, 0, "data_storage_settings", "hikari_pool_settings", "keepalive_time");
        Settings.hikariConnectionTimeOut = getConfigLong(config, 5000, "data_storage_settings", "hikari_pool_settings", "connection_timeout");

        // Read cluster data. Fix: "clusters" is a mapping node, and configurate's
        // getChildrenList() returns an empty list for maps — so no clusters were ever loaded.
        // Mapping children must be read via getChildrenMap().
        ConfigurationNode clusterSection = config.getNode("clusters");
        final String settingDatabaseName = Settings.mySQLDatabase != null ? Settings.mySQLDatabase : "HuskSync";
        for (Object key : clusterSection.getChildrenMap().keySet()) {
            final String clusterId = key.toString();
            final String playerTableName = getConfigString(config, "husksync_players", "clusters", clusterId, "player_table");
            final String dataTableName = getConfigString(config, "husksync_data", "clusters", clusterId, "data_table");
            final String databaseName = getConfigString(config, settingDatabaseName, "clusters", clusterId, "database");
            Settings.clusters.add(new Settings.SynchronisationCluster(clusterId, databaseName, playerTableName, dataTableName));
        }
    }

    /**
     * Load the locale strings from the messages file into the {@link MessageManager}.
     *
     * @param config the messages file root node (a flat map of message-id to string)
     */
    public static void loadMessageStrings(ConfigurationNode config) {
        final HashMap<String, String> messages = new HashMap<>();
        // Fix: the messages root is a mapping node, so iterate getChildrenMap() keys;
        // getChildrenList() returns an empty list for maps and loaded no messages
        for (Object key : config.getChildrenMap().keySet()) {
            final String messageId = key.toString();
            messages.put(messageId, getConfigString(config, "", messageId));
        }
        MessageManager.setMessages(messages);
    }
}

@ -0,0 +1,92 @@
package me.william278.husksync.velocity.config;
import me.william278.husksync.HuskSyncVelocity;
import me.william278.husksync.Settings;
import ninja.leaping.configurate.ConfigurationNode;
import ninja.leaping.configurate.commented.CommentedConfigurationNode;
import ninja.leaping.configurate.yaml.YAMLConfigurationLoader;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.Objects;
import java.util.logging.Level;
public class ConfigManager {

    private static final HuskSyncVelocity plugin = HuskSyncVelocity.getInstance();

    /**
     * Create the plugin data folder if needed and copy the default proxy config file into it
     * when no config.yml exists yet.
     */
    public static void loadConfig() {
        try {
            if (!plugin.getDataFolder().exists()) {
                if (plugin.getDataFolder().mkdir()) {
                    plugin.getVelocityLogger().info("Created HuskSync data folder");
                }
            }
            File configFile = new File(plugin.getDataFolder(), "config.yml");
            if (!configFile.exists()) {
                // NOTE(review): plugin.getClass() is HuskSyncVelocity, so this relative resource
                // path resolves against me/william278/husksync/ in the jar — confirm
                // proxy-config.yml is packaged there, otherwise use an absolute "/proxy-config.yml"
                Files.copy(Objects.requireNonNull(plugin.getClass().getResourceAsStream("proxy-config.yml")), configFile.toPath());
                plugin.getVelocityLogger().info("Created HuskSync config file");
            }
        } catch (Exception e) {
            plugin.getVelocityLogger().log(Level.CONFIG, "An exception occurred loading the configuration file", e);
        }
    }

    /**
     * Save the given configuration root node back to config.yml.
     *
     * @param rootNode the configuration root node to persist
     */
    public static void saveConfig(ConfigurationNode rootNode) {
        try {
            getConfigLoader().save(rootNode);
        } catch (IOException e) {
            // Fix: the previous message said "loading" for a save failure
            plugin.getVelocityLogger().log(Level.CONFIG, "An exception occurred saving the configuration file", e);
        }
    }

    /**
     * Create the data folder if needed and copy the bundled language file for the configured
     * {@code Settings.language} when no messages file exists yet.
     */
    public static void loadMessages() {
        try {
            if (!plugin.getDataFolder().exists()) {
                if (plugin.getDataFolder().mkdir()) {
                    plugin.getVelocityLogger().info("Created HuskSync data folder");
                }
            }
            File messagesFile = new File(plugin.getDataFolder(), "messages_" + Settings.language + ".yml");
            if (!messagesFile.exists()) {
                Files.copy(Objects.requireNonNull(plugin.getClass().getResourceAsStream("languages/" + Settings.language + ".yml")),
                        messagesFile.toPath());
                plugin.getVelocityLogger().info("Created HuskSync messages file");
            }
        } catch (IOException e) {
            plugin.getVelocityLogger().log(Level.CONFIG, "An exception occurred loading the messages file", e);
        }
    }

    // Build a YAML loader bound to the plugin's config.yml
    private static YAMLConfigurationLoader getConfigLoader() {
        File configFile = new File(plugin.getDataFolder(), "config.yml");
        return YAMLConfigurationLoader.builder()
                .setPath(configFile.toPath())
                .build();
    }

    /**
     * Load and return the config.yml root node, or {@code null} if it could not be read.
     */
    public static ConfigurationNode getConfig() {
        try {
            return getConfigLoader().load();
        } catch (IOException e) {
            plugin.getVelocityLogger().log(Level.CONFIG, "An IOException has occurred loading the plugin config.");
            return null;
        }
    }

    /**
     * Load and return the messages file root node for the configured language,
     * or {@code null} if it could not be read.
     */
    public static ConfigurationNode getMessages() {
        try {
            File configFile = new File(plugin.getDataFolder(), "messages_" + Settings.language + ".yml");
            return YAMLConfigurationLoader.builder()
                    .setPath(configFile.toPath())
                    .build()
                    .load();
        } catch (IOException e) {
            plugin.getVelocityLogger().log(Level.CONFIG, "An IOException has occurred loading the plugin messages.");
            return null;
        }
    }
}

@ -0,0 +1,46 @@
package me.william278.husksync.velocity.listener;
import com.velocitypowered.api.event.Subscribe;
import com.velocitypowered.api.event.connection.PostLoginEvent;
import com.velocitypowered.api.proxy.Player;
import me.william278.husksync.HuskSyncVelocity;
import me.william278.husksync.PlayerData;
import me.william278.husksync.Settings;
import me.william278.husksync.redis.RedisMessage;
import java.io.IOException;
import java.util.Map;
import java.util.logging.Level;
public class VelocityEventListener {

    private static final HuskSyncVelocity plugin = HuskSyncVelocity.getInstance();

    /**
     * Handle a player connecting to the proxy: asynchronously ensure their SQL records exist,
     * pull their latest data into the proxy cache, then ask Bukkit servers (over Redis) to
     * request that data when the player arrives.
     *
     * @param event the Velocity post-login event
     */
    @Subscribe
    public void onPostLogin(PostLoginEvent event) {
        final Player player = event.getPlayer();
        plugin.getProxyServer().getScheduler().buildTask(plugin, () -> {
            // Ensure the player has data on SQL and that it is up-to-date
            HuskSyncVelocity.dataManager.ensurePlayerExists(player.getUniqueId(), player.getUsername());

            // Get the player's data from SQL
            final Map<Settings.SynchronisationCluster, PlayerData> data = HuskSyncVelocity.dataManager.getPlayerData(player.getUniqueId());

            // Update the player's data from SQL onto the cache
            assert data != null;
            for (Settings.SynchronisationCluster cluster : data.keySet()) {
                HuskSyncVelocity.dataManager.playerDataCache.get(cluster).updatePlayer(data.get(cluster));
            }

            // Send a message asking the bukkit to request data on join
            try {
                new RedisMessage(RedisMessage.MessageType.REQUEST_DATA_ON_JOIN,
                        new RedisMessage.MessageTarget(Settings.ServerType.BUKKIT, null, null),
                        RedisMessage.RequestOnJoinUpdateType.ADD_REQUESTER.toString(), player.getUniqueId().toString()).send();
            } catch (IOException e) {
                // Log through the platform logger (with the exception) rather than printStackTrace()
                plugin.getVelocityLogger().log(Level.SEVERE, "Failed to serialize request data on join message data", e);
            }
        }).schedule(); // Fix: buildTask() only builds the task — schedule() is required to actually run it
    }
}

@ -0,0 +1,203 @@
package me.william278.husksync.velocity.listener;
import com.velocitypowered.api.proxy.Player;
import de.themoep.minedown.adventure.MineDown;
import me.william278.husksync.HuskSyncVelocity;
import me.william278.husksync.PlayerData;
import me.william278.husksync.Server;
import me.william278.husksync.Settings;
import me.william278.husksync.redis.RedisListener;
import me.william278.husksync.redis.RedisMessage;
import me.william278.husksync.util.MessageManager;
import java.io.IOException;
import java.util.Objects;
import java.util.Optional;
import java.util.UUID;
import java.util.logging.Level;
public class VelocityRedisListener extends RedisListener {
private static final HuskSyncVelocity plugin = HuskSyncVelocity.getInstance();
// Initialize the listener on the bungee
public VelocityRedisListener() {
listen();
}
private PlayerData getPlayerCachedData(UUID uuid, String clusterId) {
PlayerData data = null;
for (Settings.SynchronisationCluster cluster : Settings.clusters) {
if (cluster.clusterId().equals(clusterId)) {
// Get the player data from the cache
PlayerData cachedData = HuskSyncVelocity.dataManager.playerDataCache.get(cluster).getPlayer(uuid);
if (cachedData != null) {
return cachedData;
}
data = Objects.requireNonNull(HuskSyncVelocity.dataManager.getPlayerData(uuid)).get(cluster); // Get their player data from MySQL
HuskSyncVelocity.dataManager.playerDataCache.get(cluster).updatePlayer(data); // Update the cache
break;
}
}
return data; // Return the data
}
/**
* Handle an incoming {@link RedisMessage}
*
* @param message The {@link RedisMessage} to handle
*/
@Override
public void handleMessage(RedisMessage message) {
// Ignore messages destined for Bukkit servers
if (message.getMessageTarget().targetServerType() != Settings.ServerType.PROXY) {
return;
}
// Only process redis messages when ready
if (!HuskSyncVelocity.readyForRedis) {
return;
}
switch (message.getMessageType()) {
case PLAYER_DATA_REQUEST -> {
// Get the UUID of the requesting player
final UUID requestingPlayerUUID = UUID.fromString(message.getMessageData());
plugin.getProxyServer().getScheduler().buildTask(plugin, () -> {
try {
// Send the reply, serializing the message data
new RedisMessage(RedisMessage.MessageType.PLAYER_DATA_SET,
new RedisMessage.MessageTarget(Settings.ServerType.BUKKIT, requestingPlayerUUID, message.getMessageTarget().targetClusterId()),
RedisMessage.serialize(getPlayerCachedData(requestingPlayerUUID, message.getMessageTarget().targetClusterId())))
.send();
// Send an update to all bukkit servers removing the player from the requester cache
new RedisMessage(RedisMessage.MessageType.REQUEST_DATA_ON_JOIN,
new RedisMessage.MessageTarget(Settings.ServerType.BUKKIT, null, message.getMessageTarget().targetClusterId()),
RedisMessage.RequestOnJoinUpdateType.REMOVE_REQUESTER.toString(), requestingPlayerUUID.toString())
.send();
// Send synchronisation complete message
Optional<Player> player = plugin.getProxyServer().getPlayer(requestingPlayerUUID);
player.ifPresent(value -> value.sendActionBar(new MineDown(MessageManager.getMessage("synchronisation_complete")).toComponent()));
} catch (IOException e) {
log(Level.SEVERE, "Failed to serialize data when replying to a data request");
e.printStackTrace();
}
});
}
case PLAYER_DATA_UPDATE -> {
// Deserialize the PlayerData received
PlayerData playerData;
final String serializedPlayerData = message.getMessageData();
try {
playerData = (PlayerData) RedisMessage.deserialize(serializedPlayerData);
} catch (IOException | ClassNotFoundException e) {
log(Level.SEVERE, "Failed to deserialize PlayerData when handling a player update request");
e.printStackTrace();
return;
}
// Update the data in the cache and SQL
for (Settings.SynchronisationCluster cluster : Settings.clusters) {
if (cluster.clusterId().equals(message.getMessageTarget().targetClusterId())) {
HuskSyncVelocity.dataManager.updatePlayerData(playerData, cluster);
break;
}
}
// Reply with the player data if they are still online (switching server)
Optional<Player> updatingPlayer = plugin.getProxyServer().getPlayer(playerData.getPlayerUUID());
updatingPlayer.ifPresent(player -> {
try {
new RedisMessage(RedisMessage.MessageType.PLAYER_DATA_SET,
new RedisMessage.MessageTarget(Settings.ServerType.BUKKIT, playerData.getPlayerUUID(), message.getMessageTarget().targetClusterId()),
RedisMessage.serialize(playerData))
.send();
// Send synchronisation complete message
player.sendActionBar(new MineDown(MessageManager.getMessage("synchronisation_complete")).toComponent());
} catch (IOException e) {
log(Level.SEVERE, "Failed to re-serialize PlayerData when handling a player update request");
e.printStackTrace();
}
});
}
case CONNECTION_HANDSHAKE -> {
// Reply to a Bukkit server's connection handshake to complete the process
if (HuskSyncVelocity.isDisabling) return; // Return if the Proxy is disabling
final UUID serverUUID = UUID.fromString(message.getMessageDataElements()[0]);
final boolean hasMySqlPlayerDataBridge = Boolean.parseBoolean(message.getMessageDataElements()[1]);
final String bukkitBrand = message.getMessageDataElements()[2];
final String huskSyncVersion = message.getMessageDataElements()[3];
try {
new RedisMessage(RedisMessage.MessageType.CONNECTION_HANDSHAKE,
new RedisMessage.MessageTarget(Settings.ServerType.BUKKIT, null, message.getMessageTarget().targetClusterId()),
serverUUID.toString(), "Velocity")
.send();
HuskSyncVelocity.synchronisedServers.add(
new Server(serverUUID, hasMySqlPlayerDataBridge,
huskSyncVersion, bukkitBrand, message.getMessageTarget().targetClusterId()));
log(Level.INFO, "Completed handshake with " + bukkitBrand + " server (" + serverUUID + ")");
} catch (IOException e) {
log(Level.SEVERE, "Failed to serialize handshake message data");
e.printStackTrace();
}
}
case TERMINATE_HANDSHAKE -> {
// Terminate the handshake with a Bukkit server
final UUID serverUUID = UUID.fromString(message.getMessageDataElements()[0]);
final String bukkitBrand = message.getMessageDataElements()[1];
// Remove a server from the synchronised server list
Server serverToRemove = null;
for (Server server : HuskSyncVelocity.synchronisedServers) {
if (server.serverUUID().equals(serverUUID)) {
serverToRemove = server;
break;
}
}
HuskSyncVelocity.synchronisedServers.remove(serverToRemove);
log(Level.INFO, "Terminated the handshake with " + bukkitBrand + " server (" + serverUUID + ")");
}
case DECODED_MPDB_DATA_SET -> {
// Deserialize the PlayerData received
PlayerData playerData;
final String serializedPlayerData = message.getMessageDataElements()[0];
final String playerName = message.getMessageDataElements()[1];
try {
playerData = (PlayerData) RedisMessage.deserialize(serializedPlayerData);
} catch (IOException | ClassNotFoundException e) {
log(Level.SEVERE, "Failed to deserialize PlayerData when handling incoming decoded MPDB data");
e.printStackTrace();
return;
}
//todo Migrator
/*// Add the incoming data to the data to be saved
MPDBMigrator.incomingPlayerData.put(playerData, playerName);
// Increment players migrated
MPDBMigrator.playersMigrated++;
plugin.getBungeeLogger().log(Level.INFO, "Migrated " + MPDBMigrator.playersMigrated + "/" + MPDBMigrator.migratedDataSent + " players.");
// When all the data has been received, save it
if (MPDBMigrator.migratedDataSent == MPDBMigrator.playersMigrated) {
MPDBMigrator.loadIncomingData(MPDBMigrator.incomingPlayerData);
}*/
}
}
}
/**
* Log to console
*
* @param level The {@link Level} to log
* @param message Message to log
*/
@Override
public void log(Level level, String message) {
plugin.getVelocityLogger().log(level, message);
}
}

@ -0,0 +1,44 @@
package me.william278.husksync.velocity.util;

import me.william278.husksync.util.Logger;

import java.util.logging.Level;

/**
 * A {@link Logger} implementation backed by the SLF4J logger that Velocity
 * provides to the plugin; java.util.logging {@link Level}s are mapped onto
 * the closest SLF4J log level.
 *
 * @param parent The SLF4J logger to delegate output to
 */
public record VelocityLogger(org.slf4j.Logger parent) implements Logger {

    @Override
    public void log(Level level, String message, Exception e) {
        // Route the exception through SLF4J so its stack trace is handled by
        // the logging framework, rather than dumped straight to stderr via
        // printStackTrace()
        logMessage(level, message, e);
    }

    @Override
    public void log(Level level, String message) {
        logMessage(level, message, null);
    }

    @Override
    public void info(String message) {
        logMessage(Level.INFO, message, null);
    }

    @Override
    public void severe(String message) {
        logMessage(Level.SEVERE, message, null);
    }

    @Override
    public void config(String message) {
        logMessage(Level.CONFIG, message, null);
    }

    /**
     * Logs the message using SLF4J, attaching the throwable if one was given.
     * <p>
     * Switch labels must be compile-time constants, so {@link Level#intValue()}
     * literals are used: SEVERE=1000, WARNING=900, CONFIG=700. (Previously the
     * config case checked {@code 70}, so config messages wrongly fell through
     * to the info branch.) SLF4J has no config level, so CONFIG is logged as a
     * prefixed warning.
     *
     * @param level   The java.util.logging {@link Level} of the message
     * @param message The message to log
     * @param e       The exception to attach, or {@code null} if none
     */
    private void logMessage(Level level, String message, Exception e) {
        switch (level.intValue()) {
            case 1000 -> { // Level.SEVERE
                if (e == null) {
                    parent.error(message);
                } else {
                    parent.error(message, e);
                }
            }
            case 900 -> { // Level.WARNING
                if (e == null) {
                    parent.warn(message);
                } else {
                    parent.warn(message, e);
                }
            }
            case 700 -> { // Level.CONFIG (was incorrectly 70)
                if (e == null) {
                    parent.warn("[Config] " + message);
                } else {
                    parent.warn("[Config] " + message, e);
                }
            }
            default -> { // Level.INFO and below
                if (e == null) {
                    parent.info(message);
                } else {
                    parent.info(message, e);
                }
            }
        }
    }
}
Loading…
Cancel
Save