Start work on Velocity support

feat/data-edit-commands
William 3 years ago
parent 32a5004fc7
commit 725bf2c315

@ -11,7 +11,7 @@ plugins {
allprojects {
group 'me.William278'
version '1.1.3'
version '1.2-dev'
compileJava { options.encoding = 'UTF-8' }
tasks.withType(JavaCompile) { options.encoding = 'UTF-8' }
@ -33,6 +33,7 @@ subprojects {
mavenLocal()
mavenCentral()
maven { url 'https://hub.spigotmc.org/nexus/content/repositories/snapshots/' }
maven { url 'https://repo.velocitypowered.com/snapshots/' }
maven { url 'https://repo.minebench.de/' }
maven { url 'https://repo.codemc.org/repository/maven-public' }
maven { url 'https://jitpack.io' }

@ -55,7 +55,7 @@ public final class HuskSyncBukkit extends JavaPlugin {
}
try {
new RedisMessage(RedisMessage.MessageType.CONNECTION_HANDSHAKE,
new RedisMessage.MessageTarget(Settings.ServerType.BUNGEECORD, null, Settings.cluster),
new RedisMessage.MessageTarget(Settings.ServerType.PROXY, null, Settings.cluster),
serverUUID.toString(),
Boolean.toString(isMySqlPlayerDataBridgeInstalled),
Bukkit.getName(),
@ -75,7 +75,7 @@ public final class HuskSyncBukkit extends JavaPlugin {
if (!handshakeCompleted) return;
try {
new RedisMessage(RedisMessage.MessageType.TERMINATE_HANDSHAKE,
new RedisMessage.MessageTarget(Settings.ServerType.BUNGEECORD, null, Settings.cluster),
new RedisMessage.MessageTarget(Settings.ServerType.PROXY, null, Settings.cluster),
serverUUID.toString(),
Bukkit.getName()).send();
} catch (IOException e) {

@ -57,7 +57,7 @@ public class DataViewer {
// Send a redis message with the updated data after the viewing
new RedisMessage(RedisMessage.MessageType.PLAYER_DATA_UPDATE,
new RedisMessage.MessageTarget(Settings.ServerType.BUNGEECORD, null, Settings.cluster),
new RedisMessage.MessageTarget(Settings.ServerType.PROXY, null, Settings.cluster),
RedisMessage.serialize(playerData))
.send();
}

@ -97,7 +97,7 @@ public class BukkitRedisListener extends RedisListener {
try {
MPDBPlayerData data = (MPDBPlayerData) RedisMessage.deserialize(encodedData);
new RedisMessage(RedisMessage.MessageType.DECODED_MPDB_DATA_SET,
new RedisMessage.MessageTarget(Settings.ServerType.BUNGEECORD, null, Settings.cluster),
new RedisMessage.MessageTarget(Settings.ServerType.PROXY, null, Settings.cluster),
RedisMessage.serialize(MPDBDeserializer.convertMPDBData(data)),
data.playerName)
.send();

@ -104,7 +104,7 @@ public class PlayerSetter {
try {
final String serializedPlayerData = getNewSerializedPlayerData(player);
new RedisMessage(RedisMessage.MessageType.PLAYER_DATA_UPDATE,
new RedisMessage.MessageTarget(Settings.ServerType.BUNGEECORD, null, Settings.cluster),
new RedisMessage.MessageTarget(Settings.ServerType.PROXY, null, Settings.cluster),
serializedPlayerData).send();
} catch (IOException e) {
plugin.getLogger().log(Level.SEVERE, "Failed to send a PlayerData update to the proxy", e);
@ -123,7 +123,7 @@ public class PlayerSetter {
*/
public static void requestPlayerData(UUID playerUUID) throws IOException {
new RedisMessage(RedisMessage.MessageType.PLAYER_DATA_REQUEST,
new RedisMessage.MessageTarget(Settings.ServerType.BUNGEECORD, null, Settings.cluster),
new RedisMessage.MessageTarget(Settings.ServerType.PROXY, null, Settings.cluster),
playerUUID.toString()).send();
}

@ -4,7 +4,6 @@ dependencies {
compileOnly 'redis.clients:jedis:3.7.0'
implementation 'org.bstats:bstats-bungeecord:2.2.1'
implementation 'com.zaxxer:HikariCP:5.0.0'
implementation 'de.themoep:minedown:1.7.1-SNAPSHOT'
compileOnly 'net.md-5:bungeecord-api:1.16-R0.5-SNAPSHOT'

@ -3,26 +3,21 @@ package me.william278.husksync;
import me.william278.husksync.bungeecord.command.HuskSyncCommand;
import me.william278.husksync.bungeecord.config.ConfigLoader;
import me.william278.husksync.bungeecord.config.ConfigManager;
import me.william278.husksync.bungeecord.data.DataManager;
import me.william278.husksync.bungeecord.data.sql.Database;
import me.william278.husksync.bungeecord.data.sql.MySQL;
import me.william278.husksync.bungeecord.data.sql.SQLite;
import me.william278.husksync.proxy.data.DataManager;
import me.william278.husksync.bungeecord.listener.BungeeEventListener;
import me.william278.husksync.bungeecord.listener.BungeeRedisListener;
import me.william278.husksync.bungeecord.migrator.MPDBMigrator;
import me.william278.husksync.bungeecord.util.BungeeLogger;
import me.william278.husksync.bungeecord.util.BungeeUpdateChecker;
import me.william278.husksync.redis.RedisMessage;
import me.william278.husksync.util.Logger;
import net.md_5.bungee.api.ProxyServer;
import net.md_5.bungee.api.plugin.Plugin;
import org.bstats.bungeecord.Metrics;
import java.io.IOException;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Objects;
import java.util.UUID;
import java.util.logging.Level;
public final class HuskSyncBungeeCord extends Plugin {
@ -47,17 +42,20 @@ public final class HuskSyncBungeeCord extends Plugin {
*/
public static HashSet<Server> synchronisedServers;
private static HashMap<String, Database> clusterDatabases;
public static Connection getConnection(String clusterId) throws SQLException {
return clusterDatabases.get(clusterId).getConnection();
}
public static DataManager dataManager;
public static MPDBMigrator mpdbMigrator;
private Logger logger;
public Logger getBungeeLogger() {
return logger;
}
@Override
public void onLoad() {
instance = this;
logger = new BungeeLogger(getLogger());
}
@Override
@ -82,35 +80,23 @@ public final class HuskSyncBungeeCord extends Plugin {
new BungeeUpdateChecker(getDescription().getVersion()).logToConsole();
}
// Initialize the database
clusterDatabases = new HashMap<>();
for (Settings.SynchronisationCluster cluster : Settings.clusters) {
Database clusterDatabase = switch (Settings.dataStorageType) {
case SQLITE -> new SQLite(this, cluster);
case MYSQL -> new MySQL(this, cluster);
};
clusterDatabase.load();
clusterDatabase.createTables();
clusterDatabases.put(cluster.clusterId(), clusterDatabase);
}
// Setup data manager
dataManager = new DataManager(getBungeeLogger(), getDataFolder());
// Abort loading if the database failed to initialize
for (Database database : clusterDatabases.values()) {
if (database.isInactive()) {
getLogger().severe("Failed to initialize the database(s); HuskSync will now abort loading itself (" + getProxy().getName() + ") v" + getDescription().getVersion());
return;
}
// Ensure the data manager initialized correctly
if (dataManager.hasFailedInitialization) {
getBungeeLogger().severe("Failed to initialize the HuskSync database(s).\n" +
"HuskSync will now abort loading itself (" + getProxy().getName() + ") v" + getDescription().getVersion());
}
// Setup player data cache
for (Settings.SynchronisationCluster cluster : Settings.clusters) {
DataManager.playerDataCache.put(cluster, new DataManager.PlayerDataCache());
dataManager.playerDataCache.put(cluster, new DataManager.PlayerDataCache());
}
// Initialize the redis listener
if (!new BungeeRedisListener().isActiveAndEnabled) {
getLogger().severe("Failed to initialize Redis; HuskSync will now abort loading itself (" + getProxy().getName() + ") v" + getDescription().getVersion());
getBungeeLogger().severe("Failed to initialize Redis; HuskSync will now abort loading itself (" + getProxy().getName() + ") v" + getDescription().getVersion());
return;
}
@ -127,11 +113,11 @@ public final class HuskSyncBungeeCord extends Plugin {
try {
new Metrics(this, METRICS_ID);
} catch (Exception e) {
getLogger().info("Skipped metrics initialization");
getBungeeLogger().info("Skipped metrics initialization");
}
// Log to console
getLogger().info("Enabled HuskSync (" + getProxy().getName() + ") v" + getDescription().getVersion());
getBungeeLogger().info("Enabled HuskSync (" + getProxy().getName() + ") v" + getDescription().getVersion());
// Mark as ready for redis message processing
readyForRedis = true;
@ -150,23 +136,14 @@ public final class HuskSyncBungeeCord extends Plugin {
server.serverUUID().toString(),
ProxyServer.getInstance().getName()).send();
} catch (IOException e) {
getInstance().getLogger().log(Level.SEVERE, "Failed to serialize Redis message for handshake termination", e);
getBungeeLogger().log(Level.SEVERE, "Failed to serialize Redis message for handshake termination", e);
}
}
// Close the database
for (Database database : clusterDatabases.values()) {
database.close();
}
dataManager.closeDatabases();
// Log to console
getLogger().info("Disabled HuskSync (" + getProxy().getName() + ") v" + getDescription().getVersion());
getBungeeLogger().info("Disabled HuskSync (" + getProxy().getName() + ") v" + getDescription().getVersion());
}
/**
* A record representing a server synchronised on the network and whether it has MySqlPlayerDataBridge installed
*/
public record Server(UUID serverUUID, boolean hasMySqlPlayerDataBridge, String huskSyncVersion, String serverBrand,
String clusterId) {
}
}

@ -2,13 +2,13 @@ package me.william278.husksync.bungeecord.command;
import de.themoep.minedown.MineDown;
import me.william278.husksync.HuskSyncBungeeCord;
import me.william278.husksync.Server;
import me.william278.husksync.bungeecord.util.BungeeUpdateChecker;
import me.william278.husksync.util.MessageManager;
import me.william278.husksync.PlayerData;
import me.william278.husksync.Settings;
import me.william278.husksync.bungeecord.config.ConfigLoader;
import me.william278.husksync.bungeecord.config.ConfigManager;
import me.william278.husksync.bungeecord.data.DataManager;
import me.william278.husksync.bungeecord.migrator.MPDBMigrator;
import me.william278.husksync.redis.RedisMessage;
import net.md_5.bungee.api.CommandSender;
@ -60,7 +60,7 @@ public class HuskSyncCommand extends Command implements TabExecutor {
int updatesNeeded = 0;
String bukkitBrand = "Spigot";
String bukkitVersion = "1.0";
for (HuskSyncBungeeCord.Server server : HuskSyncBungeeCord.synchronisedServers) {
for (Server server : HuskSyncBungeeCord.synchronisedServers) {
BungeeUpdateChecker updateChecker = new BungeeUpdateChecker(server.huskSyncVersion());
if (!updateChecker.isUpToDate()) {
updatesNeeded++;
@ -156,7 +156,7 @@ public class HuskSyncCommand extends Command implements TabExecutor {
}
int playerDataSize = 0;
for (Settings.SynchronisationCluster cluster : Settings.clusters) {
playerDataSize += DataManager.playerDataCache.get(cluster).playerData.size();
playerDataSize += HuskSyncBungeeCord.dataManager.playerDataCache.get(cluster).playerData.size();
}
sender.sendMessage(new MineDown(MessageManager.PLUGIN_STATUS.toString()
.replaceAll("%1%", String.valueOf(HuskSyncBungeeCord.synchronisedServers.size()))
@ -180,7 +180,7 @@ public class HuskSyncCommand extends Command implements TabExecutor {
"reload")
.send();
} catch (IOException e) {
plugin.getLogger().log(Level.WARNING, "Failed to serialize reload notification message data");
plugin.getBungeeLogger().log(Level.WARNING, "Failed to serialize reload notification message data");
}
sender.sendMessage(new MineDown(MessageManager.getMessage("reload_complete")).toComponent());
@ -324,7 +324,7 @@ public class HuskSyncCommand extends Command implements TabExecutor {
ProxyServer.getInstance().getScheduler().runAsync(plugin, () -> {
for (Settings.SynchronisationCluster cluster : Settings.clusters) {
if (!cluster.clusterId().equals(clusterId)) continue;
PlayerData playerData = DataManager.getPlayerDataByName(targetPlayerName, cluster.clusterId());
PlayerData playerData = HuskSyncBungeeCord.dataManager.getPlayerDataByName(targetPlayerName, cluster.clusterId());
if (playerData == null) {
viewer.sendMessage(new MineDown(MessageManager.getMessage("error_invalid_player")).toComponent());
return;
@ -337,7 +337,7 @@ public class HuskSyncCommand extends Command implements TabExecutor {
viewer.sendMessage(new MineDown(MessageManager.getMessage("viewing_inventory_of").replaceAll("%1%",
targetPlayerName)).toComponent());
} catch (IOException e) {
plugin.getLogger().log(Level.WARNING, "Failed to serialize inventory-see player data", e);
plugin.getBungeeLogger().log(Level.WARNING, "Failed to serialize inventory-see player data", e);
}
return;
}
@ -358,7 +358,7 @@ public class HuskSyncCommand extends Command implements TabExecutor {
ProxyServer.getInstance().getScheduler().runAsync(plugin, () -> {
for (Settings.SynchronisationCluster cluster : Settings.clusters) {
if (!cluster.clusterId().equals(clusterId)) continue;
PlayerData playerData = DataManager.getPlayerDataByName(targetPlayerName, cluster.clusterId());
PlayerData playerData = HuskSyncBungeeCord.dataManager.getPlayerDataByName(targetPlayerName, cluster.clusterId());
if (playerData == null) {
viewer.sendMessage(new MineDown(MessageManager.getMessage("error_invalid_player")).toComponent());
return;
@ -371,7 +371,7 @@ public class HuskSyncCommand extends Command implements TabExecutor {
viewer.sendMessage(new MineDown(MessageManager.getMessage("viewing_ender_chest_of").replaceAll("%1%",
targetPlayerName)).toComponent());
} catch (IOException e) {
plugin.getLogger().log(Level.WARNING, "Failed to serialize inventory-see player data", e);
plugin.getBungeeLogger().log(Level.WARNING, "Failed to serialize inventory-see player data", e);
}
return;
}
@ -390,7 +390,7 @@ public class HuskSyncCommand extends Command implements TabExecutor {
new RedisMessage.MessageTarget(Settings.ServerType.BUKKIT, player.getUniqueId(), null),
plugin.getProxy().getName(), plugin.getDescription().getVersion()).send();
} catch (IOException e) {
plugin.getLogger().log(Level.WARNING, "Failed to serialize plugin information to send", e);
plugin.getBungeeLogger().log(Level.WARNING, "Failed to serialize plugin information to send", e);
}
}

@ -37,7 +37,7 @@ public class ConfigLoader {
Settings.language = config.getString("language", "en-gb");
Settings.serverType = Settings.ServerType.BUNGEECORD;
Settings.serverType = Settings.ServerType.PROXY;
Settings.automaticUpdateChecks = config.getBoolean("check_for_updates", true);
Settings.redisHost = config.getString("redis_settings.host", "localhost");
Settings.redisPort = config.getInt("redis_settings.port", 6379);

@ -19,16 +19,16 @@ public class ConfigManager {
try {
if (!plugin.getDataFolder().exists()) {
if (plugin.getDataFolder().mkdir()) {
plugin.getLogger().info("Created HuskSync data folder");
plugin.getBungeeLogger().info("Created HuskSync data folder");
}
}
File configFile = new File(plugin.getDataFolder(), "config.yml");
if (!configFile.exists()) {
Files.copy(plugin.getResourceAsStream("bungee-config.yml"), configFile.toPath());
plugin.getLogger().info("Created HuskSync config file");
Files.copy(plugin.getResourceAsStream("proxy-config.yml"), configFile.toPath());
plugin.getBungeeLogger().info("Created HuskSync config file");
}
} catch (Exception e) {
plugin.getLogger().log(Level.CONFIG, "An exception occurred loading the configuration file", e);
plugin.getBungeeLogger().log(Level.CONFIG, "An exception occurred loading the configuration file", e);
}
}
@ -36,7 +36,7 @@ public class ConfigManager {
try {
ConfigurationProvider.getProvider(YamlConfiguration.class).save(config, new File(plugin.getDataFolder(), "config.yml"));
} catch (IOException e) {
plugin.getLogger().log(Level.CONFIG, "An exception occurred loading the configuration file", e);
plugin.getBungeeLogger().log(Level.CONFIG, "An exception occurred loading the configuration file", e);
}
}
@ -44,16 +44,16 @@ public class ConfigManager {
try {
if (!plugin.getDataFolder().exists()) {
if (plugin.getDataFolder().mkdir()) {
plugin.getLogger().info("Created HuskSync data folder");
plugin.getBungeeLogger().info("Created HuskSync data folder");
}
}
File messagesFile = new File(plugin.getDataFolder(), "messages_" + Settings.language + ".yml");
if (!messagesFile.exists()) {
Files.copy(plugin.getResourceAsStream("languages/" + Settings.language + ".yml"), messagesFile.toPath());
plugin.getLogger().info("Created HuskSync messages file");
plugin.getBungeeLogger().info("Created HuskSync messages file");
}
} catch (Exception e) {
plugin.getLogger().log(Level.CONFIG, "An exception occurred loading the messages file", e);
plugin.getBungeeLogger().log(Level.CONFIG, "An exception occurred loading the messages file", e);
}
}
@ -62,7 +62,7 @@ public class ConfigManager {
File configFile = new File(plugin.getDataFolder(), "config.yml");
return ConfigurationProvider.getProvider(YamlConfiguration.class).load(configFile);
} catch (IOException e) {
plugin.getLogger().log(Level.CONFIG, "An IOException occurred fetching the configuration file", e);
plugin.getBungeeLogger().log(Level.CONFIG, "An IOException occurred fetching the configuration file", e);
return null;
}
}
@ -72,7 +72,7 @@ public class ConfigManager {
File configFile = new File(plugin.getDataFolder(), "messages_" + Settings.language + ".yml");
return ConfigurationProvider.getProvider(YamlConfiguration.class).load(configFile);
} catch (IOException e) {
plugin.getLogger().log(Level.CONFIG, "An IOException occurred fetching the messages file", e);
plugin.getBungeeLogger().log(Level.CONFIG, "An IOException occurred fetching the messages file", e);
return null;
}
}

@ -2,7 +2,6 @@ package me.william278.husksync.bungeecord.listener;
import me.william278.husksync.HuskSyncBungeeCord;
import me.william278.husksync.PlayerData;
import me.william278.husksync.bungeecord.data.DataManager;
import me.william278.husksync.Settings;
import me.william278.husksync.redis.RedisMessage;
import net.md_5.bungee.api.ProxyServer;
@ -24,15 +23,15 @@ public class BungeeEventListener implements Listener {
final ProxiedPlayer player = event.getPlayer();
ProxyServer.getInstance().getScheduler().runAsync(plugin, () -> {
// Ensure the player has data on SQL and that it is up-to-date
DataManager.ensurePlayerExists(player.getUniqueId(), player.getName());
HuskSyncBungeeCord.dataManager.ensurePlayerExists(player.getUniqueId(), player.getName());
// Get the player's data from SQL
final Map<Settings.SynchronisationCluster,PlayerData> data = DataManager.getPlayerData(player.getUniqueId());
final Map<Settings.SynchronisationCluster,PlayerData> data = HuskSyncBungeeCord.dataManager.getPlayerData(player.getUniqueId());
// Update the player's data from SQL onto the cache
assert data != null;
for (Settings.SynchronisationCluster cluster : data.keySet()) {
DataManager.playerDataCache.get(cluster).updatePlayer(data.get(cluster));
HuskSyncBungeeCord.dataManager.playerDataCache.get(cluster).updatePlayer(data.get(cluster));
}
// Send a message asking the bukkit to request data on join
@ -41,7 +40,7 @@ public class BungeeEventListener implements Listener {
new RedisMessage.MessageTarget(Settings.ServerType.BUKKIT, null, null),
RedisMessage.RequestOnJoinUpdateType.ADD_REQUESTER.toString(), player.getUniqueId().toString()).send();
} catch (IOException e) {
plugin.getLogger().log(Level.SEVERE, "Failed to serialize request data on join message data");
plugin.getBungeeLogger().log(Level.SEVERE, "Failed to serialize request data on join message data");
e.printStackTrace();
}
});

@ -2,10 +2,10 @@ package me.william278.husksync.bungeecord.listener;
import de.themoep.minedown.MineDown;
import me.william278.husksync.HuskSyncBungeeCord;
import me.william278.husksync.Server;
import me.william278.husksync.util.MessageManager;
import me.william278.husksync.PlayerData;
import me.william278.husksync.Settings;
import me.william278.husksync.bungeecord.data.DataManager;
import me.william278.husksync.bungeecord.migrator.MPDBMigrator;
import me.william278.husksync.redis.RedisListener;
import me.william278.husksync.redis.RedisMessage;
@ -32,13 +32,13 @@ public class BungeeRedisListener extends RedisListener {
for (Settings.SynchronisationCluster cluster : Settings.clusters) {
if (cluster.clusterId().equals(clusterId)) {
// Get the player data from the cache
PlayerData cachedData = DataManager.playerDataCache.get(cluster).getPlayer(uuid);
PlayerData cachedData = HuskSyncBungeeCord.dataManager.playerDataCache.get(cluster).getPlayer(uuid);
if (cachedData != null) {
return cachedData;
}
data = Objects.requireNonNull(DataManager.getPlayerData(uuid)).get(cluster); // Get their player data from MySQL
DataManager.playerDataCache.get(cluster).updatePlayer(data); // Update the cache
data = Objects.requireNonNull(HuskSyncBungeeCord.dataManager.getPlayerData(uuid)).get(cluster); // Get their player data from MySQL
HuskSyncBungeeCord.dataManager.playerDataCache.get(cluster).updatePlayer(data); // Update the cache
break;
}
}
@ -53,7 +53,7 @@ public class BungeeRedisListener extends RedisListener {
@Override
public void handleMessage(RedisMessage message) {
// Ignore messages destined for Bukkit servers
if (message.getMessageTarget().targetServerType() != Settings.ServerType.BUNGEECORD) {
if (message.getMessageTarget().targetServerType() != Settings.ServerType.PROXY) {
return;
}
// Only process redis messages when ready
@ -107,7 +107,7 @@ public class BungeeRedisListener extends RedisListener {
// Update the data in the cache and SQL
for (Settings.SynchronisationCluster cluster : Settings.clusters) {
if (cluster.clusterId().equals(message.getMessageTarget().targetClusterId())) {
DataManager.updatePlayerData(playerData, cluster);
HuskSyncBungeeCord.dataManager.updatePlayerData(playerData, cluster);
break;
}
}
@ -144,7 +144,7 @@ public class BungeeRedisListener extends RedisListener {
serverUUID.toString(), plugin.getProxy().getName())
.send();
HuskSyncBungeeCord.synchronisedServers.add(
new HuskSyncBungeeCord.Server(serverUUID, hasMySqlPlayerDataBridge,
new Server(serverUUID, hasMySqlPlayerDataBridge,
huskSyncVersion, bukkitBrand, message.getMessageTarget().targetClusterId()));
log(Level.INFO, "Completed handshake with " + bukkitBrand + " server (" + serverUUID + ")");
} catch (IOException e) {
@ -158,8 +158,8 @@ public class BungeeRedisListener extends RedisListener {
final String bukkitBrand = message.getMessageDataElements()[1];
// Remove a server from the synchronised server list
HuskSyncBungeeCord.Server serverToRemove = null;
for (HuskSyncBungeeCord.Server server : HuskSyncBungeeCord.synchronisedServers) {
Server serverToRemove = null;
for (Server server : HuskSyncBungeeCord.synchronisedServers) {
if (server.serverUUID().equals(serverUUID)) {
serverToRemove = server;
break;
@ -186,7 +186,7 @@ public class BungeeRedisListener extends RedisListener {
// Increment players migrated
MPDBMigrator.playersMigrated++;
plugin.getLogger().log(Level.INFO, "Migrated " + MPDBMigrator.playersMigrated + "/" + MPDBMigrator.migratedDataSent + " players.");
plugin.getBungeeLogger().log(Level.INFO, "Migrated " + MPDBMigrator.playersMigrated + "/" + MPDBMigrator.migratedDataSent + " players.");
// When all the data has been received, save it
if (MPDBMigrator.migratedDataSent == MPDBMigrator.playersMigrated) {
@ -204,6 +204,6 @@ public class BungeeRedisListener extends RedisListener {
*/
@Override
public void log(Level level, String message) {
plugin.getLogger().log(level, message);
plugin.getBungeeLogger().log(level, message);
}
}

@ -2,11 +2,11 @@ package me.william278.husksync.bungeecord.migrator;
import me.william278.husksync.HuskSyncBungeeCord;
import me.william278.husksync.PlayerData;
import me.william278.husksync.Server;
import me.william278.husksync.Settings;
import me.william278.husksync.bungeecord.data.DataManager;
import me.william278.husksync.bungeecord.data.sql.Database;
import me.william278.husksync.bungeecord.data.sql.MySQL;
import me.william278.husksync.migrator.MPDBPlayerData;
import me.william278.husksync.proxy.data.sql.Database;
import me.william278.husksync.proxy.data.sql.MySQL;
import me.william278.husksync.redis.RedisMessage;
import net.md_5.bungee.api.ProxyServer;
@ -23,8 +23,8 @@ import java.util.logging.Level;
/**
* Class to handle migration of data from MySQLPlayerDataBridge
* <p>
* The migrator accesses and decodes MPDB's format directly.
* It does this by establishing a connection
* The migrator accesses and decodes MPDB's format directly,
* by communicating with a Spigot server
*/
public class MPDBMigrator {
@ -43,19 +43,19 @@ public class MPDBMigrator {
public void start() {
if (ProxyServer.getInstance().getPlayers().size() > 0) {
plugin.getLogger().log(Level.WARNING, "Failed to start migration because there are players online. " +
plugin.getBungeeLogger().log(Level.WARNING, "Failed to start migration because there are players online. " +
"Your network has to be empty to migrate data for safety reasons.");
return;
}
int synchronisedServersWithMpdb = 0;
for (HuskSyncBungeeCord.Server server : HuskSyncBungeeCord.synchronisedServers) {
for (Server server : HuskSyncBungeeCord.synchronisedServers) {
if (server.hasMySqlPlayerDataBridge()) {
synchronisedServersWithMpdb++;
}
}
if (synchronisedServersWithMpdb < 1) {
plugin.getLogger().log(Level.WARNING, "Failed to start migration because at least one Spigot server with both HuskSync and MySqlPlayerDataBridge installed is not online. " +
plugin.getBungeeLogger().log(Level.WARNING, "Failed to start migration because at least one Spigot server with both HuskSync and MySqlPlayerDataBridge installed is not online. " +
"Please start one Spigot server with HuskSync installed to begin migration.");
return;
}
@ -67,7 +67,7 @@ public class MPDBMigrator {
}
}
if (targetCluster == null) {
plugin.getLogger().log(Level.WARNING, "Failed to start migration because the target cluster could not be found. " +
plugin.getBungeeLogger().log(Level.WARNING, "Failed to start migration because the target cluster could not be found. " +
"Please ensure the target cluster is correct, configured in the proxy config file, then try again");
return;
}
@ -83,7 +83,7 @@ public class MPDBMigrator {
settings.sourceDatabase, settings.sourceUsername, settings.sourcePassword, targetCluster);
sourceDatabase.load();
if (sourceDatabase.isInactive()) {
plugin.getLogger().log(Level.WARNING, "Failed to establish connection to the origin MySQL database. " +
plugin.getBungeeLogger().log(Level.WARNING, "Failed to establish connection to the origin MySQL database. " +
"Please check you have input the correct connection details and try again.");
return;
}
@ -103,8 +103,8 @@ public class MPDBMigrator {
// Clear the new database out of current data
private void prepareTargetDatabase() {
plugin.getLogger().log(Level.INFO, "Preparing target database...");
try (Connection connection = HuskSyncBungeeCord.getConnection(targetCluster.clusterId())) {
plugin.getBungeeLogger().log(Level.INFO, "Preparing target database...");
try (Connection connection = HuskSyncBungeeCord.dataManager.getConnection(targetCluster.clusterId())) {
try (PreparedStatement statement = connection.prepareStatement("DELETE FROM " + targetCluster.playerTableName() + ";")) {
statement.executeUpdate();
}
@ -112,14 +112,14 @@ public class MPDBMigrator {
statement.executeUpdate();
}
} catch (SQLException e) {
plugin.getLogger().log(Level.SEVERE, "An exception occurred preparing the target database", e);
plugin.getBungeeLogger().log(Level.SEVERE, "An exception occurred preparing the target database", e);
} finally {
plugin.getLogger().log(Level.INFO, "Finished preparing target database!");
plugin.getBungeeLogger().log(Level.INFO, "Finished preparing target database!");
}
}
private void getInventoryData() {
plugin.getLogger().log(Level.INFO, "Getting inventory data from MySQLPlayerDataBridge...");
plugin.getBungeeLogger().log(Level.INFO, "Getting inventory data from MySQLPlayerDataBridge...");
try (Connection connection = sourceDatabase.getConnection()) {
try (PreparedStatement statement = connection.prepareStatement("SELECT * FROM " + migrationSettings.inventoryDataTable + ";")) {
ResultSet resultSet = statement.executeQuery();
@ -135,14 +135,14 @@ public class MPDBMigrator {
}
}
} catch (SQLException e) {
plugin.getLogger().log(Level.SEVERE, "An exception occurred getting inventory data", e);
plugin.getBungeeLogger().log(Level.SEVERE, "An exception occurred getting inventory data", e);
} finally {
plugin.getLogger().log(Level.INFO, "Finished getting inventory data from MySQLPlayerDataBridge");
plugin.getBungeeLogger().log(Level.INFO, "Finished getting inventory data from MySQLPlayerDataBridge");
}
}
private void getEnderChestData() {
plugin.getLogger().log(Level.INFO, "Getting ender chest data from MySQLPlayerDataBridge...");
plugin.getBungeeLogger().log(Level.INFO, "Getting ender chest data from MySQLPlayerDataBridge...");
try (Connection connection = sourceDatabase.getConnection()) {
try (PreparedStatement statement = connection.prepareStatement("SELECT * FROM " + migrationSettings.enderChestDataTable + ";")) {
ResultSet resultSet = statement.executeQuery();
@ -158,14 +158,14 @@ public class MPDBMigrator {
}
}
} catch (SQLException e) {
plugin.getLogger().log(Level.SEVERE, "An exception occurred getting ender chest data", e);
plugin.getBungeeLogger().log(Level.SEVERE, "An exception occurred getting ender chest data", e);
} finally {
plugin.getLogger().log(Level.INFO, "Finished getting ender chest data from MySQLPlayerDataBridge");
plugin.getBungeeLogger().log(Level.INFO, "Finished getting ender chest data from MySQLPlayerDataBridge");
}
}
private void getExperienceData() {
plugin.getLogger().log(Level.INFO, "Getting experience data from MySQLPlayerDataBridge...");
plugin.getBungeeLogger().log(Level.INFO, "Getting experience data from MySQLPlayerDataBridge...");
try (Connection connection = sourceDatabase.getConnection()) {
try (PreparedStatement statement = connection.prepareStatement("SELECT * FROM " + migrationSettings.expDataTable + ";")) {
ResultSet resultSet = statement.executeQuery();
@ -183,14 +183,14 @@ public class MPDBMigrator {
}
}
} catch (SQLException e) {
plugin.getLogger().log(Level.SEVERE, "An exception occurred getting experience data", e);
plugin.getBungeeLogger().log(Level.SEVERE, "An exception occurred getting experience data", e);
} finally {
plugin.getLogger().log(Level.INFO, "Finished getting experience data from MySQLPlayerDataBridge");
plugin.getBungeeLogger().log(Level.INFO, "Finished getting experience data from MySQLPlayerDataBridge");
}
}
private void sendEncodedData() {
for (HuskSyncBungeeCord.Server processingServer : HuskSyncBungeeCord.synchronisedServers) {
for (Server processingServer : HuskSyncBungeeCord.synchronisedServers) {
if (processingServer.hasMySqlPlayerDataBridge()) {
for (MPDBPlayerData data : mpdbPlayerData) {
try {
@ -201,10 +201,10 @@ public class MPDBMigrator {
.send();
migratedDataSent++;
} catch (IOException e) {
plugin.getLogger().log(Level.SEVERE, "Failed to serialize encoded MPDB data", e);
plugin.getBungeeLogger().log(Level.SEVERE, "Failed to serialize encoded MPDB data", e);
}
}
plugin.getLogger().log(Level.INFO, "Finished dispatching encoded data for " + migratedDataSent + " players; please wait for conversion to finish");
plugin.getBungeeLogger().log(Level.INFO, "Finished dispatching encoded data for " + migratedDataSent + " players; please wait for conversion to finish");
}
return;
}
@ -218,26 +218,26 @@ public class MPDBMigrator {
public static void loadIncomingData(HashMap<PlayerData, String> dataToLoad) {
ProxyServer.getInstance().getScheduler().runAsync(plugin, () -> {
int playersSaved = 0;
plugin.getLogger().log(Level.INFO, "Saving data for " + playersMigrated + " players...");
plugin.getBungeeLogger().log(Level.INFO, "Saving data for " + playersMigrated + " players...");
for (PlayerData playerData : dataToLoad.keySet()) {
String playerName = dataToLoad.get(playerData);
// Add the player to the MySQL table
DataManager.ensurePlayerExists(playerData.getPlayerUUID(), playerName);
HuskSyncBungeeCord.dataManager.ensurePlayerExists(playerData.getPlayerUUID(), playerName);
// Update the data in the cache and SQL
for (Settings.SynchronisationCluster cluster : Settings.clusters) {
DataManager.updatePlayerData(playerData, cluster);
HuskSyncBungeeCord.dataManager.updatePlayerData(playerData, cluster);
break;
}
playersSaved++;
plugin.getLogger().log(Level.INFO, "Saved data for " + playersSaved + "/" + playersMigrated + " players");
plugin.getBungeeLogger().log(Level.INFO, "Saved data for " + playersSaved + "/" + playersMigrated + " players");
}
// Mark as done when done
plugin.getLogger().log(Level.INFO, """
plugin.getBungeeLogger().log(Level.INFO, """
=== MySQLPlayerDataBridge Migration Wizard ==========
Migration complete!
@ -288,7 +288,7 @@ public class MPDBMigrator {
*/
public static class MigratorMySQL extends MySQL {
public MigratorMySQL(HuskSyncBungeeCord instance, String host, int port, String database, String username, String password, Settings.SynchronisationCluster cluster) {
super(instance, cluster);
super(cluster, instance.getBungeeLogger());
super.host = host;
super.port = port;
super.database = database;

@ -0,0 +1,33 @@
package me.william278.husksync.bungeecord.util;
import me.william278.husksync.util.Logger;
import java.util.logging.Level;
/**
 * BungeeCord implementation of the platform-agnostic {@link Logger} interface.
 * <p>
 * Every call is delegated unchanged to the wrapped {@link java.util.logging.Logger}
 * supplied by the BungeeCord plugin.
 *
 * @param parent the plugin's {@code java.util.logging.Logger} that receives all output
 */
public record BungeeLogger(java.util.logging.Logger parent) implements Logger {
    // Log a message at the given level together with an exception stack trace
    @Override
    public void log(Level level, String message, Exception e) {
        parent.log(level, message, e);
    }
    // Log a message at the given level
    @Override
    public void log(Level level, String message) {
        parent.log(level, message);
    }
    // Log an informational message (Level.INFO)
    @Override
    public void info(String message) {
        parent.info(message);
    }
    // Log a severe error message (Level.SEVERE)
    @Override
    public void severe(String message) {
        parent.severe(message);
    }
    // Log a configuration-level message (Level.CONFIG)
    @Override
    public void config(String message) {
        parent.config(message);
    }
}

@ -15,6 +15,6 @@ public class BungeeUpdateChecker extends UpdateChecker {
@Override
public void log(Level level, String message) {
plugin.getLogger().log(level, message);
plugin.getBungeeLogger().log(level, message);
}
}

@ -1,5 +1,6 @@
dependencies {
implementation 'redis.clients:jedis:3.7.0'
implementation 'com.zaxxer:HikariCP:5.0.0'
}
import org.apache.tools.ant.filters.ReplaceTokens

@ -0,0 +1,10 @@
package me.william278.husksync;
import java.util.UUID;
/**
 * A record representing a server synchronised on the network and whether it has MySqlPlayerDataBridge installed
 *
 * @param serverUUID               unique ID identifying this server instance on the network
 * @param hasMySqlPlayerDataBridge whether the server reported having MySqlPlayerDataBridge installed
 * @param huskSyncVersion          the version of HuskSync the server is running
 * @param serverBrand              the brand name of the server software (as reported in the handshake)
 * @param clusterId                ID of the synchronisation cluster this server belongs to
 */
public record Server(UUID serverUUID, boolean hasMySqlPlayerDataBridge, String huskSyncVersion, String serverBrand,
                     String clusterId) {
}

@ -76,7 +76,7 @@ public class Settings {
public enum ServerType {
BUKKIT,
BUNGEECORD
PROXY,
}
public enum DataStorageType {

@ -1,10 +1,13 @@
package me.william278.husksync.bungeecord.data;
package me.william278.husksync.proxy.data;
import me.william278.husksync.PlayerData;
import me.william278.husksync.HuskSyncBungeeCord;
import me.william278.husksync.Settings;
import me.william278.husksync.bungeecord.data.sql.Database;
import me.william278.husksync.proxy.data.sql.Database;
import me.william278.husksync.proxy.data.sql.MySQL;
import me.william278.husksync.proxy.data.sql.SQLite;
import me.william278.husksync.util.Logger;
import java.io.File;
import java.sql.*;
import java.time.Instant;
import java.util.*;
@ -12,12 +15,65 @@ import java.util.logging.Level;
public class DataManager {
private static final HuskSyncBungeeCord plugin = HuskSyncBungeeCord.getInstance();
/**
* The player data cache for each cluster ID
*/
public static HashMap<Settings.SynchronisationCluster, PlayerDataCache> playerDataCache = new HashMap<>();
public HashMap<Settings.SynchronisationCluster, PlayerDataCache> playerDataCache = new HashMap<>();
/**
* Map of the database assigned for each cluster
*/
private final HashMap<String, Database> clusterDatabases;
// Retrieve database connection for a cluster
public Connection getConnection(String clusterId) throws SQLException {
return clusterDatabases.get(clusterId).getConnection();
}
// Console logger for errors
private final Logger logger;
// Plugin data folder
private final File dataFolder;
// Flag variable identifying if the data manager failed to initialize
public boolean hasFailedInitialization = false;
public DataManager(Logger logger, File dataFolder) {
this.logger = logger;
this.dataFolder = dataFolder;
clusterDatabases = new HashMap<>();
initializeDatabases();
}
private void initializeDatabases() {
for (Settings.SynchronisationCluster cluster : Settings.clusters) {
Database clusterDatabase = switch (Settings.dataStorageType) {
case SQLITE -> new SQLite(cluster, dataFolder, logger);
case MYSQL -> new MySQL(cluster, logger);
};
clusterDatabase.load();
clusterDatabase.createTables();
clusterDatabases.put(cluster.clusterId(), clusterDatabase);
}
// Abort loading if the database failed to initialize
for (Database database : clusterDatabases.values()) {
if (database.isInactive()) {
hasFailedInitialization = true;
return;
}
}
}
/**
* Close the database connections
*/
public void closeDatabases() {
for (Database database : clusterDatabases.values()) {
database.close();
}
}
/**
* Checks if the player is registered on the database.
@ -26,7 +82,7 @@ public class DataManager {
*
* @param playerUUID The UUID of the player to register
*/
public static void ensurePlayerExists(UUID playerUUID, String playerName) {
public void ensurePlayerExists(UUID playerUUID, String playerName) {
for (Settings.SynchronisationCluster cluster : Settings.clusters) {
if (!playerExists(playerUUID, cluster)) {
createPlayerEntry(playerUUID, playerName, cluster);
@ -42,8 +98,8 @@ public class DataManager {
* @param playerUUID The UUID of the player
* @return {@code true} if the player is on the player table
*/
private static boolean playerExists(UUID playerUUID, Settings.SynchronisationCluster cluster) {
try (Connection connection = HuskSyncBungeeCord.getConnection(cluster.clusterId())) {
private boolean playerExists(UUID playerUUID, Settings.SynchronisationCluster cluster) {
try (Connection connection = getConnection(cluster.clusterId())) {
try (PreparedStatement statement = connection.prepareStatement(
"SELECT * FROM " + cluster.playerTableName() + " WHERE `uuid`=?;")) {
statement.setString(1, playerUUID.toString());
@ -51,13 +107,13 @@ public class DataManager {
return resultSet.next();
}
} catch (SQLException e) {
plugin.getLogger().log(Level.SEVERE, "An SQL exception occurred", e);
logger.log(Level.SEVERE, "An SQL exception occurred", e);
return false;
}
}
private static void createPlayerEntry(UUID playerUUID, String playerName, Settings.SynchronisationCluster cluster) {
try (Connection connection = HuskSyncBungeeCord.getConnection(cluster.clusterId())) {
private void createPlayerEntry(UUID playerUUID, String playerName, Settings.SynchronisationCluster cluster) {
try (Connection connection = getConnection(cluster.clusterId())) {
try (PreparedStatement statement = connection.prepareStatement(
"INSERT INTO " + cluster.playerTableName() + " (`uuid`,`username`) VALUES(?,?);")) {
statement.setString(1, playerUUID.toString());
@ -65,12 +121,12 @@ public class DataManager {
statement.executeUpdate();
}
} catch (SQLException e) {
plugin.getLogger().log(Level.SEVERE, "An SQL exception occurred", e);
logger.log(Level.SEVERE, "An SQL exception occurred", e);
}
}
public static void updatePlayerName(UUID playerUUID, String playerName, Settings.SynchronisationCluster cluster) {
try (Connection connection = HuskSyncBungeeCord.getConnection(cluster.clusterId())) {
public void updatePlayerName(UUID playerUUID, String playerName, Settings.SynchronisationCluster cluster) {
try (Connection connection = getConnection(cluster.clusterId())) {
try (PreparedStatement statement = connection.prepareStatement(
"UPDATE " + cluster.playerTableName() + " SET `username`=? WHERE `uuid`=?;")) {
statement.setString(1, playerName);
@ -78,7 +134,7 @@ public class DataManager {
statement.executeUpdate();
}
} catch (SQLException e) {
plugin.getLogger().log(Level.SEVERE, "An SQL exception occurred", e);
logger.log(Level.SEVERE, "An SQL exception occurred", e);
}
}
@ -88,11 +144,11 @@ public class DataManager {
* @param playerName The PlayerName of the data to get
* @return Their {@link PlayerData}; or {@code null} if the player does not exist
*/
public static PlayerData getPlayerDataByName(String playerName, String clusterId) {
public PlayerData getPlayerDataByName(String playerName, String clusterId) {
PlayerData playerData = null;
for (Settings.SynchronisationCluster cluster : Settings.clusters) {
if (cluster.clusterId().equals(clusterId)) {
try (Connection connection = HuskSyncBungeeCord.getConnection(clusterId)) {
try (Connection connection = getConnection(clusterId)) {
try (PreparedStatement statement = connection.prepareStatement(
"SELECT * FROM " + cluster.playerTableName() + " WHERE `username`=? LIMIT 1;")) {
statement.setString(1, playerName);
@ -110,7 +166,7 @@ public class DataManager {
}
}
} catch (SQLException e) {
plugin.getLogger().log(Level.SEVERE, "An SQL exception occurred", e);
logger.log(Level.SEVERE, "An SQL exception occurred", e);
}
break;
}
@ -119,10 +175,10 @@ public class DataManager {
return playerData;
}
public static Map<Settings.SynchronisationCluster, PlayerData> getPlayerData(UUID playerUUID) {
public Map<Settings.SynchronisationCluster, PlayerData> getPlayerData(UUID playerUUID) {
HashMap<Settings.SynchronisationCluster, PlayerData> data = new HashMap<>();
for (Settings.SynchronisationCluster cluster : Settings.clusters) {
try (Connection connection = HuskSyncBungeeCord.getConnection(cluster.clusterId())) {
try (Connection connection = getConnection(cluster.clusterId())) {
try (PreparedStatement statement = connection.prepareStatement(
"SELECT * FROM " + cluster.dataTableName() + " WHERE `player_id`=(SELECT `id` FROM " + cluster.playerTableName() + " WHERE `uuid`=?);")) {
statement.setString(1, playerUUID.toString());
@ -158,14 +214,14 @@ public class DataManager {
}
}
} catch (SQLException e) {
plugin.getLogger().log(Level.SEVERE, "An SQL exception occurred", e);
logger.log(Level.SEVERE, "An SQL exception occurred", e);
return null;
}
}
return data;
}
public static void updatePlayerData(PlayerData playerData, Settings.SynchronisationCluster cluster) {
public void updatePlayerData(PlayerData playerData, Settings.SynchronisationCluster cluster) {
// Ignore if the Spigot server didn't properly sync the previous data
// Add the new player data to the cache
@ -179,8 +235,8 @@ public class DataManager {
}
}
private static void updatePlayerSQLData(PlayerData playerData, Settings.SynchronisationCluster cluster) {
try (Connection connection = HuskSyncBungeeCord.getConnection(cluster.clusterId())) {
private void updatePlayerSQLData(PlayerData playerData, Settings.SynchronisationCluster cluster) {
try (Connection connection = getConnection(cluster.clusterId())) {
try (PreparedStatement statement = connection.prepareStatement(
"UPDATE " + cluster.dataTableName() + " SET `version_uuid`=?, `timestamp`=?, `inventory`=?, `ender_chest`=?, `health`=?, `max_health`=?, `health_scale`=?, `hunger`=?, `saturation`=?, `saturation_exhaustion`=?, `selected_slot`=?, `status_effects`=?, `total_experience`=?, `exp_level`=?, `exp_progress`=?, `game_mode`=?, `statistics`=?, `is_flying`=?, `advancements`=?, `location`=? WHERE `player_id`=(SELECT `id` FROM " + cluster.playerTableName() + " WHERE `uuid`=?);")) {
statement.setString(1, playerData.getDataVersionUUID().toString());
@ -208,12 +264,12 @@ public class DataManager {
statement.executeUpdate();
}
} catch (SQLException e) {
plugin.getLogger().log(Level.SEVERE, "An SQL exception occurred", e);
logger.log(Level.SEVERE, "An SQL exception occurred", e);
}
}
private static void insertPlayerData(PlayerData playerData, Settings.SynchronisationCluster cluster) {
try (Connection connection = HuskSyncBungeeCord.getConnection(cluster.clusterId())) {
private void insertPlayerData(PlayerData playerData, Settings.SynchronisationCluster cluster) {
try (Connection connection = getConnection(cluster.clusterId())) {
try (PreparedStatement statement = connection.prepareStatement(
"INSERT INTO " + cluster.dataTableName() + " (`player_id`,`version_uuid`,`timestamp`,`inventory`,`ender_chest`,`health`,`max_health`,`health_scale`,`hunger`,`saturation`,`saturation_exhaustion`,`selected_slot`,`status_effects`,`total_experience`,`exp_level`,`exp_progress`,`game_mode`,`statistics`,`is_flying`,`advancements`,`location`) VALUES((SELECT `id` FROM " + cluster.playerTableName() + " WHERE `uuid`=?),?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);")) {
statement.setString(1, playerData.getPlayerUUID().toString());
@ -241,7 +297,7 @@ public class DataManager {
statement.executeUpdate();
}
} catch (SQLException e) {
plugin.getLogger().log(Level.SEVERE, "An SQL exception occurred", e);
logger.log(Level.SEVERE, "An SQL exception occurred", e);
}
}
@ -251,8 +307,8 @@ public class DataManager {
* @param playerUUID The UUID of the player
* @return {@code true} if the player has an entry in the data table
*/
private static boolean playerHasCachedData(UUID playerUUID, Settings.SynchronisationCluster cluster) {
try (Connection connection = HuskSyncBungeeCord.getConnection(cluster.clusterId())) {
private boolean playerHasCachedData(UUID playerUUID, Settings.SynchronisationCluster cluster) {
try (Connection connection = getConnection(cluster.clusterId())) {
try (PreparedStatement statement = connection.prepareStatement(
"SELECT * FROM " + cluster.dataTableName() + " WHERE `player_id`=(SELECT `id` FROM " + cluster.playerTableName() + " WHERE `uuid`=?);")) {
statement.setString(1, playerUUID.toString());
@ -260,7 +316,7 @@ public class DataManager {
return resultSet.next();
}
} catch (SQLException e) {
plugin.getLogger().log(Level.SEVERE, "An SQL exception occurred", e);
logger.log(Level.SEVERE, "An SQL exception occurred", e);
return false;
}
}

@ -1,21 +1,21 @@
package me.william278.husksync.bungeecord.data.sql;
package me.william278.husksync.proxy.data.sql;
import me.william278.husksync.HuskSyncBungeeCord;
import me.william278.husksync.Settings;
import me.william278.husksync.util.Logger;
import java.sql.Connection;
import java.sql.SQLException;
public abstract class Database {
protected HuskSyncBungeeCord plugin;
public String dataPoolName;
public Settings.SynchronisationCluster cluster;
public final Logger logger;
public Database(HuskSyncBungeeCord instance, Settings.SynchronisationCluster cluster) {
this.plugin = instance;
public Database(Settings.SynchronisationCluster cluster, Logger logger) {
this.cluster = cluster;
this.dataPoolName = cluster != null ? "HuskSyncHikariPool-" + cluster.clusterId() : "HuskSyncMigratorPool";
this.logger = logger;
}
public abstract Connection getConnection() throws SQLException;

@ -1,8 +1,8 @@
package me.william278.husksync.bungeecord.data.sql;
package me.william278.husksync.proxy.data.sql;
import com.zaxxer.hikari.HikariDataSource;
import me.william278.husksync.HuskSyncBungeeCord;
import me.william278.husksync.Settings;
import me.william278.husksync.util.Logger;
import java.sql.Connection;
import java.sql.SQLException;
@ -58,8 +58,8 @@ public class MySQL extends Database {
private HikariDataSource dataSource;
public MySQL(HuskSyncBungeeCord instance, Settings.SynchronisationCluster cluster) {
super(instance, cluster);
public MySQL(Settings.SynchronisationCluster cluster, Logger logger) {
super(cluster, logger);
}
@Override
@ -96,7 +96,7 @@ public class MySQL extends Database {
}
}
} catch (SQLException e) {
plugin.getLogger().log(Level.SEVERE, "An error occurred creating tables on the MySQL database: ", e);
logger.log(Level.SEVERE, "An error occurred creating tables on the MySQL database: ", e);
}
}

@ -1,8 +1,8 @@
package me.william278.husksync.bungeecord.data.sql;
package me.william278.husksync.proxy.data.sql;
import com.zaxxer.hikari.HikariDataSource;
import me.william278.husksync.HuskSyncBungeeCord;
import me.william278.husksync.Settings;
import me.william278.husksync.util.Logger;
import java.io.File;
import java.io.IOException;
@ -54,22 +54,25 @@ public class SQLite extends Database {
return cluster.databaseName() + "Data";
}
private final File dataFolder;
private HikariDataSource dataSource;
public SQLite(HuskSyncBungeeCord instance, Settings.SynchronisationCluster cluster) {
super(instance, cluster);
public SQLite(Settings.SynchronisationCluster cluster, File dataFolder, Logger logger) {
super(cluster, logger);
this.dataFolder = dataFolder;
}
// Create the database file if it does not exist yet
private void createDatabaseFileIfNotExist() {
File databaseFile = new File(plugin.getDataFolder(), getDatabaseName() + ".db");
File databaseFile = new File(dataFolder, getDatabaseName() + ".db");
if (!databaseFile.exists()) {
try {
if (!databaseFile.createNewFile()) {
plugin.getLogger().log(Level.SEVERE, "Failed to write new file: " + getDatabaseName() + ".db (file already exists)");
logger.log(Level.SEVERE, "Failed to write new file: " + getDatabaseName() + ".db (file already exists)");
}
} catch (IOException e) {
plugin.getLogger().log(Level.SEVERE, "An error occurred writing a file: " + getDatabaseName() + ".db (" + e.getCause() + ")");
logger.log(Level.SEVERE, "An error occurred writing a file: " + getDatabaseName() + ".db (" + e.getCause() + ")", e);
}
}
}
@ -85,7 +88,7 @@ public class SQLite extends Database {
createDatabaseFileIfNotExist();
// Create new HikariCP data source
final String jdbcUrl = "jdbc:sqlite:" + plugin.getDataFolder().getAbsolutePath() + File.separator + getDatabaseName() + ".db";
final String jdbcUrl = "jdbc:sqlite:" + dataFolder.getAbsolutePath() + File.separator + getDatabaseName() + ".db";
dataSource = new HikariDataSource();
dataSource.setDataSourceClassName("org.sqlite.SQLiteDataSource");
dataSource.addDataSourceProperty("url", jdbcUrl);
@ -109,7 +112,7 @@ public class SQLite extends Database {
}
}
} catch (SQLException e) {
plugin.getLogger().log(Level.SEVERE, "An error occurred creating tables on the SQLite database: ", e);
logger.log(Level.SEVERE, "An error occurred creating tables on the SQLite database", e);
}
}

@ -0,0 +1,19 @@
package me.william278.husksync.util;
import java.util.logging.Level;
/**
 * Logger interface to allow for implementation of different logger platforms used by Bungee and Velocity
 */
public interface Logger {
    // Log a message at the given level together with an exception stack trace
    void log(Level level, String message, Exception e);
    // Log a message at the given level
    void log(Level level, String message);
    // Log an informational message (Level.INFO)
    void info(String message);
    // Log a severe error message (Level.SEVERE)
    void severe(String message);
    // Log a configuration-level message (Level.CONFIG)
    void config(String message);
}

@ -3,6 +3,7 @@ dependencies {
implementation project(path: ":api", configuration: 'shadow')
implementation project(path: ":bukkit", configuration: 'shadow')
implementation project(path: ":bungeecord", configuration: 'shadow')
implementation project(path: ":velocity", configuration: 'shadow')
}
shadowJar {

@ -10,4 +10,5 @@ include 'common'
include 'api'
include 'bukkit'
include 'bungeecord'
include 'velocity'
include 'plugin'

@ -0,0 +1,20 @@
dependencies {
    compileOnly project(':common')
    implementation project(path: ':common', configuration: 'shadow')

    compileOnly 'redis.clients:jedis:3.7.0'
    implementation 'org.bstats:bstats-velocity:2.2.1'
    implementation 'com.zaxxer:HikariCP:5.0.0'
    implementation 'de.themoep:minedown-adventure:1.7.1-SNAPSHOT'

    compileOnly 'com.velocitypowered:velocity-api:3.1.0'
    annotationProcessor 'com.velocitypowered:velocity-api:3.1.0'
}

shadowJar {
    relocate 'com.zaxxer', 'me.William278.husksync.libraries.hikari'
    // bStats requires relocation into the plugin's own namespace; the previous
    // target 'libraries.plan' was a copy-paste slip from another plugin
    relocate 'org.bstats', 'me.William278.husksync.libraries.bstats'
    relocate 'de.themoep', 'me.William278.husksync.libraries.minedown'
}

// Workaround for the Kotlin build-script model task expected by some IDEs
tasks.register('prepareKotlinBuildScriptModel'){}

@ -0,0 +1,187 @@
package me.william278.husksync;
import com.google.inject.Inject;
import com.velocitypowered.api.command.CommandManager;
import com.velocitypowered.api.command.CommandMeta;
import com.velocitypowered.api.event.Subscribe;
import com.velocitypowered.api.event.proxy.ProxyInitializeEvent;
import com.velocitypowered.api.event.proxy.ProxyShutdownEvent;
import com.velocitypowered.api.plugin.Plugin;
import com.velocitypowered.api.plugin.annotation.DataDirectory;
import com.velocitypowered.api.proxy.ProxyServer;
import me.william278.husksync.proxy.data.DataManager;
import me.william278.husksync.redis.RedisMessage;
import me.william278.husksync.velocity.VelocityUpdateChecker;
import me.william278.husksync.velocity.command.HuskSyncCommand;
import me.william278.husksync.velocity.config.ConfigLoader;
import me.william278.husksync.velocity.config.ConfigManager;
import me.william278.husksync.velocity.listener.VelocityEventListener;
import me.william278.husksync.velocity.listener.VelocityRedisListener;
import me.william278.husksync.velocity.util.VelocityLogger;
import org.bstats.velocity.Metrics;
import org.slf4j.Logger;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.util.HashSet;
import java.util.Objects;
import java.util.logging.Level;
import static me.william278.husksync.HuskSyncVelocity.VERSION;
@Plugin(
        // Plugin id must be unique on the proxy; "velocity" is the proxy's own id
        id = "husksync",
        name = "HuskSync",
        version = VERSION,
        description = "HuskSync for velocity",
        authors = {"William278"}
)
public class HuskSyncVelocity {

    // Plugin version
    public static final String VERSION = "1.2-dev";

    // Velocity bStats ID (different from Bukkit and BungeeCord)
    private static final int METRICS_ID = 13489;
    private final Metrics.Factory metricsFactory;

    private static HuskSyncVelocity instance;

    public static HuskSyncVelocity getInstance() {
        return instance;
    }

    // Whether the plugin is ready to accept redis messages
    public static boolean readyForRedis = false;

    // Whether the plugin is in the process of disabling and should skip responding to handshake confirmations
    public static boolean isDisabling = false;

    /**
     * Set of all the {@link Server}s that have completed the synchronisation handshake with HuskSync on the proxy.
     * Initialized eagerly so iterating it in {@link #onProxyShutdown(ProxyShutdownEvent)} cannot throw an NPE
     * even when no server ever completed a handshake.
     */
    public static HashSet<Server> synchronisedServers = new HashSet<>();

    public static DataManager dataManager;

    //public static MPDBMigrator mpdbMigrator;

    private final Logger logger;
    private final ProxyServer server;
    private final Path dataDirectory;

    // Get the data folder
    public File getDataFolder() {
        return dataDirectory.toFile();
    }

    // Get the proxy server
    public ProxyServer getProxyServer() {
        return server;
    }

    // Velocity logger handling
    private VelocityLogger velocityLogger;

    public VelocityLogger getVelocityLogger() {
        return velocityLogger;
    }

    @Inject
    public HuskSyncVelocity(ProxyServer server, Logger logger, @DataDirectory Path dataDirectory, Metrics.Factory metricsFactory) {
        this.server = server;
        this.logger = logger;
        this.dataDirectory = dataDirectory;
        this.metricsFactory = metricsFactory;
    }

    @Subscribe
    public void onProxyInitialization(ProxyInitializeEvent event) {
        // Set instance
        instance = this;

        // Setup logger
        velocityLogger = new VelocityLogger(logger);

        // Load config
        ConfigManager.loadConfig();

        // Load settings from config
        ConfigLoader.loadSettings(ConfigManager.getConfig());

        // Load messages
        ConfigManager.loadMessages();

        // Load locales from messages
        ConfigLoader.loadMessageStrings(ConfigManager.getMessages());

        // Do update checker
        if (Settings.automaticUpdateChecks) {
            new VelocityUpdateChecker(VERSION).logToConsole();
        }

        // Setup data manager
        dataManager = new DataManager(getVelocityLogger(), getDataFolder());

        // Setup player data cache
        for (Settings.SynchronisationCluster cluster : Settings.clusters) {
            dataManager.playerDataCache.put(cluster, new DataManager.PlayerDataCache());
        }

        // Initialize the redis listener; abort loading if the connection fails
        if (!new VelocityRedisListener().isActiveAndEnabled) {
            getVelocityLogger().severe("Failed to initialize Redis; HuskSync will now abort loading itself (Velocity) v" + VERSION);
            return;
        }

        // Register listener
        server.getEventManager().register(this, new VelocityEventListener());

        // Register command
        CommandManager commandManager = getProxyServer().getCommandManager();
        CommandMeta meta = commandManager.metaBuilder("husksync")
                .aliases("hs")
                .build();
        commandManager.register(meta, new HuskSyncCommand());

        // Prepare the migrator for use if needed
        //todo migrator

        // Initialize bStats metrics
        try {
            metricsFactory.make(this, METRICS_ID);
        } catch (Exception e) {
            getVelocityLogger().info("Skipped metrics initialization");
        }

        // Log to console
        getVelocityLogger().info("Enabled HuskSync (Velocity) v" + VERSION);

        // Mark as ready for redis message processing
        readyForRedis = true;
    }

    @Subscribe
    public void onProxyShutdown(ProxyShutdownEvent event) {
        // Plugin shutdown logic
        isDisabling = true;

        // Send a terminating handshake message to every synchronised server
        // (loop variable renamed so it does not shadow the `server` field)
        for (Server synchronisedServer : synchronisedServers) {
            try {
                new RedisMessage(RedisMessage.MessageType.TERMINATE_HANDSHAKE,
                        new RedisMessage.MessageTarget(Settings.ServerType.BUKKIT, null, synchronisedServer.clusterId()),
                        synchronisedServer.serverUUID().toString(),
                        "Velocity").send();
            } catch (IOException e) {
                getVelocityLogger().log(Level.SEVERE, "Failed to serialize Redis message for handshake termination", e);
            }
        }

        // Close database connections for every cluster
        dataManager.closeDatabases();

        // Log to console
        getVelocityLogger().info("Disabled HuskSync (Velocity) v" + VERSION);
    }
}

@ -0,0 +1,20 @@
package me.william278.husksync.velocity;
import me.william278.husksync.HuskSyncVelocity;
import me.william278.husksync.util.UpdateChecker;
import java.util.logging.Level;
public class VelocityUpdateChecker extends UpdateChecker {
    // Captured when this class is first loaded; safe because the plugin only constructs
    // this checker after assigning its singleton instance during proxy initialization
    private static final HuskSyncVelocity plugin = HuskSyncVelocity.getInstance();
    /**
     * Creates an update checker that compares the given version against the latest release
     *
     * @param versionToCheck the currently-running plugin version string
     */
    public VelocityUpdateChecker(String versionToCheck) {
        super(versionToCheck);
    }
    // Route update-checker output through the plugin's Velocity logger
    @Override
    public void log(Level level, String message) {
        plugin.getVelocityLogger().log(level, message);
    }
}

@ -0,0 +1,34 @@
package me.william278.husksync.velocity.command;
import com.velocitypowered.api.command.CommandSource;
import com.velocitypowered.api.command.SimpleCommand;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
public class HuskSyncCommand implements SimpleCommand {

    /**
     * Handles execution of the {@code /husksync} command.
     *
     * @param invocation the invocation context
     */
    @Override
    public void execute(Invocation invocation) {
        final CommandSource source = invocation.source();
        final String[] args = invocation.arguments();
        // todo: command handling logic not yet implemented
    }

    /**
     * Provides tab-completion suggestions for the specified invocation.
     *
     * @param invocation the invocation context
     * @return the tab-completion suggestions (currently none)
     */
    @Override
    public List<String> suggest(Invocation invocation) {
        final List<String> suggestions = new ArrayList<>();
        return suggestions;
    }
}

@ -0,0 +1,97 @@
package me.william278.husksync.velocity.config;
import me.william278.husksync.HuskSyncVelocity;
import me.william278.husksync.Settings;
import me.william278.husksync.util.MessageManager;
import ninja.leaping.configurate.ConfigurationNode;
import java.util.HashMap;
public class ConfigLoader {

    /**
     * Upgrades older config files in place, stamps the current version, saves and returns the root node.
     *
     * @param configRoot the loaded configuration root node
     * @return the (possibly updated) configuration root node
     */
    private static ConfigurationNode copyDefaults(ConfigurationNode configRoot) {
        // Get the config version and update if needed
        String configVersion = getConfigString(configRoot, "1.0", "config_file_version");
        if (configVersion.contains("-dev")) {
            configVersion = configVersion.replaceAll("-dev", "");
        }
        if (!configVersion.equals(HuskSyncVelocity.VERSION)) {
            if (configVersion.equalsIgnoreCase("1.0")) {
                configRoot.getNode("check_for_updates").setValue(true);
            }
            if (configVersion.equalsIgnoreCase("1.0") || configVersion.equalsIgnoreCase("1.0.1") || configVersion.equalsIgnoreCase("1.0.2") || configVersion.equalsIgnoreCase("1.0.3")) {
                configRoot.getNode("clusters.main.player_table").setValue("husksync_players");
                configRoot.getNode("clusters.main.data_table").setValue("husksync_data");
            }
            configRoot.getNode("config_file_version").setValue(HuskSyncVelocity.VERSION);
        }

        // Save the config back
        ConfigManager.saveConfig(configRoot);
        return configRoot;
    }

    // Read a String from the config, falling back to the default if the node is absent
    private static String getConfigString(ConfigurationNode rootNode, String defaultValue, String... nodePath) {
        return !rootNode.getNode(nodePath).isVirtual() ? rootNode.getNode(nodePath).getString() : defaultValue;
    }

    // Read a boolean from the config, falling back to the default if the node is absent
    private static boolean getConfigBoolean(ConfigurationNode rootNode, boolean defaultValue, String... nodePath) {
        return !rootNode.getNode(nodePath).isVirtual() ? rootNode.getNode(nodePath).getBoolean() : defaultValue;
    }

    // Read an int from the config, falling back to the default if the node is absent
    private static int getConfigInt(ConfigurationNode rootNode, int defaultValue, String... nodePath) {
        return !rootNode.getNode(nodePath).isVirtual() ? rootNode.getNode(nodePath).getInt() : defaultValue;
    }

    // Read a long from the config, falling back to the default if the node is absent
    private static long getConfigLong(ConfigurationNode rootNode, long defaultValue, String... nodePath) {
        return !rootNode.getNode(nodePath).isVirtual() ? rootNode.getNode(nodePath).getLong() : defaultValue;
    }

    /**
     * Loads plugin {@link Settings} from the proxy configuration file.
     *
     * @param loadedConfig the loaded configuration root node
     * @throws IllegalArgumentException if an invalid data storage type is configured
     */
    public static void loadSettings(ConfigurationNode loadedConfig) throws IllegalArgumentException {
        ConfigurationNode config = copyDefaults(loadedConfig);

        Settings.language = getConfigString(config, "en-gb", "language");
        Settings.serverType = Settings.ServerType.PROXY;
        Settings.automaticUpdateChecks = getConfigBoolean(config, true, "check_for_updates");
        Settings.redisHost = getConfigString(config, "localhost", "redis_settings", "host");
        Settings.redisPort = getConfigInt(config, 6379, "redis_settings", "port");
        Settings.redisPassword = getConfigString(config, "", "redis_settings", "password");

        Settings.dataStorageType = Settings.DataStorageType.valueOf(getConfigString(config, "sqlite", "data_storage_settings", "database_type").toUpperCase());
        if (Settings.dataStorageType == Settings.DataStorageType.MYSQL) {
            Settings.mySQLHost = getConfigString(config, "localhost", "data_storage_settings", "mysql_settings", "host");
            Settings.mySQLPort = getConfigInt(config, 3306, "data_storage_settings", "mysql_settings", "port");
            Settings.mySQLDatabase = getConfigString(config, "HuskSync", "data_storage_settings", "mysql_settings", "database");
            Settings.mySQLUsername = getConfigString(config, "root", "data_storage_settings", "mysql_settings", "username");
            Settings.mySQLPassword = getConfigString(config, "pa55w0rd", "data_storage_settings", "mysql_settings", "password");
            Settings.mySQLParams = getConfigString(config, "?autoReconnect=true&useSSL=false", "data_storage_settings", "mysql_settings", "params");
        }

        Settings.hikariMaximumPoolSize = getConfigInt(config, 10, "data_storage_settings", "hikari_pool_settings", "maximum_pool_size");
        Settings.hikariMinimumIdle = getConfigInt(config, 10, "data_storage_settings", "hikari_pool_settings", "minimum_idle");
        Settings.hikariMaximumLifetime = getConfigLong(config, 1800000, "data_storage_settings", "hikari_pool_settings", "maximum_lifetime");
        Settings.hikariKeepAliveTime = getConfigLong(config, 0, "data_storage_settings", "hikari_pool_settings", "keepalive_time");
        Settings.hikariConnectionTimeOut = getConfigLong(config, 5000, "data_storage_settings", "hikari_pool_settings", "connection_timeout");

        // Read cluster data. "clusters" is a YAML map of clusterId -> cluster settings,
        // so iterate its child map; getChildrenList() returns an empty list for map nodes,
        // which would silently skip every cluster
        ConfigurationNode clusterSection = config.getNode("clusters");
        final String settingDatabaseName = Settings.mySQLDatabase != null ? Settings.mySQLDatabase : "HuskSync";
        for (Object clusterKey : clusterSection.getChildrenMap().keySet()) {
            final String clusterId = clusterKey.toString();
            final String playerTableName = getConfigString(config, "husksync_players", "clusters", clusterId, "player_table");
            final String dataTableName = getConfigString(config, "husksync_data", "clusters", clusterId, "data_table");
            final String databaseName = getConfigString(config, settingDatabaseName, "clusters", clusterId, "database");
            Settings.clusters.add(new Settings.SynchronisationCluster(clusterId, databaseName, playerTableName, dataTableName));
        }
    }

    /**
     * Loads the plugin locales from the messages file into the {@link MessageManager}.
     *
     * @param config the loaded messages file root node
     */
    public static void loadMessageStrings(ConfigurationNode config) {
        final HashMap<String, String> messages = new HashMap<>();
        // The messages file is a flat YAML map of messageId -> message string,
        // so iterate its child map rather than getChildrenList()
        for (Object messageKey : config.getChildrenMap().keySet()) {
            final String messageId = messageKey.toString();
            messages.put(messageId, getConfigString(config, "", messageId));
        }
        MessageManager.setMessages(messages);
    }
}

@ -0,0 +1,92 @@
package me.william278.husksync.velocity.config;
import me.william278.husksync.HuskSyncVelocity;
import me.william278.husksync.Settings;
import ninja.leaping.configurate.ConfigurationNode;
import ninja.leaping.configurate.commented.CommentedConfigurationNode;
import ninja.leaping.configurate.yaml.YAMLConfigurationLoader;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.Objects;
import java.util.logging.Level;
public class ConfigManager {

    private static final HuskSyncVelocity plugin = HuskSyncVelocity.getInstance();

    /**
     * Creates the plugin data folder and copies the default config file out of the jar on first run.
     */
    public static void loadConfig() {
        try {
            if (!plugin.getDataFolder().exists()) {
                if (plugin.getDataFolder().mkdir()) {
                    plugin.getVelocityLogger().info("Created HuskSync data folder");
                }
            }
            File configFile = new File(plugin.getDataFolder(), "config.yml");
            if (!configFile.exists()) {
                // Close the resource stream explicitly; Files.copy(InputStream, Path) does not close it.
                // NOTE(review): getResourceAsStream with a relative path resolves against this class'
                // package — confirm "proxy-config.yml" is bundled there, otherwise prefix with "/"
                try (var defaultConfig = Objects.requireNonNull(plugin.getClass().getResourceAsStream("proxy-config.yml"))) {
                    Files.copy(defaultConfig, configFile.toPath());
                }
                plugin.getVelocityLogger().info("Created HuskSync config file");
            }
        } catch (Exception e) {
            // Log at SEVERE; Level.CONFIG is below the default INFO threshold and would be hidden
            plugin.getVelocityLogger().log(Level.SEVERE, "An exception occurred loading the configuration file", e);
        }
    }

    /**
     * Saves the given configuration root node back to the config file.
     *
     * @param rootNode the configuration root node to save
     */
    public static void saveConfig(ConfigurationNode rootNode) {
        try {
            getConfigLoader().save(rootNode);
        } catch (IOException e) {
            plugin.getVelocityLogger().log(Level.SEVERE, "An exception occurred saving the configuration file", e);
        }
    }

    /**
     * Creates the plugin data folder and copies the default messages file for the configured language on first run.
     */
    public static void loadMessages() {
        try {
            if (!plugin.getDataFolder().exists()) {
                if (plugin.getDataFolder().mkdir()) {
                    plugin.getVelocityLogger().info("Created HuskSync data folder");
                }
            }
            File messagesFile = new File(plugin.getDataFolder(), "messages_" + Settings.language + ".yml");
            if (!messagesFile.exists()) {
                // Close the resource stream explicitly; Files.copy(InputStream, Path) does not close it
                try (var defaultMessages = Objects.requireNonNull(
                        plugin.getClass().getResourceAsStream("languages/" + Settings.language + ".yml"))) {
                    Files.copy(defaultMessages, messagesFile.toPath());
                }
                plugin.getVelocityLogger().info("Created HuskSync messages file");
            }
        } catch (IOException e) {
            plugin.getVelocityLogger().log(Level.SEVERE, "An exception occurred loading the messages file", e);
        }
    }

    // Builds a YAML loader pointed at the plugin's config.yml
    private static YAMLConfigurationLoader getConfigLoader() {
        File configFile = new File(plugin.getDataFolder(), "config.yml");
        return YAMLConfigurationLoader.builder()
                .setPath(configFile.toPath())
                .build();
    }

    /**
     * Loads and returns the plugin config file root node.
     *
     * @return the config root node, or {@code null} if it could not be read
     */
    public static ConfigurationNode getConfig() {
        try {
            return getConfigLoader().load();
        } catch (IOException e) {
            plugin.getVelocityLogger().log(Level.SEVERE, "An IOException has occurred loading the plugin config.");
            return null;
        }
    }

    /**
     * Loads and returns the messages file root node for the configured language.
     *
     * @return the messages root node, or {@code null} if it could not be read
     */
    public static ConfigurationNode getMessages() {
        try {
            File configFile = new File(plugin.getDataFolder(), "messages_" + Settings.language + ".yml");
            return YAMLConfigurationLoader.builder()
                    .setPath(configFile.toPath())
                    .build()
                    .load();
        } catch (IOException e) {
            plugin.getVelocityLogger().log(Level.SEVERE, "An IOException has occurred loading the plugin messages.");
            return null;
        }
    }
}

@ -0,0 +1,46 @@
package me.william278.husksync.velocity.listener;
import com.velocitypowered.api.event.Subscribe;
import com.velocitypowered.api.event.connection.PostLoginEvent;
import com.velocitypowered.api.proxy.Player;
import me.william278.husksync.HuskSyncVelocity;
import me.william278.husksync.PlayerData;
import me.william278.husksync.Settings;
import me.william278.husksync.redis.RedisMessage;
import java.io.IOException;
import java.util.Map;
import java.util.logging.Level;
/**
 * Listens to Velocity proxy events to keep player data caches in sync on join.
 */
public class VelocityEventListener {

    private static final HuskSyncVelocity plugin = HuskSyncVelocity.getInstance();

    /**
     * Handles a player joining the proxy: ensures their SQL records exist, refreshes the
     * cluster data caches from SQL and asks Bukkit servers to request data when they arrive.
     *
     * @param event The Velocity post-login event
     */
    @Subscribe
    public void onPostLogin(PostLoginEvent event) {
        final Player player = event.getPlayer();
        plugin.getProxyServer().getScheduler().buildTask(plugin, () -> {
            // Ensure the player has data on SQL and that it is up-to-date
            HuskSyncVelocity.dataManager.ensurePlayerExists(player.getUniqueId(), player.getUsername());

            // Get the player's data from SQL
            final Map<Settings.SynchronisationCluster, PlayerData> data = HuskSyncVelocity.dataManager.getPlayerData(player.getUniqueId());
            if (data == null) {
                // Explicit check instead of `assert` — assertions are disabled by default at
                // runtime, so the old assert offered no protection against an NPE below
                plugin.getVelocityLogger().log(Level.SEVERE, "Failed to fetch player data from SQL for " + player.getUsername());
                return;
            }

            // Update the player's data from SQL onto the cache
            for (Settings.SynchronisationCluster cluster : data.keySet()) {
                HuskSyncVelocity.dataManager.playerDataCache.get(cluster).updatePlayer(data.get(cluster));
            }

            // Send a message asking the bukkit to request data on join
            try {
                new RedisMessage(RedisMessage.MessageType.REQUEST_DATA_ON_JOIN,
                        new RedisMessage.MessageTarget(Settings.ServerType.BUKKIT, null, null),
                        RedisMessage.RequestOnJoinUpdateType.ADD_REQUESTER.toString(), player.getUniqueId().toString()).send();
            } catch (IOException e) {
                plugin.getVelocityLogger().log(Level.SEVERE, "Failed to serialize request data on join message data");
                e.printStackTrace();
            }
        }).schedule(); // buildTask only builds the task; without schedule() it never runs
    }
}

@ -0,0 +1,203 @@
package me.william278.husksync.velocity.listener;
import com.velocitypowered.api.proxy.Player;
import de.themoep.minedown.adventure.MineDown;
import me.william278.husksync.HuskSyncVelocity;
import me.william278.husksync.PlayerData;
import me.william278.husksync.Server;
import me.william278.husksync.Settings;
import me.william278.husksync.redis.RedisListener;
import me.william278.husksync.redis.RedisMessage;
import me.william278.husksync.util.MessageManager;
import java.io.IOException;
import java.util.Objects;
import java.util.Optional;
import java.util.UUID;
import java.util.logging.Level;
/**
 * Listens for and handles proxy-bound {@link RedisMessage}s on the Velocity proxy.
 */
public class VelocityRedisListener extends RedisListener {

    private static final HuskSyncVelocity plugin = HuskSyncVelocity.getInstance();

    // Initialize the listener on the proxy (starts the blocking redis subscription)
    public VelocityRedisListener() {
        listen();
    }

    /**
     * Returns the player's data for the given cluster, preferring the cache and
     * falling back to (and re-populating the cache from) MySQL.
     *
     * @param uuid      The player's UUID
     * @param clusterId The ID of the cluster to fetch data for
     * @return The player's data, or {@code null} if the cluster is unknown or no data exists
     */
    private PlayerData getPlayerCachedData(UUID uuid, String clusterId) {
        PlayerData data = null;
        for (Settings.SynchronisationCluster cluster : Settings.clusters) {
            if (cluster.clusterId().equals(clusterId)) {
                // Get the player data from the cache
                PlayerData cachedData = HuskSyncVelocity.dataManager.playerDataCache.get(cluster).getPlayer(uuid);
                if (cachedData != null) {
                    return cachedData;
                }

                data = Objects.requireNonNull(HuskSyncVelocity.dataManager.getPlayerData(uuid)).get(cluster); // Get their player data from MySQL
                if (data != null) {
                    // Only update the cache when data was actually found for this cluster
                    HuskSyncVelocity.dataManager.playerDataCache.get(cluster).updatePlayer(data);
                }
                break;
            }
        }
        return data; // Return the data
    }

    /**
     * Handle an incoming {@link RedisMessage}
     *
     * @param message The {@link RedisMessage} to handle
     */
    @Override
    public void handleMessage(RedisMessage message) {
        // Ignore messages destined for Bukkit servers
        if (message.getMessageTarget().targetServerType() != Settings.ServerType.PROXY) {
            return;
        }
        // Only process redis messages when ready
        if (!HuskSyncVelocity.readyForRedis) {
            return;
        }

        switch (message.getMessageType()) {
            case PLAYER_DATA_REQUEST -> {
                // Get the UUID of the requesting player
                final UUID requestingPlayerUUID = UUID.fromString(message.getMessageData());
                plugin.getProxyServer().getScheduler().buildTask(plugin, () -> {
                    try {
                        // Send the reply, serializing the message data
                        new RedisMessage(RedisMessage.MessageType.PLAYER_DATA_SET,
                                new RedisMessage.MessageTarget(Settings.ServerType.BUKKIT, requestingPlayerUUID, message.getMessageTarget().targetClusterId()),
                                RedisMessage.serialize(getPlayerCachedData(requestingPlayerUUID, message.getMessageTarget().targetClusterId())))
                                .send();

                        // Send an update to all bukkit servers removing the player from the requester cache
                        new RedisMessage(RedisMessage.MessageType.REQUEST_DATA_ON_JOIN,
                                new RedisMessage.MessageTarget(Settings.ServerType.BUKKIT, null, message.getMessageTarget().targetClusterId()),
                                RedisMessage.RequestOnJoinUpdateType.REMOVE_REQUESTER.toString(), requestingPlayerUUID.toString())
                                .send();

                        // Send synchronisation complete message
                        Optional<Player> player = plugin.getProxyServer().getPlayer(requestingPlayerUUID);
                        player.ifPresent(value -> value.sendActionBar(new MineDown(MessageManager.getMessage("synchronisation_complete")).toComponent()));
                    } catch (IOException e) {
                        log(Level.SEVERE, "Failed to serialize data when replying to a data request");
                        e.printStackTrace();
                    }
                }).schedule(); // buildTask only builds the task; without schedule() the reply never fires
            }
            case PLAYER_DATA_UPDATE -> {
                // Deserialize the PlayerData received
                PlayerData playerData;
                final String serializedPlayerData = message.getMessageData();
                try {
                    playerData = (PlayerData) RedisMessage.deserialize(serializedPlayerData);
                } catch (IOException | ClassNotFoundException e) {
                    log(Level.SEVERE, "Failed to deserialize PlayerData when handling a player update request");
                    e.printStackTrace();
                    return;
                }

                // Update the data in the cache and SQL
                for (Settings.SynchronisationCluster cluster : Settings.clusters) {
                    if (cluster.clusterId().equals(message.getMessageTarget().targetClusterId())) {
                        HuskSyncVelocity.dataManager.updatePlayerData(playerData, cluster);
                        break;
                    }
                }

                // Reply with the player data if they are still online (switching server)
                Optional<Player> updatingPlayer = plugin.getProxyServer().getPlayer(playerData.getPlayerUUID());
                updatingPlayer.ifPresent(player -> {
                    try {
                        new RedisMessage(RedisMessage.MessageType.PLAYER_DATA_SET,
                                new RedisMessage.MessageTarget(Settings.ServerType.BUKKIT, playerData.getPlayerUUID(), message.getMessageTarget().targetClusterId()),
                                RedisMessage.serialize(playerData))
                                .send();

                        // Send synchronisation complete message
                        player.sendActionBar(new MineDown(MessageManager.getMessage("synchronisation_complete")).toComponent());
                    } catch (IOException e) {
                        log(Level.SEVERE, "Failed to re-serialize PlayerData when handling a player update request");
                        e.printStackTrace();
                    }
                });
            }
            case CONNECTION_HANDSHAKE -> {
                // Reply to a Bukkit server's connection handshake to complete the process
                if (HuskSyncVelocity.isDisabling) return; // Return if the Proxy is disabling
                final UUID serverUUID = UUID.fromString(message.getMessageDataElements()[0]);
                final boolean hasMySqlPlayerDataBridge = Boolean.parseBoolean(message.getMessageDataElements()[1]);
                final String bukkitBrand = message.getMessageDataElements()[2];
                final String huskSyncVersion = message.getMessageDataElements()[3];
                try {
                    new RedisMessage(RedisMessage.MessageType.CONNECTION_HANDSHAKE,
                            new RedisMessage.MessageTarget(Settings.ServerType.BUKKIT, null, message.getMessageTarget().targetClusterId()),
                            serverUUID.toString(), "Velocity")
                            .send();
                    HuskSyncVelocity.synchronisedServers.add(
                            new Server(serverUUID, hasMySqlPlayerDataBridge,
                                    huskSyncVersion, bukkitBrand, message.getMessageTarget().targetClusterId()));
                    log(Level.INFO, "Completed handshake with " + bukkitBrand + " server (" + serverUUID + ")");
                } catch (IOException e) {
                    log(Level.SEVERE, "Failed to serialize handshake message data");
                    e.printStackTrace();
                }
            }
            case TERMINATE_HANDSHAKE -> {
                // Terminate the handshake with a Bukkit server
                final UUID serverUUID = UUID.fromString(message.getMessageDataElements()[0]);
                final String bukkitBrand = message.getMessageDataElements()[1];

                // Remove a server from the synchronised server list
                Server serverToRemove = null;
                for (Server server : HuskSyncVelocity.synchronisedServers) {
                    if (server.serverUUID().equals(serverUUID)) {
                        serverToRemove = server;
                        break;
                    }
                }
                HuskSyncVelocity.synchronisedServers.remove(serverToRemove);
                log(Level.INFO, "Terminated the handshake with " + bukkitBrand + " server (" + serverUUID + ")");
            }
            case DECODED_MPDB_DATA_SET -> {
                // Deserialize the PlayerData received
                PlayerData playerData;
                final String serializedPlayerData = message.getMessageDataElements()[0];
                final String playerName = message.getMessageDataElements()[1];
                try {
                    playerData = (PlayerData) RedisMessage.deserialize(serializedPlayerData);
                } catch (IOException | ClassNotFoundException e) {
                    log(Level.SEVERE, "Failed to deserialize PlayerData when handling incoming decoded MPDB data");
                    e.printStackTrace();
                    return;
                }

                //todo Migrator
                /*// Add the incoming data to the data to be saved
                MPDBMigrator.incomingPlayerData.put(playerData, playerName);

                // Increment players migrated
                MPDBMigrator.playersMigrated++;
                plugin.getBungeeLogger().log(Level.INFO, "Migrated " + MPDBMigrator.playersMigrated + "/" + MPDBMigrator.migratedDataSent + " players.");

                // When all the data has been received, save it
                if (MPDBMigrator.migratedDataSent == MPDBMigrator.playersMigrated) {
                    MPDBMigrator.loadIncomingData(MPDBMigrator.incomingPlayerData);
                }*/
            }
        }
    }

    /**
     * Log to console
     *
     * @param level   The {@link Level} to log
     * @param message Message to log
     */
    @Override
    public void log(Level level, String message) {
        plugin.getVelocityLogger().log(level, message);
    }
}

@ -0,0 +1,44 @@
package me.william278.husksync.velocity.util;
import me.william278.husksync.util.Logger;
import java.util.logging.Level;
/**
 * Adapts the plugin's {@link Logger} interface (java.util.logging levels) onto
 * Velocity's SLF4J logger.
 *
 * @param parent The backing SLF4J logger supplied by the proxy
 */
public record VelocityLogger(org.slf4j.Logger parent) implements Logger {

    @Override
    public void log(Level level, String message, Exception e) {
        logMessage(level, message);
        e.printStackTrace(); // TODO: prefer parent.error(message, e) so the trace stays in SLF4J
    }

    @Override
    public void log(Level level, String message) {
        logMessage(level, message);
    }

    @Override
    public void info(String message) {
        logMessage(Level.INFO, message);
    }

    @Override
    public void severe(String message) {
        logMessage(Level.SEVERE, message);
    }

    @Override
    public void config(String message) {
        logMessage(Level.CONFIG, message);
    }

    // Maps a java.util.logging Level onto the SLF4J logger
    private void logMessage(Level level, String message) {
        final int value = level.intValue();
        if (value >= Level.SEVERE.intValue()) {
            parent.error(message);
        } else if (value >= Level.WARNING.intValue()) {
            parent.warn(message);
        } else if (value == Level.CONFIG.intValue()) {
            // Fixed: the old switch used `case 70`, but Level.CONFIG.intValue() is 700,
            // so config-level messages silently fell through to info()
            parent.warn("[Config] " + message);
        } else {
            parent.info(message);
        }
    }
}
Loading…
Cancel
Save