Add legacy migrator

feat/data-edit-commands
William 2 years ago
parent 3d29d45d8a
commit 0fce3c44ab

@ -20,6 +20,7 @@ shadowJar {
relocate 'net.byteflux.libby', 'net.william278.husksync.libraries.libby'
relocate 'org.bstats', 'net.william278.husksync.libraries.bstats'
relocate 'net.william278.mpdbconverter', 'net.william278.husksync.libraries.mpdbconverter'
relocate 'net.william278.hslmigrator', 'net.william278.husksync.libraries.hslconverter'
}
java {

@ -2,6 +2,7 @@ dependencies {
implementation project(path: ':common')
implementation 'org.bstats:bstats-bukkit:3.0.0'
implementation 'net.william278:mpdbdataconverter:1.0'
implementation 'net.william278:hsldataconverter:1.0'
compileOnly 'redis.clients:jedis:4.2.3'
compileOnly 'commons-io:commons-io:2.11.0'
@ -24,4 +25,5 @@ shadowJar {
relocate 'net.byteflux.libby', 'net.william278.husksync.libraries.libby'
relocate 'org.bstats', 'net.william278.husksync.libraries.bstats'
relocate 'net.william278.mpdbconverter', 'net.william278.husksync.libraries.mpdbconverter'
relocate 'net.william278.hslmigrator', 'net.william278.husksync.libraries.hslconverter'
}

@ -0,0 +1,320 @@
package net.william278.husksync.migrator;
import com.zaxxer.hikari.HikariDataSource;
import me.william278.husksync.bukkit.data.DataSerializer;
import net.william278.hslmigrator.HSLConverter;
import net.william278.husksync.HuskSync;
import net.william278.husksync.config.Settings;
import net.william278.husksync.data.*;
import net.william278.husksync.player.User;
import org.bukkit.Material;
import org.bukkit.Statistic;
import org.bukkit.entity.EntityType;
import org.jetbrains.annotations.NotNull;
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.logging.Level;
import java.util.regex.Pattern;
/**
 * A migrator for migrating legacy HuskSync v1.x user data into HuskSync v2.x's format.
 * <p>
 * The migration wipes the current user data database, downloads every entry from the
 * source (legacy) database, converts each entry and writes it back through the plugin's
 * database layer.
 */
public class LegacyMigrator extends Migrator {

    // Converter used to deserialize the legacy (v1.x) serialized data formats
    private final HSLConverter hslConverter;

    // Credentials and table names of the source legacy database. These default to the
    // values configured for this server and may be overridden via the
    // "husksync migrate legacy set <parameter> <value>" command.
    private String sourceHost;
    private int sourcePort;
    private String sourceUsername;
    private String sourcePassword;
    private String sourceDatabase;
    private String sourcePlayersTable;
    private String sourceDataTable;

    public LegacyMigrator(@NotNull HuskSync plugin) {
        super(plugin);
        this.hslConverter = HSLConverter.getInstance();
        this.sourceHost = plugin.getSettings().getStringValue(Settings.ConfigOption.DATABASE_HOST);
        this.sourcePort = plugin.getSettings().getIntegerValue(Settings.ConfigOption.DATABASE_PORT);
        this.sourceUsername = plugin.getSettings().getStringValue(Settings.ConfigOption.DATABASE_USERNAME);
        this.sourcePassword = plugin.getSettings().getStringValue(Settings.ConfigOption.DATABASE_PASSWORD);
        this.sourceDatabase = plugin.getSettings().getStringValue(Settings.ConfigOption.DATABASE_NAME);
        // Legacy v1.x default table names (the v2.x config no longer exposes these options)
        this.sourcePlayersTable = "husksync_players";
        this.sourceDataTable = "husksync_data";
    }

    /**
     * Carries out the migration asynchronously: wipes the existing database, downloads
     * all legacy entries, converts them, and uploads them.
     *
     * @return a future completing with {@code true} if the migration succeeded, or
     * {@code false} if an error occurred (e.g. bad source database credentials)
     */
    @Override
    public CompletableFuture<Boolean> start() {
        plugin.getLoggingAdapter().log(Level.INFO, "Starting migration of legacy HuskSync v1.x data...");
        final long startTime = System.currentTimeMillis();
        return CompletableFuture.supplyAsync(() -> {
            // Wipe the existing database, preparing it for data import
            plugin.getLoggingAdapter().log(Level.INFO, "Preparing existing database (wiping)...");
            plugin.getDatabase().wipeDatabase().join();
            plugin.getLoggingAdapter().log(Level.INFO, "Successfully wiped user data database (took " + (System.currentTimeMillis() - startTime) + "ms)");

            // Create jdbc driver connection url
            final String jdbcUrl = "jdbc:mysql://" + sourceHost + ":" + sourcePort + "/" + sourceDatabase;

            // Create a short-lived connection pool for reading the legacy database
            try (final HikariDataSource connectionPool = new HikariDataSource()) {
                plugin.getLoggingAdapter().log(Level.INFO, "Establishing connection to legacy database...");
                connectionPool.setJdbcUrl(jdbcUrl);
                connectionPool.setUsername(sourceUsername);
                connectionPool.setPassword(sourcePassword);
                connectionPool.setPoolName((getIdentifier() + "_migrator_pool").toUpperCase());

                plugin.getLoggingAdapter().log(Level.INFO, "Downloading raw data from the legacy database...");
                final List<LegacyData> dataToMigrate = new ArrayList<>();
                try (final Connection connection = connectionPool.getConnection()) {
                    try (final PreparedStatement statement = connection.prepareStatement("""
                            SELECT `uuid`, `name`, `inventory`, `ender_chest`, `health`, `max_health`, `health_scale`, `hunger`, `saturation`, `saturation_exhaustion`, `selected_slot`, `status_effects`, `total_experience`, `exp_level`, `exp_progress`, `game_mode`, `statistics`, `is_flying`, `advancements`, `location`
                            FROM `%source_players_table%`
                            INNER JOIN `%source_data_table%`
                            ON `%source_players_table%`.`id` = `%source_data_table%`.`player_id`;
                            """.replaceAll(Pattern.quote("%source_players_table%"), sourcePlayersTable)
                            .replaceAll(Pattern.quote("%source_data_table%"), sourceDataTable))) {
                        try (final ResultSet resultSet = statement.executeQuery()) {
                            int playersMigrated = 0;
                            while (resultSet.next()) {
                                dataToMigrate.add(new LegacyData(
                                        new User(UUID.fromString(resultSet.getString("uuid")),
                                                resultSet.getString("name")),
                                        resultSet.getString("inventory"),
                                        resultSet.getString("ender_chest"),
                                        resultSet.getDouble("health"),
                                        resultSet.getDouble("max_health"),
                                        resultSet.getDouble("health_scale"),
                                        resultSet.getInt("hunger"),
                                        resultSet.getFloat("saturation"),
                                        resultSet.getFloat("saturation_exhaustion"),
                                        resultSet.getInt("selected_slot"),
                                        resultSet.getString("status_effects"),
                                        resultSet.getInt("total_experience"),
                                        resultSet.getInt("exp_level"),
                                        resultSet.getFloat("exp_progress"),
                                        resultSet.getString("game_mode"),
                                        resultSet.getString("statistics"),
                                        resultSet.getBoolean("is_flying"),
                                        resultSet.getString("advancements"),
                                        resultSet.getString("location")
                                ));
                                playersMigrated++;
                                // Periodic progress logging so large migrations don't look stalled
                                if (playersMigrated % 25 == 0) {
                                    plugin.getLoggingAdapter().log(Level.INFO, "Downloaded legacy data for " + playersMigrated + " players...");
                                }
                            }
                        }
                    }
                }
                plugin.getLoggingAdapter().log(Level.INFO, "Completed download of " + dataToMigrate.size() + " entries from the legacy database!");
                plugin.getLoggingAdapter().log(Level.INFO, "Converting HuskSync 1.x data to the latest HuskSync user data format...");
                // Convert and upload every user's data, awaiting every write before
                // reporting completion. (Previously these futures were fired without
                // being joined, so "Migration complete" could be logged - and true
                // returned - before the data had actually been saved. The inner
                // thenRun also discarded the setUserData future; thenCompose chains
                // it properly so failures reach the exceptionally handler.)
                CompletableFuture.allOf(dataToMigrate.stream()
                        .map(data -> data.toUserData(hslConverter).thenCompose(convertedData ->
                                plugin.getDatabase().ensureUser(data.user()).thenCompose(ignored ->
                                                plugin.getDatabase().setUserData(data.user(), convertedData, DataSaveCause.LEGACY_MIGRATION))
                                        .exceptionally(exception -> {
                                            plugin.getLoggingAdapter().log(Level.SEVERE, "Failed to migrate legacy data for " + data.user().username + ": " + exception.getMessage());
                                            return null;
                                        })))
                        .toArray(CompletableFuture[]::new)).join();
                plugin.getLoggingAdapter().log(Level.INFO, "Migration complete for " + dataToMigrate.size() + " users in " + ((System.currentTimeMillis() - startTime) / 1000) + " seconds!");
                return true;
            } catch (Exception e) {
                plugin.getLoggingAdapter().log(Level.SEVERE, "Error while migrating legacy data: " + e.getMessage() + " - are your source database credentials correct?");
                return false;
            }
        });
    }

    /**
     * Handles a "set" configuration command, updating one of the source database
     * parameters used by the migration.
     *
     * @param args the command arguments: {@code <parameter> <value>}
     */
    @Override
    public void handleConfigurationCommand(@NotNull String[] args) {
        if (args.length == 2) {
            // The switch yields true if the named parameter exists and was set
            if (switch (args[0].toLowerCase()) {
                case "host" -> {
                    this.sourceHost = args[1];
                    yield true;
                }
                case "port" -> {
                    try {
                        this.sourcePort = Integer.parseInt(args[1]);
                        yield true;
                    } catch (NumberFormatException e) {
                        yield false;
                    }
                }
                case "username" -> {
                    this.sourceUsername = args[1];
                    yield true;
                }
                case "password" -> {
                    this.sourcePassword = args[1];
                    yield true;
                }
                case "database" -> {
                    this.sourceDatabase = args[1];
                    yield true;
                }
                case "players_table" -> {
                    this.sourcePlayersTable = args[1];
                    yield true;
                }
                case "data_table" -> {
                    this.sourceDataTable = args[1];
                    yield true;
                }
                default -> false;
            }) {
                // Re-print the help menu so the user sees the updated credentials
                plugin.getLoggingAdapter().log(Level.INFO, getHelpMenu());
                plugin.getLoggingAdapter().log(Level.INFO, "Successfully set " + args[0] + " to " +
                        obfuscateDataString(args[1]));
            } else {
                plugin.getLoggingAdapter().log(Level.INFO, "Invalid operation, could not set " + args[0] + " to " +
                        obfuscateDataString(args[1]) + " (is it a valid option?)");
            }
        } else {
            plugin.getLoggingAdapter().log(Level.INFO, getHelpMenu());
        }
    }

    @NotNull
    @Override
    public String getIdentifier() {
        return "legacy";
    }

    @NotNull
    @Override
    public String getName() {
        return "HuskSync v1.x --> v2.x Migrator";
    }

    /**
     * @return the migration wizard help text, with the current (obfuscated where
     * sensitive) source database credentials substituted in
     */
    @NotNull
    @Override
    public String getHelpMenu() {
        return """
                === HuskSync v1.x --> v2.x Migration Wizard =========
                This will migrate all user data from HuskSync v1.x to
                HuskSync v2.x's new format. To perform the migration,
                please follow the steps below carefully.

                [!] Existing data in the database will be wiped. [!]

                STEP 1] Please ensure no players are on any servers.

                STEP 2] HuskSync will need to connect to the database
                used to hold the existing, legacy HuskSync data.
                If this is the same database as the one you are
                currently using, you probably don't need to change
                anything.
                Please check that the credentials below are the
                correct credentials of the source legacy HuskSync
                database.
                - host: %source_host%
                - port: %source_port%
                - username: %source_username%
                - password: %source_password%
                - database: %source_database%
                - players_table: %source_players_table%
                - data_table: %source_data_table%
                If any of these are not correct, please correct them
                using the command:
                "husksync migrate legacy set <parameter> <value>"
                (e.g.: "husksync migrate legacy set host 1.2.3.4")

                STEP 3] HuskSync will migrate data into the database
                tables configures in the config.yml file of this
                server. Please make sure you're happy with this
                before proceeding.

                STEP 4] To start the migration, please run:
                "husksync migrate legacy start"
                """.replaceAll(Pattern.quote("%source_host%"), obfuscateDataString(sourceHost))
                .replaceAll(Pattern.quote("%source_port%"), Integer.toString(sourcePort))
                .replaceAll(Pattern.quote("%source_username%"), obfuscateDataString(sourceUsername))
                .replaceAll(Pattern.quote("%source_password%"), obfuscateDataString(sourcePassword))
                .replaceAll(Pattern.quote("%source_database%"), sourceDatabase)
                .replaceAll(Pattern.quote("%source_players_table%"), sourcePlayersTable)
                .replaceAll(Pattern.quote("%source_data_table%"), sourceDataTable);
    }

    /**
     * Represents one row of raw data exported from the legacy (v1.x) source database.
     *
     * @param user the user whose data is being migrated
     */
    private record LegacyData(@NotNull User user,
                              @NotNull String serializedInventory, @NotNull String serializedEnderChest,
                              double health, double maxHealth, double healthScale, int hunger, float saturation,
                              float saturationExhaustion, int selectedSlot, @NotNull String serializedPotionEffects,
                              int totalExp, int expLevel, float expProgress,
                              @NotNull String gameMode, @NotNull String serializedStatistics, boolean isFlying,
                              @NotNull String serializedAdvancements, @NotNull String serializedLocation) {

        /**
         * Converts this legacy row into HuskSync v2.x's {@link UserData} format.
         *
         * @param converter the {@link HSLConverter} used to deserialize the legacy blobs
         * @return a future resolving to the converted {@link UserData}; deserialization
         * failures complete the future exceptionally (wrapped in a RuntimeException)
         */
        @NotNull
        public CompletableFuture<UserData> toUserData(@NotNull HSLConverter converter) {
            return CompletableFuture.supplyAsync(() -> {
                try {
                    final DataSerializer.StatisticData legacyStatisticData = converter
                            .deserializeStatisticData(serializedStatistics);
                    final StatisticsData convertedStatisticData = new StatisticsData(
                            convertStatisticMap(legacyStatisticData.untypedStatisticValues()),
                            convertMaterialStatisticMap(legacyStatisticData.blockStatisticValues()),
                            convertMaterialStatisticMap(legacyStatisticData.itemStatisticValues()),
                            convertEntityStatisticMap(legacyStatisticData.entityStatisticValues()));

                    final List<AdvancementData> convertedAdvancements = converter
                            .deserializeAdvancementData(serializedAdvancements)
                            .stream().map(data -> new AdvancementData(data.key(), data.criteriaMap())).toList();

                    // Legacy location data may be absent; fall back to a sane spawn-like
                    // default ("world", 0/64/0). NOTE(review): a random UUID is used for
                    // the world uuid since the legacy format did not store one - confirm
                    // downstream consumers tolerate this.
                    final DataSerializer.PlayerLocation legacyLocationData = converter
                            .deserializePlayerLocationData(serializedLocation);
                    final LocationData convertedLocationData = new LocationData(
                            legacyLocationData == null ? "world" : legacyLocationData.worldName(),
                            UUID.randomUUID(),
                            "NORMAL",
                            legacyLocationData == null ? 0d : legacyLocationData.x(),
                            legacyLocationData == null ? 64d : legacyLocationData.y(),
                            legacyLocationData == null ? 0d : legacyLocationData.z(),
                            legacyLocationData == null ? 90f : legacyLocationData.yaw(),
                            legacyLocationData == null ? 180f : legacyLocationData.pitch());

                    return new UserData(new StatusData(health, maxHealth, healthScale, hunger, saturation,
                            saturationExhaustion, selectedSlot, totalExp, expLevel, expProgress, gameMode, isFlying),
                            new ItemData(serializedInventory), new ItemData(serializedEnderChest),
                            new PotionEffectData(serializedPotionEffects), convertedAdvancements,
                            convertedStatisticData, convertedLocationData,
                            new PersistentDataContainerData(new HashMap<>()));
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            });
        }

        // Converts a legacy Statistic->value map into a String-keyed map
        private Map<String, Integer> convertStatisticMap(@NotNull HashMap<Statistic, Integer> rawMap) {
            final HashMap<String, Integer> convertedMap = new HashMap<>();
            for (Map.Entry<Statistic, Integer> entry : rawMap.entrySet()) {
                convertedMap.put(entry.getKey().toString(), entry.getValue());
            }
            return convertedMap;
        }

        // Converts a legacy Statistic->(Material->value) nested map into String-keyed maps
        private Map<String, Map<String, Integer>> convertMaterialStatisticMap(@NotNull HashMap<Statistic, HashMap<Material, Integer>> rawMap) {
            final Map<String, Map<String, Integer>> convertedMap = new HashMap<>();
            for (Map.Entry<Statistic, HashMap<Material, Integer>> entry : rawMap.entrySet()) {
                for (Map.Entry<Material, Integer> materialEntry : entry.getValue().entrySet()) {
                    convertedMap.computeIfAbsent(entry.getKey().toString(), k -> new HashMap<>())
                            .put(materialEntry.getKey().toString(), materialEntry.getValue());
                }
            }
            return convertedMap;
        }

        // Converts a legacy Statistic->(EntityType->value) nested map into String-keyed maps
        private Map<String, Map<String, Integer>> convertEntityStatisticMap(@NotNull HashMap<Statistic, HashMap<EntityType, Integer>> rawMap) {
            final Map<String, Map<String, Integer>> convertedMap = new HashMap<>();
            for (Map.Entry<Statistic, HashMap<EntityType, Integer>> entry : rawMap.entrySet()) {
                for (Map.Entry<EntityType, Integer> entityEntry : entry.getValue().entrySet()) {
                    convertedMap.computeIfAbsent(entry.getKey().toString(), k -> new HashMap<>())
                            .put(entityEntry.getKey().toString(), entityEntry.getValue());
                }
            }
            return convertedMap;
        }
    }
}

@ -21,6 +21,9 @@ import java.util.concurrent.CompletableFuture;
import java.util.logging.Level;
import java.util.regex.Pattern;
/**
* A migrator for migrating MySQLPlayerDataBridge data to HuskSync {@link UserData}
*/
public class MpdbMigrator extends Migrator {
private final MPDBConverter mpdbConverter;
@ -51,6 +54,11 @@ public class MpdbMigrator extends Migrator {
plugin.getLoggingAdapter().log(Level.INFO, "Starting migration from MySQLPlayerDataBridge to HuskSync...");
final long startTime = System.currentTimeMillis();
return CompletableFuture.supplyAsync(() -> {
// Wipe the existing database, preparing it for data import
plugin.getLoggingAdapter().log(Level.INFO, "Preparing existing database (wiping)...");
plugin.getDatabase().wipeDatabase().join();
plugin.getLoggingAdapter().log(Level.INFO, "Successfully wiped user data database (took " + (System.currentTimeMillis() - startTime) + "ms)");
// Create jdbc driver connection url
final String jdbcUrl = "jdbc:mysql://" + sourceHost + ":" + sourcePort + "/" + sourceDatabase;
@ -70,9 +78,11 @@ public class MpdbMigrator extends Migrator {
FROM `%source_inventory_table%`
INNER JOIN `%source_ender_chest_table%`
ON `%source_inventory_table%`.`player_uuid` = `%source_ender_chest_table%`.`player_uuid`
INNER JOIN `%source_experience_table%`
ON `%source_inventory_table%`.`player_uuid` = `%source_experience_table%`.`player_uuid`;
""".replaceAll(Pattern.quote("%source_inventory_table%"), sourceInventoryTable).replaceAll(Pattern.quote("%source_ender_chest_table%"), sourceEnderChestTable).replaceAll(Pattern.quote("%source_experience_table%"), sourceExperienceTable))) {
INNER JOIN `%source_xp_table%`
ON `%source_inventory_table%`.`player_uuid` = `%source_xp_table%`.`player_uuid`;
""".replaceAll(Pattern.quote("%source_inventory_table%"), sourceInventoryTable)
.replaceAll(Pattern.quote("%source_ender_chest_table%"), sourceEnderChestTable)
.replaceAll(Pattern.quote("%source_xp_table%"), sourceExperienceTable))) {
try (final ResultSet resultSet = statement.executeQuery()) {
int playersMigrated = 0;
while (resultSet.next()) {
@ -98,11 +108,15 @@ public class MpdbMigrator extends Migrator {
plugin.getLoggingAdapter().log(Level.INFO, "Converting raw MySQLPlayerDataBridge data to HuskSync user data...");
dataToMigrate.forEach(data -> data.toUserData(mpdbConverter).thenAccept(convertedData ->
plugin.getDatabase().ensureUser(data.user()).thenRun(() ->
plugin.getDatabase().setUserData(data.user(), convertedData, DataSaveCause.MPDB_MIGRATION))));
plugin.getDatabase().setUserData(data.user(), convertedData, DataSaveCause.MPDB_MIGRATION))
.exceptionally(exception -> {
plugin.getLoggingAdapter().log(Level.SEVERE, "Failed to migrate MySQLPlayerDataBridge data for " + data.user().username + ": " + exception.getMessage());
return null;
})));
plugin.getLoggingAdapter().log(Level.INFO, "Migration complete for " + dataToMigrate.size() + " users in " + ((System.currentTimeMillis() - startTime) / 1000) + " seconds!");
return true;
} catch (Exception e) {
plugin.getLoggingAdapter().log(Level.SEVERE, "Error while migrating data: " + e.getMessage());
plugin.getLoggingAdapter().log(Level.SEVERE, "Error while migrating data: " + e.getMessage() + " - are your source database credentials correct?");
return false;
}
});
@ -151,10 +165,14 @@ public class MpdbMigrator extends Migrator {
default -> false;
}) {
plugin.getLoggingAdapter().log(Level.INFO, getHelpMenu());
plugin.getLoggingAdapter().log(Level.INFO, "Successfully set " + args[0] + " to " + args[1]);
plugin.getLoggingAdapter().log(Level.INFO, "Successfully set " + args[0] + " to " +
obfuscateDataString(args[1]));
} else {
plugin.getLoggingAdapter().log(Level.INFO, "Invalid operation, could not set " + args[0] + " to " + args[1] + " (is it a valid option?)");
plugin.getLoggingAdapter().log(Level.INFO, "Invalid operation, could not set " + args[0] + " to " +
obfuscateDataString(args[1]) + " (is it a valid option?)");
}
} else {
plugin.getLoggingAdapter().log(Level.INFO, getHelpMenu());
}
}
@ -167,7 +185,7 @@ public class MpdbMigrator extends Migrator {
@NotNull
@Override
public String getName() {
return "MySQLPlayerDataBridge";
return "MySQLPlayerDataBridge Migrator";
}
@NotNull
@ -181,7 +199,9 @@ public class MpdbMigrator extends Migrator {
To prevent excessive migration times, other non-vital
data will not be transferred.
STEP 1] Please ensure no players are on the server.
[!] Existing data in the database will be wiped. [!]
STEP 1] Please ensure no players are on any servers.
STEP 2] HuskSync will need to connect to the database
used to hold the source MySQLPlayerDataBridge data.
@ -196,8 +216,8 @@ public class MpdbMigrator extends Migrator {
- experience_table: %source_xp_table%
If any of these are not correct, please correct them
using the command:
"husksync migrate mpdb set <parameter> <host>"
(e.g.: "husksync migrate mpdb set host 123.456.789")
"husksync migrate mpdb set <parameter> <value>"
(e.g.: "husksync migrate mpdb set host 1.2.3.4")
STEP 3] HuskSync will migrate data into the database
tables configures in the config.yml file of this
@ -206,12 +226,36 @@ public class MpdbMigrator extends Migrator {
STEP 4] To start the migration, please run:
"husksync migrate mpdb start"
""";
""".replaceAll(Pattern.quote("%source_host%"), obfuscateDataString(sourceHost))
.replaceAll(Pattern.quote("%source_port%"), Integer.toString(sourcePort))
.replaceAll(Pattern.quote("%source_username%"), obfuscateDataString(sourceUsername))
.replaceAll(Pattern.quote("%source_password%"), obfuscateDataString(sourcePassword))
.replaceAll(Pattern.quote("%source_database%"), sourceDatabase)
.replaceAll(Pattern.quote("%source_inventory_table%"), sourceInventoryTable)
.replaceAll(Pattern.quote("%source_ender_chest_table%"), sourceEnderChestTable)
.replaceAll(Pattern.quote("%source_xp_table%"), sourceExperienceTable);
}
/**
* Represents data exported from the MySQLPlayerDataBridge source database
*
* @param user The user whose data is being migrated
* @param serializedInventory The serialized inventory data
* @param serializedArmor The serialized armor data
* @param serializedEnderChest The serialized ender chest data
* @param expLevel The player's current XP level
* @param expProgress The player's current XP progress
* @param totalExp The player's total XP score
*/
private record MpdbData(@NotNull User user, @NotNull String serializedInventory,
@NotNull String serializedArmor, @NotNull String serializedEnderChest,
int expLevel, float expProgress, int totalExp) {
/**
* Converts exported MySQLPlayerDataBridge data into HuskSync's {@link UserData} object format
*
* @param converter The {@link MPDBConverter} to use for converting to {@link ItemStack}s
* @return A {@link CompletableFuture} that will resolve to the converted {@link UserData} object
*/
@NotNull
public CompletableFuture<UserData> toUserData(@NotNull MPDBConverter converter) {
return CompletableFuture.supplyAsync(() -> {
@ -224,10 +268,9 @@ public class MpdbMigrator extends Migrator {
}
// Create user data record
return new UserData(
new StatusData(20, 20, 0, 20, 10,
1, 0, totalExp, expLevel, expProgress, "SURVIVAL",
false),
return new UserData(new StatusData(20, 20, 0, 20, 10,
1, 0, totalExp, expLevel, expProgress, "SURVIVAL",
false),
new ItemData(BukkitSerializer.serializeItemStackArray(inventory.getContents()).join()),
new ItemData(BukkitSerializer.serializeItemStackArray(converter
.getItemStackFromSerializedData(serializedEnderChest)).join()),

@ -132,8 +132,8 @@ public class Settings {
DATABASE_CONNECTION_POOL_MAX_LIFETIME("database.connection_pool.maximum_lifetime", OptionType.INTEGER, 1800000),
DATABASE_CONNECTION_POOL_KEEPALIVE("database.connection_pool.keepalive_time", OptionType.INTEGER, 0),
DATABASE_CONNECTION_POOL_TIMEOUT("database.connection_pool.connection_timeout", OptionType.INTEGER, 5000),
DATABASE_PLAYERS_TABLE_NAME("database.table_names.players_table", OptionType.STRING, "husksync_players"),
DATABASE_DATA_TABLE_NAME("database.table_names.data_table", OptionType.STRING, "husksync_data"),
DATABASE_USERS_TABLE_NAME("database.table_names.users_table", OptionType.STRING, "husksync_users"),
DATABASE_USER_DATA_TABLE_NAME("database.table_names.user_data_table", OptionType.STRING, "husksync_user_data"),
REDIS_HOST("redis.credentials.host", OptionType.STRING, "localhost"),
REDIS_PORT("redis.credentials.port", OptionType.INTEGER, 6379),

@ -5,6 +5,7 @@ import net.william278.husksync.data.DataSaveCause;
import net.william278.husksync.data.UserData;
import net.william278.husksync.data.VersionedUserData;
import net.william278.husksync.event.EventCannon;
import net.william278.husksync.migrator.Migrator;
import net.william278.husksync.player.User;
import net.william278.husksync.util.Logger;
import net.william278.husksync.util.ResourceReader;
@ -117,8 +118,8 @@ public abstract class Database {
* @return the formatted statement, with table placeholders replaced with the correct names
*/
protected final String formatStatementTables(@NotNull String sql) {
return sql.replaceAll("%players_table%", playerTableName)
.replaceAll("%data_table%", dataTableName);
return sql.replaceAll("%users_table%", playerTableName)
.replaceAll("%user_data_table%", dataTableName);
}
/**
@ -205,6 +206,15 @@ public abstract class Database {
*/
public abstract CompletableFuture<Void> setUserData(@NotNull User user, @NotNull UserData userData, @NotNull DataSaveCause dataSaveCause);
/**
* Wipes <b>all</b> {@link UserData} entries from the database.
* <b>This should never be used</b>, except when preparing tables for migration.
*
* @return A future returning void when complete
* @see Migrator#start()
*/
public abstract CompletableFuture<Void> wipeDatabase();
/**
* Close the database connection
*/

@ -53,8 +53,8 @@ public class MySqlDatabase extends Database {
public MySqlDatabase(@NotNull Settings settings, @NotNull ResourceReader resourceReader, @NotNull Logger logger,
@NotNull DataAdapter dataAdapter, @NotNull EventCannon eventCannon) {
super(settings.getStringValue(Settings.ConfigOption.DATABASE_PLAYERS_TABLE_NAME),
settings.getStringValue(Settings.ConfigOption.DATABASE_DATA_TABLE_NAME),
super(settings.getStringValue(Settings.ConfigOption.DATABASE_USERS_TABLE_NAME),
settings.getStringValue(Settings.ConfigOption.DATABASE_USER_DATA_TABLE_NAME),
Math.max(1, Math.min(20, settings.getIntegerValue(Settings.ConfigOption.SYNCHRONIZATION_MAX_USER_DATA_RECORDS))),
resourceReader, dataAdapter, eventCannon, logger);
this.mySqlHost = settings.getStringValue(Settings.ConfigOption.DATABASE_HOST);
@ -127,7 +127,7 @@ public class MySqlDatabase extends Database {
// Update a user's name if it has changed in the database
try (Connection connection = getConnection()) {
try (PreparedStatement statement = connection.prepareStatement(formatStatementTables("""
UPDATE `%players_table%`
UPDATE `%users_table%`
SET `username`=?
WHERE `uuid`=?"""))) {
@ -145,7 +145,7 @@ public class MySqlDatabase extends Database {
// Insert new player data into the database
try (Connection connection = getConnection()) {
try (PreparedStatement statement = connection.prepareStatement(formatStatementTables("""
INSERT INTO `%players_table%` (`uuid`,`username`)
INSERT INTO `%users_table%` (`uuid`,`username`)
VALUES (?,?);"""))) {
statement.setString(1, user.uuid.toString());
@ -164,7 +164,7 @@ public class MySqlDatabase extends Database {
try (Connection connection = getConnection()) {
try (PreparedStatement statement = connection.prepareStatement(formatStatementTables("""
SELECT `uuid`, `username`
FROM `%players_table%`
FROM `%users_table%`
WHERE `uuid`=?"""))) {
statement.setString(1, uuid.toString());
@ -188,7 +188,7 @@ public class MySqlDatabase extends Database {
try (Connection connection = getConnection()) {
try (PreparedStatement statement = connection.prepareStatement(formatStatementTables("""
SELECT `uuid`, `username`
FROM `%players_table%`
FROM `%users_table%`
WHERE `username`=?"""))) {
statement.setString(1, username);
@ -211,7 +211,7 @@ public class MySqlDatabase extends Database {
try (Connection connection = getConnection()) {
try (PreparedStatement statement = connection.prepareStatement(formatStatementTables("""
SELECT `version_uuid`, `timestamp`, `save_cause`, `data`
FROM `%data_table%`
FROM `%user_data_table%`
WHERE `player_uuid`=?
ORDER BY `timestamp` DESC
LIMIT 1;"""))) {
@ -242,7 +242,7 @@ public class MySqlDatabase extends Database {
try (Connection connection = getConnection()) {
try (PreparedStatement statement = connection.prepareStatement(formatStatementTables("""
SELECT `version_uuid`, `timestamp`, `save_cause`, `data`
FROM `%data_table%`
FROM `%user_data_table%`
WHERE `player_uuid`=?
ORDER BY `timestamp` DESC;"""))) {
statement.setString(1, user.uuid.toString());
@ -273,7 +273,7 @@ public class MySqlDatabase extends Database {
try (Connection connection = getConnection()) {
try (PreparedStatement statement = connection.prepareStatement(formatStatementTables("""
SELECT `version_uuid`, `timestamp`, `save_cause`, `data`
FROM `%data_table%`
FROM `%user_data_table%`
WHERE `player_uuid`=? AND `version_uuid`=?
ORDER BY `timestamp` DESC
LIMIT 1;"""))) {
@ -304,7 +304,7 @@ public class MySqlDatabase extends Database {
if (data.size() > maxUserDataRecords) {
try (Connection connection = getConnection()) {
try (PreparedStatement statement = connection.prepareStatement(formatStatementTables("""
DELETE FROM `%data_table%`
DELETE FROM `%user_data_table%`
WHERE `player_uuid`=?
ORDER BY `timestamp` ASC
LIMIT %entry_count%;""".replace("%entry_count%",
@ -324,7 +324,7 @@ public class MySqlDatabase extends Database {
return CompletableFuture.supplyAsync(() -> {
try (Connection connection = getConnection()) {
try (PreparedStatement statement = connection.prepareStatement(formatStatementTables("""
DELETE FROM `%data_table%`
DELETE FROM `%user_data_table%`
WHERE `player_uuid`=? AND `version_uuid`=?
LIMIT 1;"""))) {
statement.setString(1, user.uuid.toString());
@ -348,7 +348,7 @@ public class MySqlDatabase extends Database {
final UserData finalData = dataSaveEvent.getUserData();
try (Connection connection = getConnection()) {
try (PreparedStatement statement = connection.prepareStatement(formatStatementTables("""
INSERT INTO `%data_table%`
INSERT INTO `%user_data_table%`
(`player_uuid`,`version_uuid`,`timestamp`,`save_cause`,`data`)
VALUES (?,UUID(),NOW(),?,?);"""))) {
statement.setString(1, user.uuid.toString());
@ -364,6 +364,19 @@ public class MySqlDatabase extends Database {
}).thenRun(() -> pruneUserData(user).join());
}
@Override
public CompletableFuture<Void> wipeDatabase() {
return CompletableFuture.runAsync(() -> {
try (Connection connection = getConnection()) {
try (Statement statement = connection.createStatement()) {
statement.executeUpdate(formatStatementTables("DELETE FROM `%user_data_table%`;"));
}
} catch (SQLException e) {
getLogger().log(Level.SEVERE, "Failed to wipe the database", e);
}
});
}
@Override
public void close() {
if (connectionPool != null) {

@ -1,43 +0,0 @@
package net.william278.husksync.migrator;
import net.william278.husksync.HuskSync;
import org.jetbrains.annotations.NotNull;
import java.util.concurrent.CompletableFuture;
//todo: implement this
// NOTE(review): unimplemented placeholder for the legacy HuskSync v1.x data
// migrator; deleted in this commit in favour of a full implementation.
public class LegacyMigrator extends Migrator {

    public LegacyMigrator(@NotNull HuskSync plugin) {
        super(plugin);
    }

    // Stub: returns null instead of a future, so invoking it would NPE at the
    // call site - not safe to run.
    @Override
    public CompletableFuture<Boolean> start() {
        return null;
    }

    // Stub: configuration commands are silently ignored.
    @Override
    public void handleConfigurationCommand(@NotNull String[] args) {
    }

    @NotNull
    @Override
    public String getIdentifier() {
        return "legacy";
    }

    @NotNull
    @Override
    public String getName() {
        return "HuskSync v1.x --> v2.x";
    }

    // Stub: returns null despite the @NotNull contract.
    @NotNull
    @Override
    public String getHelpMenu() {
        return null;
    }
}

@ -2,9 +2,13 @@ package net.william278.husksync.migrator;
import net.william278.husksync.HuskSync;
import org.jetbrains.annotations.NotNull;
import net.william278.husksync.data.UserData;
import java.util.concurrent.CompletableFuture;
/**
* A migrator that migrates data from other data formats to HuskSync's {@link UserData} format
*/
public abstract class Migrator {
protected final HuskSync plugin;
@ -22,10 +26,21 @@ public abstract class Migrator {
/**
* Handle a command that sets migrator configuration parameters
*
* @param args The command arguments
*/
public abstract void handleConfigurationCommand(@NotNull String[] args);
/**
* Obfuscates a data string to prevent important data from being logged to console
*
* @param dataString The data string to obfuscate
* @return The data string obfuscated with stars (*)
*/
protected final String obfuscateDataString(@NotNull String dataString) {
return (dataString.length() > 1 ? dataString.charAt(0) + "*".repeat(dataString.length() - 1) : "");
}
@NotNull
public abstract String getIdentifier();

@ -24,8 +24,8 @@ database:
keepalive_time: 0
connection_timeout: 5000
table_names:
players_table: 'husksync_players'
data_table: 'husksync_data'
users_table: 'husksync_users'
user_data_table: 'husksync_user_data'
redis:
credentials:

@ -1,5 +1,5 @@
# Create the players table if it does not exist
CREATE TABLE IF NOT EXISTS `%players_table%`
# Create the users table if it does not exist
CREATE TABLE IF NOT EXISTS `%users_table%`
(
`uuid` char(36) NOT NULL UNIQUE,
`username` varchar(16) NOT NULL,
@ -7,15 +7,15 @@ CREATE TABLE IF NOT EXISTS `%players_table%`
PRIMARY KEY (`uuid`)
);
# Create the player data table if it does not exist
CREATE TABLE IF NOT EXISTS `%data_table%`
# Create the user data table if it does not exist
CREATE TABLE IF NOT EXISTS `%user_data_table%`
(
`version_uuid` char(36) NOT NULL,
`player_uuid` char(36) NOT NULL,
`version_uuid` char(36) NOT NULL UNIQUE,
`player_uuid` char(36) NOT NULL UNIQUE,
`timestamp` datetime NOT NULL,
`save_cause` varchar(32) NOT NULL,
`pinned` boolean NOT NULL DEFAULT FALSE,
`data` mediumblob NOT NULL,
PRIMARY KEY (`version_uuid`),
FOREIGN KEY (`player_uuid`) REFERENCES `%players_table%` (`uuid`) ON DELETE CASCADE
PRIMARY KEY (`version_uuid`, `player_uuid`),
FOREIGN KEY (`player_uuid`) REFERENCES `%users_table%` (`uuid`) ON DELETE CASCADE
);
Loading…
Cancel
Save