feat: PostgreSQL, Mongo Atlas & Replica Support (#255)

* Started impl for mongo

* added docs

* refactor of the mongo code, made mongodb artifacts download at run time, tested and working

* complete all change requests

* remove mongo and bson from relocations as they aren't needed

* changed the config

* updated docs

* not null not null not null not null not null not null not null not null not null not null not null not null not null not null not null not null not null not null not null not null not null not null not null not null

* added postgres support (closes https://github.com/WiIIiam278/HuskSync/issues/212)

* add support for MongoDB Atlas, added Atlas and Postgres to the docs, updated the config example in the docs, also updated the MongoDB driver because apparently I was special and very, very out of date

* Rework how mongo connections are handled, **breaks config for mongo only**; allows for MongoDB Atlas, standard MongoDB AND MongoDB replica sets via the parameters in the advanced mongo settings (see the example sketch after this list); added try/catch on all mongo operations so that they actually throw instead of a cutesy little warning

* small doc change

* whoops, forgot to instantiate MongoCollectionHelper, and added a missing step to the docs for Atlas users

* why, that's a tad embarrassing (grammar mistake)

* add cluster ID to `/husksync status`; shows "MongoDB Atlas" in status if using MongoDB Atlas
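
For reference, a rough sketch of how the reworked advanced mongo settings could point HuskSync at a replica set. The hostname, username, and the `rs0` replica set name are placeholders; `replicaSet` is a standard MongoDB connection-string option rather than anything specific to this plugin:

```yaml
database:
  type: MONGO
  credentials:
    host: mongo1.example.com  # placeholder host
    port: 27017
    database: HuskSync
    username: husksync        # placeholder user
    password: pa55w0rd
  mongo_settings:
    using_atlas: false
    # Appended to the generated mongodb:// URI; rs0 is a placeholder replica set name
    parameters: ?retryWrites=true&w=majority&authSource=HuskSync&replicaSet=rs0
```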

---------

Co-authored-by: William <will27528@gmail.com>
feat/data-edit-commands
Preva1l 8 months ago committed by GitHub
parent b77cf2524d
commit 4c0addfd67

@ -20,6 +20,7 @@ ext {
set 'jedis_version', jedis_version.toString()
set 'mysql_driver_version', mysql_driver_version.toString()
set 'mariadb_driver_version', mariadb_driver_version.toString()
set 'postgres_driver_version', postgres_driver_version.toString()
set 'mongodb_driver_version', mongodb_driver_version.toString()
set 'snappy_version', snappy_version.toString()
}

@ -45,6 +45,7 @@ import net.william278.husksync.data.Serializer;
import net.william278.husksync.database.Database;
import net.william278.husksync.database.MongoDbDatabase;
import net.william278.husksync.database.MySqlDatabase;
import net.william278.husksync.database.PostgresDatabase;
import net.william278.husksync.event.BukkitEventDispatcher;
import net.william278.husksync.hook.PlanHook;
import net.william278.husksync.listener.BukkitEventListener;
@ -165,6 +166,7 @@ public class BukkitHuskSync extends JavaPlugin implements HuskSync, BukkitTask.S
initialize(getSettings().getDatabase().getType().getDisplayName() + " database connection", (plugin) -> {
this.database = switch (settings.getDatabase().getType()) {
case MYSQL, MARIADB -> new MySqlDatabase(this);
case POSTGRES -> new PostgresDatabase(this);
case MONGO -> new MongoDbDatabase(this);
default -> throw new IllegalStateException("Invalid database type");
};

@ -12,5 +12,6 @@ libraries:
- 'redis.clients:jedis:${jedis_version}'
- 'com.mysql:mysql-connector-j:${mysql_driver_version}'
- 'org.mariadb.jdbc:mariadb-java-client:${mariadb_driver_version}'
- 'org.mongodb:mongodb-driver:${mongodb_driver_version}'
- 'org.postgresql:postgresql:${postgres_driver_version}'
- 'org.mongodb:mongodb-driver-sync:${mongodb_driver_version}'
- 'org.xerial.snappy:snappy-java:${snappy_version}'

@ -25,7 +25,8 @@ dependencies {
compileOnly "redis.clients:jedis:$jedis_version"
compileOnly "com.mysql:mysql-connector-j:$mysql_driver_version"
compileOnly "org.mariadb.jdbc:mariadb-java-client:$mariadb_driver_version"
compileOnly "org.mongodb:mongodb-driver:$mongodb_driver_version"
compileOnly "org.postgresql:postgresql:$postgres_driver_version"
compileOnly "org.mongodb:mongodb-driver-sync:$mongodb_driver_version"
compileOnly "org.xerial.snappy:snappy-java:$snappy_version"
testImplementation "redis.clients:jedis:$jedis_version"

@ -28,6 +28,7 @@ import net.kyori.adventure.text.format.TextColor;
import net.william278.desertwell.about.AboutMenu;
import net.william278.desertwell.util.UpdateChecker;
import net.william278.husksync.HuskSync;
import net.william278.husksync.database.Database;
import net.william278.husksync.migrator.Migrator;
import net.william278.husksync.user.CommandUser;
import net.william278.husksync.user.OnlineUser;
@ -216,7 +217,12 @@ public class HuskSyncCommand extends Command implements TabProvider {
plugin.getSettings().getSynchronization().getNetworkLatencyMilliseconds() + "ms"
)),
SERVER_NAME(plugin -> Component.text(plugin.getServerName())),
DATABASE_TYPE(plugin -> Component.text(plugin.getSettings().getDatabase().getType().getDisplayName())),
CLUSTER_ID(plugin -> Component.text(plugin.getSettings().getClusterId().isBlank() ? "None" : plugin.getSettings().getClusterId())),
DATABASE_TYPE(plugin ->
Component.text(plugin.getSettings().getDatabase().getType().getDisplayName() +
(plugin.getSettings().getDatabase().getType() == Database.Type.MONGO ?
(plugin.getSettings().getDatabase().getMongoSettings().isUsingAtlas() ? " Atlas" : "") : ""))
),
IS_DATABASE_LOCAL(plugin -> getLocalhostBoolean(plugin.getSettings().getDatabase().getCredentials().getHost())),
USING_REDIS_SENTINEL(plugin -> getBoolean(
!plugin.getSettings().getRedis().getSentinel().getMaster().isBlank()

@ -30,7 +30,6 @@ import net.william278.husksync.data.Identifier;
import net.william278.husksync.database.Database;
import net.william278.husksync.listener.EventListener;
import net.william278.husksync.sync.DataSyncer;
import org.checkerframework.checker.units.qual.C;
import org.jetbrains.annotations.NotNull;
import java.util.ArrayList;
@ -86,10 +85,10 @@ public class Settings {
@NoArgsConstructor(access = AccessLevel.PRIVATE)
public static class DatabaseSettings {
@Comment("Type of database to use (MYSQL, MARIADB, MONGO)")
@Comment("Type of database to use (MYSQL, MARIADB, POSTGRES, MONGO)")
private Database.Type type = Database.Type.MYSQL;
@Comment("Specify credentials here for your MYSQL, MARIADB OR MONGO database")
@Comment("Specify credentials here for your MYSQL, MARIADB, POSTGRES OR MONGO database")
private DatabaseCredentials credentials = new DatabaseCredentials();
@Getter
@ -101,15 +100,13 @@ public class Settings {
private String database = "HuskSync";
private String username = "root";
private String password = "pa55w0rd";
@Comment("Only change this if you have select MYSQL or MARIADB")
@Comment("Only change this if you have select MYSQL, MARIADB or POSTGRES")
private String parameters = String.join("&",
"?autoReconnect=true", "useSSL=false",
"useUnicode=true", "characterEncoding=UTF-8");
@Comment("Only change this if you have selected MONGO")
private String mongoAuthDb = "admin";
}
@Comment("MYSQL / MARIADB database Hikari connection pool properties. Don't modify this unless you know what you're doing!")
@Comment("MYSQL, MARIADB, POSTGRES database Hikari connection pool properties. Don't modify this unless you know what you're doing!")
private PoolSettings connectionPool = new PoolSettings();
@Getter
@ -123,6 +120,19 @@ public class Settings {
private long connectionTimeout = 5000;
}
@Comment("Advanced MongoDB settings. Don't modify unless you know what you're doing!")
private MongoSettings mongoSettings = new MongoSettings();
@Getter
@Configuration
@NoArgsConstructor(access = AccessLevel.PRIVATE)
public static class MongoSettings {
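// When set to true, an Atlas-style "mongodb+srv://" (DNS seed list) connection string is built and the configured port is ignored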
private boolean usingAtlas = false;
private String parameters = String.join("&",
"?retryWrites=true", "w=majority",
"authSource=HuskSync");
}
@Comment("Names of tables to use on your database. Don't modify this unless you know what you're doing!")
@Getter(AccessLevel.NONE)
private Map<String, String> tableNames = Database.TableName.getDefaults();

@ -19,7 +19,6 @@
package net.william278.husksync.database;
import lombok.AllArgsConstructor;
import lombok.Getter;
import net.william278.husksync.HuskSync;
import net.william278.husksync.config.Settings;
@ -258,6 +257,7 @@ public abstract class Database {
public enum Type {
MYSQL("MySQL", "mysql"),
MARIADB("MariaDB", "mariadb"),
POSTGRES("PostgreSQL", "postgresql"),
MONGO("MongoDB", "mongo");
private final String displayName;

@ -20,6 +20,7 @@
package net.william278.husksync.database;
import com.google.common.collect.Lists;
import com.mongodb.ConnectionString;
import com.mongodb.MongoException;
import com.mongodb.client.FindIterable;
import com.mongodb.client.model.Updates;
@ -64,14 +65,8 @@ public class MongoDbDatabase extends Database {
public void initialize() throws IllegalStateException {
final Settings.DatabaseSettings.DatabaseCredentials credentials = plugin.getSettings().getDatabase().getCredentials();
try {
mongoConnectionHandler = new MongoConnectionHandler(
credentials.getHost(),
credentials.getPort(),
credentials.getUsername(),
credentials.getPassword(),
credentials.getDatabase(),
credentials.getMongoAuthDb()
);
ConnectionString URI = createConnectionURI(credentials);
mongoConnectionHandler = new MongoConnectionHandler(URI, credentials.getDatabase());
mongoCollectionHelper = new MongoCollectionHelper(mongoConnectionHandler);
if (mongoCollectionHelper.getCollection(usersTable) == null) {
mongoCollectionHelper.createCollection(usersTable);
@ -85,6 +80,19 @@ public class MongoDbDatabase extends Database {
}
}
@NotNull
private ConnectionString createConnectionURI(Settings.DatabaseSettings.DatabaseCredentials credentials) {
String baseURI = plugin.getSettings().getDatabase().getMongoSettings().isUsingAtlas() ?
"mongodb+srv://{0}:{1}@{2}/{4}{5}" : "mongodb://{0}:{1}@{2}:{3}/{4}{5}";
baseURI = baseURI.replace("{0}", credentials.getUsername());
baseURI = baseURI.replace("{1}", credentials.getPassword());
baseURI = baseURI.replace("{2}", credentials.getHost());
baseURI = baseURI.replace("{3}", String.valueOf(credentials.getPort()));
baseURI = baseURI.replace("{4}", credentials.getDatabase());
baseURI = baseURI.replace("{5}", plugin.getSettings().getDatabase().getMongoSettings().getParameters());
return new ConnectionString(baseURI);
}
/**
* Ensure a {@link User} has an entry in the database and that their username is up-to-date
*
@ -93,31 +101,38 @@ public class MongoDbDatabase extends Database {
@Blocking
@Override
public void ensureUser(@NotNull User user) {
getUser(user.getUuid()).ifPresentOrElse(
existingUser -> {
if (!existingUser.getUsername().equals(user.getUsername())) {
// Update a user's name if it has changed in the database
try {
Document filter = new Document("uuid", existingUser.getUuid().toString());
Document doc = mongoCollectionHelper.getCollection(usersTable).find(filter).first();
try {
getUser(user.getUuid()).ifPresentOrElse(
existingUser -> {
if (!existingUser.getUsername().equals(user.getUsername())) {
// Update a user's name if it has changed in the database
try {
Document filter = new Document("uuid", existingUser.getUuid().toString());
Document doc = mongoCollectionHelper.getCollection(usersTable).find(filter).first();
if (doc == null) {
throw new MongoException("User document returned null!");
}
Bson updates = Updates.set("uuid", user.getUuid().toString());
mongoCollectionHelper.updateDocument(usersTable, doc, updates);
Bson updates = Updates.set("username", user.getUsername());
mongoCollectionHelper.updateDocument(usersTable, doc, updates);
} catch (MongoException e) {
plugin.log(Level.SEVERE, "Failed to insert a user into the database", e);
}
}
},
() -> {
// Insert new player data into the database
try {
Document doc = new Document("uuid", user.getUuid().toString()).append("username", user.getUsername());
mongoCollectionHelper.insertDocument(usersTable, doc);
} catch (MongoException e) {
plugin.log(Level.SEVERE, "Failed to insert a user into the database", e);
}
}
},
() -> {
// Insert new player data into the database
try {
Document doc = new Document("uuid", user.getUuid().toString()).append("username", user.getUsername());
mongoCollectionHelper.insertDocument(usersTable, doc);
} catch (MongoException e) {
plugin.log(Level.SEVERE, "Failed to insert a user into the database", e);
}
}
);
);
} catch (MongoException e) {
plugin.log(Level.SEVERE, "Failed to ensure user data is in the database", e);
}
}
/**
@ -129,13 +144,18 @@ public class MongoDbDatabase extends Database {
@Blocking
@Override
public Optional<User> getUser(@NotNull UUID uuid) {
Document filter = new Document("uuid", uuid);
Document doc = mongoCollectionHelper.getCollection(usersTable).find(filter).first();
if (doc != null) {
return Optional.of(new User(UUID.fromString(doc.getString("uuid")),
doc.getString("username")));
try {
Document filter = new Document("uuid", uuid);
Document doc = mongoCollectionHelper.getCollection(usersTable).find(filter).first();
if (doc != null) {
return Optional.of(new User(UUID.fromString(doc.getString("uuid")),
doc.getString("username")));
}
return Optional.empty();
} catch (MongoException e) {
plugin.log(Level.SEVERE, "Failed to get user data from the database", e);
return Optional.empty();
}
return Optional.empty();
}
/**
@ -147,13 +167,18 @@ public class MongoDbDatabase extends Database {
@Blocking
@Override
public Optional<User> getUserByName(@NotNull String username) {
Document filter = new Document("username", username);
Document doc = mongoCollectionHelper.getCollection(usersTable).find(filter).first();
if (doc != null) {
return Optional.of(new User(UUID.fromString(doc.getString("uuid")),
doc.getString("username")));
try {
Document filter = new Document("username", username);
Document doc = mongoCollectionHelper.getCollection(usersTable).find(filter).first();
if (doc != null) {
return Optional.of(new User(UUID.fromString(doc.getString("uuid")),
doc.getString("username")));
}
return Optional.empty();
} catch (MongoException e) {
plugin.log(Level.SEVERE, "Failed to get user data from the database", e);
return Optional.empty();
}
return Optional.empty();
}
/**
@ -165,18 +190,23 @@ public class MongoDbDatabase extends Database {
@Blocking
@Override
public Optional<DataSnapshot.Packed> getLatestSnapshot(@NotNull User user) {
Document filter = new Document("player_uuid", user.getUuid().toString());
Document sort = new Document("timestamp", -1); // -1 = Descending
FindIterable<Document> iterable = mongoCollectionHelper.getCollection(userDataTable).find(filter).sort(sort);
Document doc = iterable.first();
if (doc != null) {
final UUID versionUuid = UUID.fromString(doc.getString("version_uuid"));
final OffsetDateTime timestamp = OffsetDateTime.ofInstant(Instant.ofEpochMilli((long) doc.get("timestamp")), TimeZone.getDefault().toZoneId());
final Binary bin = doc.get("data", Binary.class);
final byte[] dataByteArray = bin.getData();
return Optional.of(DataSnapshot.deserialize(plugin, dataByteArray, versionUuid, timestamp));
try {
Document filter = new Document("player_uuid", user.getUuid().toString());
Document sort = new Document("timestamp", -1); // -1 = Descending
FindIterable<Document> iterable = mongoCollectionHelper.getCollection(userDataTable).find(filter).sort(sort);
Document doc = iterable.first();
if (doc != null) {
final UUID versionUuid = UUID.fromString(doc.getString("version_uuid"));
final OffsetDateTime timestamp = OffsetDateTime.ofInstant(Instant.ofEpochMilli((long) doc.get("timestamp")), TimeZone.getDefault().toZoneId());
final Binary bin = doc.get("data", Binary.class);
final byte[] dataByteArray = bin.getData();
return Optional.of(DataSnapshot.deserialize(plugin, dataByteArray, versionUuid, timestamp));
}
return Optional.empty();
} catch (MongoException e) {
plugin.log(Level.SEVERE, "Failed to get latest snapshot from the database", e);
return Optional.empty();
}
return Optional.empty();
}
/**
@ -189,18 +219,23 @@ public class MongoDbDatabase extends Database {
@Override
@NotNull
public List<DataSnapshot.Packed> getAllSnapshots(@NotNull User user) {
final List<DataSnapshot.Packed> retrievedData = Lists.newArrayList();
Document filter = new Document("player_uuid", user.getUuid().toString());
Document sort = new Document("timestamp", -1); // -1 = Descending
FindIterable<Document> iterable = mongoCollectionHelper.getCollection(userDataTable).find(filter).sort(sort);
for (Document doc : iterable) {
final UUID versionUuid = UUID.fromString(doc.getString("version_uuid"));
final OffsetDateTime timestamp = OffsetDateTime.ofInstant(Instant.ofEpochMilli((long) doc.get("timestamp")), TimeZone.getDefault().toZoneId());
final Binary bin = doc.get("data", Binary.class);
final byte[] dataByteArray = bin.getData();
retrievedData.add(DataSnapshot.deserialize(plugin, dataByteArray, versionUuid, timestamp));
try {
final List<DataSnapshot.Packed> retrievedData = Lists.newArrayList();
Document filter = new Document("player_uuid", user.getUuid().toString());
Document sort = new Document("timestamp", -1); // -1 = Descending
FindIterable<Document> iterable = mongoCollectionHelper.getCollection(userDataTable).find(filter).sort(sort);
for (Document doc : iterable) {
final UUID versionUuid = UUID.fromString(doc.getString("version_uuid"));
final OffsetDateTime timestamp = OffsetDateTime.ofInstant(Instant.ofEpochMilli((long) doc.get("timestamp")), TimeZone.getDefault().toZoneId());
final Binary bin = doc.get("data", Binary.class);
final byte[] dataByteArray = bin.getData();
retrievedData.add(DataSnapshot.deserialize(plugin, dataByteArray, versionUuid, timestamp));
}
return retrievedData;
} catch (MongoException e) {
plugin.log(Level.SEVERE, "Failed to get all snapshots from the database", e);
return Lists.newArrayList();
}
return retrievedData;
}
/**
@ -213,17 +248,22 @@ public class MongoDbDatabase extends Database {
@Blocking
@Override
public Optional<DataSnapshot.Packed> getSnapshot(@NotNull User user, @NotNull UUID versionUuid) {
Document filter = new Document("player_uuid", user.getUuid().toString()).append("version_uuid", versionUuid.toString());
Document sort = new Document("timestamp", -1); // -1 = Descending
FindIterable<Document> iterable = mongoCollectionHelper.getCollection(userDataTable).find(filter).sort(sort);
Document doc = iterable.first();
if (doc != null) {
final OffsetDateTime timestamp = OffsetDateTime.ofInstant(Instant.ofEpochMilli((long) doc.get("timestamp")), TimeZone.getDefault().toZoneId());
final Binary bin = doc.get("data", Binary.class);
final byte[] dataByteArray = bin.getData();
return Optional.of(DataSnapshot.deserialize(plugin, dataByteArray, versionUuid, timestamp));
try {
Document filter = new Document("player_uuid", user.getUuid().toString()).append("version_uuid", versionUuid.toString());
Document sort = new Document("timestamp", -1); // -1 = Descending
FindIterable<Document> iterable = mongoCollectionHelper.getCollection(userDataTable).find(filter).sort(sort);
Document doc = iterable.first();
if (doc != null) {
final OffsetDateTime timestamp = OffsetDateTime.ofInstant(Instant.ofEpochMilli((long) doc.get("timestamp")), TimeZone.getDefault().toZoneId());
final Binary bin = doc.get("data", Binary.class);
final byte[] dataByteArray = bin.getData();
return Optional.of(DataSnapshot.deserialize(plugin, dataByteArray, versionUuid, timestamp));
}
return Optional.empty();
} catch (MongoException e) {
plugin.log(Level.SEVERE, "Failed to get snapshot from the database", e);
return Optional.empty();
}
return Optional.empty();
}
/**
@ -253,7 +293,7 @@ public class MongoDbDatabase extends Database {
}
}
} catch (MongoException e) {
plugin.log(Level.SEVERE, "Failed to prune user data from the database", e);
plugin.log(Level.SEVERE, "Failed to rotate snapshots", e);
}
}
@ -308,7 +348,7 @@ public class MongoDbDatabase extends Database {
}
}
} catch (MongoException e) {
plugin.log(Level.SEVERE, "Failed to prune user data from the database", e);
plugin.log(Level.SEVERE, "Failed to rotate latest snapshot from the database", e);
}
}
@ -352,7 +392,7 @@ public class MongoDbDatabase extends Database {
);
mongoCollectionHelper.updateDocument(userDataTable, doc, updates);
} catch (MongoException e) {
plugin.log(Level.SEVERE, "Failed to pin user data in the database", e);
plugin.log(Level.SEVERE, "Failed to update snapshot in the database", e);
}
}

@ -0,0 +1,431 @@
/*
* This file is part of HuskSync, licensed under the Apache License 2.0.
*
* Copyright (c) William278 <will27528@gmail.com>
* Copyright (c) contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.william278.husksync.database;
import com.google.common.collect.Lists;
import com.zaxxer.hikari.HikariDataSource;
import net.william278.husksync.HuskSync;
import net.william278.husksync.adapter.DataAdapter;
import net.william278.husksync.data.DataSnapshot;
import net.william278.husksync.user.User;
import org.jetbrains.annotations.Blocking;
import org.jetbrains.annotations.NotNull;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.sql.*;
import java.time.OffsetDateTime;
import java.util.*;
import java.util.logging.Level;
import static net.william278.husksync.config.Settings.DatabaseSettings;
public class PostgresDatabase extends Database {
private static final String DATA_POOL_NAME = "HuskSyncHikariPool";
private final String flavor;
private final String driverClass;
private HikariDataSource dataSource;
public PostgresDatabase(@NotNull HuskSync plugin) {
super(plugin);
final Type type = plugin.getSettings().getDatabase().getType();
this.flavor = type.getProtocol();
this.driverClass = "org.postgresql.Driver";
}
/**
* Fetch the auto-closeable connection from the hikariDataSource
*
* @return The {@link Connection} to the PostgreSQL database
* @throws SQLException if the connection fails for some reason
*/
@Blocking
@NotNull
private Connection getConnection() throws SQLException {
if (dataSource == null) {
throw new IllegalStateException("The database has not been initialized");
}
return dataSource.getConnection();
}
@Blocking
@Override
public void initialize() throws IllegalStateException {
// Initialize the Hikari pooled connection
final DatabaseSettings.DatabaseCredentials credentials = plugin.getSettings().getDatabase().getCredentials();
dataSource = new HikariDataSource();
dataSource.setDriverClassName(driverClass);
dataSource.setJdbcUrl(String.format("jdbc:%s://%s:%s/%s%s",
flavor,
credentials.getHost(),
credentials.getPort(),
credentials.getDatabase(),
credentials.getParameters()
));
// Authenticate with the database
dataSource.setUsername(credentials.getUsername());
dataSource.setPassword(credentials.getPassword());
// Set connection pool options
final DatabaseSettings.PoolSettings pool = plugin.getSettings().getDatabase().getConnectionPool();
dataSource.setMaximumPoolSize(pool.getMaximumPoolSize());
dataSource.setMinimumIdle(pool.getMinimumIdle());
dataSource.setMaxLifetime(pool.getMaximumLifetime());
dataSource.setKeepaliveTime(pool.getKeepaliveTime());
dataSource.setConnectionTimeout(pool.getConnectionTimeout());
dataSource.setPoolName(DATA_POOL_NAME);
// Set additional connection pool properties
final Properties properties = new Properties();
properties.putAll(
Map.of("cachePrepStmts", "true",
"prepStmtCacheSize", "250",
"prepStmtCacheSqlLimit", "2048",
"useServerPrepStmts", "true",
"useLocalSessionState", "true",
"useLocalTransactionState", "true"
));
properties.putAll(
Map.of(
"rewriteBatchedStatements", "true",
"cacheResultSetMetadata", "true",
"cacheServerConfiguration", "true",
"elideSetAutoCommits", "true",
"maintainTimeStats", "false")
);
dataSource.setDataSourceProperties(properties);
// Prepare database schema; make tables if they don't exist
try (Connection connection = dataSource.getConnection()) {
final String[] databaseSchema = getSchemaStatements(String.format("database/%s_schema.sql", flavor));
try (Statement statement = connection.createStatement()) {
for (String tableCreationStatement : databaseSchema) {
statement.execute(tableCreationStatement);
}
} catch (SQLException e) {
throw new IllegalStateException("Failed to create database tables. Please ensure you are running PostgreSQL " +
"and that your connecting user account has privileges to create tables.", e);
}
} catch (SQLException | IOException e) {
throw new IllegalStateException("Failed to establish a connection to the PostgreSQL database. " +
"Please check the supplied database credentials in the config file", e);
}
}
@Blocking
@Override
public void ensureUser(@NotNull User user) {
getUser(user.getUuid()).ifPresentOrElse(
existingUser -> {
if (!existingUser.getUsername().equals(user.getUsername())) {
// Update a user's name if it has changed in the database
try (Connection connection = getConnection()) {
try (PreparedStatement statement = connection.prepareStatement(formatStatementTables("""
UPDATE "%users_table%"
SET "username"=?
WHERE "uuid"=?"""))) {
statement.setString(1, user.getUsername());
statement.setObject(2, existingUser.getUuid());
statement.executeUpdate();
}
plugin.log(Level.INFO, "Updated " + user.getUsername() + "'s name in the database (" + existingUser.getUsername() + " -> " + user.getUsername() + ")");
} catch (SQLException e) {
plugin.log(Level.SEVERE, "Failed to update a user's name on the database", e);
}
}
},
() -> {
// Insert new player data into the database
try (Connection connection = getConnection()) {
try (PreparedStatement statement = connection.prepareStatement(formatStatementTables("""
INSERT INTO "%users_table%" ("uuid","username")
VALUES (?,?);"""))) {
statement.setObject(1, user.getUuid());
statement.setString(2, user.getUsername());
statement.executeUpdate();
}
} catch (SQLException e) {
plugin.log(Level.SEVERE, "Failed to insert a user into the database", e);
}
}
);
}
@Blocking
@Override
public Optional<User> getUser(@NotNull UUID uuid) {
try (Connection connection = getConnection()) {
try (PreparedStatement statement = connection.prepareStatement(formatStatementTables("""
SELECT "uuid", "username"
FROM "%users_table%"
WHERE "uuid"=?"""))) {
statement.setObject(1, uuid);
final ResultSet resultSet = statement.executeQuery();
if (resultSet.next()) {
return Optional.of(new User((UUID) resultSet.getObject("uuid"),
resultSet.getString("username")));
}
}
} catch (SQLException e) {
plugin.log(Level.SEVERE, "Failed to fetch a user from uuid from the database", e);
}
return Optional.empty();
}
@Blocking
@Override
public Optional<User> getUserByName(@NotNull String username) {
try (Connection connection = getConnection()) {
try (PreparedStatement statement = connection.prepareStatement(formatStatementTables("""
SELECT "uuid", "username"
FROM "%users_table%"
WHERE "username"=?"""))) {
statement.setString(1, username);
final ResultSet resultSet = statement.executeQuery();
if (resultSet.next()) {
return Optional.of(new User((UUID) resultSet.getObject("uuid"),
resultSet.getString("username")));
}
}
} catch (SQLException e) {
plugin.log(Level.SEVERE, "Failed to fetch a user by name from the database", e);
}
return Optional.empty();
}
@Blocking
@Override
public Optional<DataSnapshot.Packed> getLatestSnapshot(@NotNull User user) {
try (Connection connection = getConnection()) {
try (PreparedStatement statement = connection.prepareStatement(formatStatementTables("""
SELECT "version_uuid", "timestamp", "data"
FROM "%user_data_table%"
WHERE "player_uuid"=?
ORDER BY "timestamp" DESC
LIMIT 1;"""))) {
statement.setObject(1, user.getUuid());
final ResultSet resultSet = statement.executeQuery();
if (resultSet.next()) {
final UUID versionUuid = (UUID) resultSet.getObject("version_uuid");
final OffsetDateTime timestamp = OffsetDateTime.ofInstant(
resultSet.getTimestamp("timestamp").toInstant(), TimeZone.getDefault().toZoneId()
);
final byte[] dataByteArray = resultSet.getBytes("data");
return Optional.of(DataSnapshot.deserialize(plugin, dataByteArray, versionUuid, timestamp));
}
}
} catch (SQLException | DataAdapter.AdaptionException e) {
plugin.log(Level.SEVERE, "Failed to fetch a user's current user data from the database", e);
}
return Optional.empty();
}
@Blocking
@Override
@NotNull
public List<DataSnapshot.Packed> getAllSnapshots(@NotNull User user) {
final List<DataSnapshot.Packed> retrievedData = Lists.newArrayList();
try (Connection connection = getConnection()) {
try (PreparedStatement statement = connection.prepareStatement(formatStatementTables("""
SELECT "version_uuid", "timestamp", "data"
FROM "%user_data_table%"
WHERE "player_uuid"=?
ORDER BY "timestamp" DESC;"""))) {
statement.setObject(1, user.getUuid());
final ResultSet resultSet = statement.executeQuery();
while (resultSet.next()) {
final UUID versionUuid = (UUID) resultSet.getObject("version_uuid");
final OffsetDateTime timestamp = OffsetDateTime.ofInstant(
resultSet.getTimestamp("timestamp").toInstant(), TimeZone.getDefault().toZoneId()
);
final byte[] dataByteArray = resultSet.getBytes("data");
retrievedData.add(DataSnapshot.deserialize(plugin, dataByteArray, versionUuid, timestamp));
}
return retrievedData;
}
} catch (SQLException | DataAdapter.AdaptionException e) {
plugin.log(Level.SEVERE, "Failed to fetch a user's current user data from the database", e);
}
return retrievedData;
}
@Blocking
@Override
public Optional<DataSnapshot.Packed> getSnapshot(@NotNull User user, @NotNull UUID versionUuid) {
try (Connection connection = getConnection()) {
try (PreparedStatement statement = connection.prepareStatement(formatStatementTables("""
SELECT "version_uuid", "timestamp", "data"
FROM "%user_data_table%"
WHERE "player_uuid"=? AND "version_uuid"=?
ORDER BY "timestamp" DESC
LIMIT 1;"""))) {
statement.setObject(1, user.getUuid());
statement.setObject(2, versionUuid);
final ResultSet resultSet = statement.executeQuery();
if (resultSet.next()) {
final OffsetDateTime timestamp = OffsetDateTime.ofInstant(
resultSet.getTimestamp("timestamp").toInstant(), TimeZone.getDefault().toZoneId()
);
final byte[] dataByteArray = resultSet.getBytes("data");
return Optional.of(DataSnapshot.deserialize(plugin, dataByteArray, versionUuid, timestamp));
}
}
} catch (SQLException | DataAdapter.AdaptionException e) {
plugin.log(Level.SEVERE, "Failed to fetch specific user data by UUID from the database", e);
}
return Optional.empty();
}
@Blocking
@Override
protected void rotateSnapshots(@NotNull User user) {
final List<DataSnapshot.Packed> unpinnedUserData = getAllSnapshots(user).stream()
.filter(dataSnapshot -> !dataSnapshot.isPinned()).toList();
final int maxSnapshots = plugin.getSettings().getSynchronization().getMaxUserDataSnapshots();
if (unpinnedUserData.size() > maxSnapshots) {
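// Delete the oldest unpinned snapshots so that only the configured maximum number remains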
try (Connection connection = getConnection()) {
try (PreparedStatement statement = connection.prepareStatement(formatStatementTables("""
DELETE FROM "%user_data_table%"
WHERE "player_uuid"=?
AND "pinned" = FALSE
ORDER BY "timestamp" ASC
LIMIT %entry_count%;""".replace("%entry_count%",
Integer.toString(unpinnedUserData.size() - maxSnapshots))))) {
statement.setObject(1, user.getUuid());
statement.executeUpdate();
}
} catch (SQLException e) {
plugin.log(Level.SEVERE, "Failed to prune user data from the database", e);
}
}
}
@Blocking
@Override
public boolean deleteSnapshot(@NotNull User user, @NotNull UUID versionUuid) {
try (Connection connection = getConnection()) {
try (PreparedStatement statement = connection.prepareStatement(formatStatementTables("""
DELETE FROM "%user_data_table%"
WHERE "player_uuid"=? AND "version_uuid"=?
LIMIT 1;"""))) {
statement.setObject(1, user.getUuid());
statement.setString(2, versionUuid.toString());
return statement.executeUpdate() > 0;
}
} catch (SQLException e) {
plugin.log(Level.SEVERE, "Failed to delete specific user data from the database", e);
}
return false;
}
@Blocking
@Override
protected void rotateLatestSnapshot(@NotNull User user, @NotNull OffsetDateTime within) {
try (Connection connection = getConnection()) {
try (PreparedStatement statement = connection.prepareStatement(formatStatementTables("""
DELETE FROM "%user_data_table%"
WHERE "player_uuid"=? AND "timestamp" = (
SELECT "timestamp"
FROM "%user_data_table%"
WHERE "player_uuid"=? AND "timestamp" > ? AND "pinned" = FALSE
ORDER BY "timestamp" ASC
LIMIT 1
);"""))) {
statement.setObject(1, user.getUuid());
statement.setObject(2, user.getUuid());
statement.setTimestamp(3, Timestamp.from(within.toInstant()));
statement.executeUpdate();
}
} catch (SQLException e) {
plugin.log(Level.SEVERE, "Failed to delete a user's data from the database", e);
}
}
@Blocking
@Override
protected void createSnapshot(@NotNull User user, @NotNull DataSnapshot.Packed data) {
try (Connection connection = getConnection()) {
try (PreparedStatement statement = connection.prepareStatement(formatStatementTables("""
INSERT INTO "%user_data_table%"
("player_uuid","version_uuid","timestamp","save_cause","pinned","data")
VALUES (?,?,?,?,?,?);"""))) {
statement.setObject(1, user.getUuid());
statement.setObject(2, data.getId());
statement.setTimestamp(3, Timestamp.from(data.getTimestamp().toInstant()));
statement.setString(4, data.getSaveCause().name());
statement.setBoolean(5, data.isPinned());
statement.setBytes(6, data.asBytes(plugin));
statement.executeUpdate();
}
} catch (SQLException | DataAdapter.AdaptionException e) {
plugin.log(Level.SEVERE, "Failed to set user data in the database", e);
}
}
@Blocking
@Override
public void updateSnapshot(@NotNull User user, @NotNull DataSnapshot.Packed data) {
try (Connection connection = getConnection()) {
try (PreparedStatement statement = connection.prepareStatement(formatStatementTables("""
UPDATE "%user_data_table%"
SET "save_cause"=?,"pinned"=?,"data"=?
WHERE "player_uuid"=? AND "version_uuid"=?
LIMIT 1;"""))) {
statement.setString(1, data.getSaveCause().name());
statement.setBoolean(2, data.isPinned());
statement.setBytes(3, data.asBytes(plugin));
statement.setObject(4, user.getUuid());
statement.setObject(5, data.getId());
statement.executeUpdate();
}
} catch (SQLException e) {
plugin.log(Level.SEVERE, "Failed to pin user data in the database", e);
}
}
@Override
public void wipeDatabase() {
try (Connection connection = getConnection()) {
try (Statement statement = connection.createStatement()) {
statement.executeUpdate(formatStatementTables("DELETE FROM \"%user_data_table%\";"));
}
} catch (SQLException e) {
plugin.log(Level.SEVERE, "Failed to wipe the database", e);
}
}
@Override
public void terminate() {
if (dataSource != null) {
if (!dataSource.isClosed()) {
dataSource.close();
}
}
}
}

@ -19,17 +19,15 @@
package net.william278.husksync.database.mongo;
import com.mongodb.ConnectionString;
import com.mongodb.MongoClientSettings;
import com.mongodb.MongoCredential;
import com.mongodb.ServerAddress;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoDatabase;
import lombok.Getter;
import org.bson.UuidRepresentation;
import org.jetbrains.annotations.NotNull;
import java.util.Collections;
@Getter
public class MongoConnectionHandler {
private final MongoClient mongoClient;
@ -37,24 +35,21 @@ public class MongoConnectionHandler {
/**
* Initiate a connection to a Mongo Server
* @param host The IP/Host Name of the Mongo Server
* @param port The Port of the Mongo Server
* @param username The Username of the user with the appropriate permissions
* @param password The Password of the user with the appropriate permissions
* @param databaseName The database to use.
* @param authDb The database to authenticate with.
* @param uri The connection string
*/
public MongoConnectionHandler(@NotNull String host, @NotNull Integer port, @NotNull String username, @NotNull String password, @NotNull String databaseName, @NotNull String authDb) {
final ServerAddress serverAddress = new ServerAddress(host, port);
final MongoCredential credential = MongoCredential.createCredential(username, authDb, password.toCharArray());
final MongoClientSettings settings = MongoClientSettings.builder()
.credential(credential)
.applyToClusterSettings(builder -> builder.hosts(Collections.singletonList(serverAddress)))
.build();
this.mongoClient = MongoClients.create(settings);
this.database = mongoClient.getDatabase(databaseName);
public MongoConnectionHandler(@NotNull ConnectionString uri, @NotNull String databaseName) {
try {
final MongoClientSettings settings = MongoClientSettings.builder()
.applyConnectionString(uri)
.uuidRepresentation(UuidRepresentation.STANDARD)
.build();
this.mongoClient = MongoClients.create(settings);
this.database = mongoClient.getDatabase(databaseName);
} catch (Exception e) {
throw new IllegalStateException("Failed to establish a connection to the MongoDB database. " +
"Please check the supplied database credentials in the config file", e);
}
}
/**

@ -0,0 +1,22 @@
-- Create the users table if it does not exist
CREATE TABLE IF NOT EXISTS "%users_table%"
(
uuid uuid NOT NULL UNIQUE,
username varchar(16) NOT NULL,
PRIMARY KEY (uuid)
);
-- Create the user data table if it does not exist
CREATE TABLE IF NOT EXISTS "%user_data_table%"
(
version_uuid uuid NOT NULL UNIQUE,
player_uuid uuid NOT NULL,
timestamp timestamp NOT NULL,
save_cause varchar(32) NOT NULL,
pinned boolean NOT NULL DEFAULT FALSE,
data bytea NOT NULL,
PRIMARY KEY (version_uuid, player_uuid),
FOREIGN KEY (player_uuid) REFERENCES "%users_table%" (uuid) ON DELETE CASCADE
);

@ -35,23 +35,28 @@ brigadier_tab_completion: false
enable_plan_hook: true
# Database settings
database:
# Type of database to use (MYSQL, MARIADB)
# Type of database to use (MYSQL, MARIADB, POSTGRES, MONGO)
type: MYSQL
# Specify credentials here for your MYSQL or MARIADB database
# Specify credentials here for your MYSQL, MARIADB, POSTGRES OR MONGO database
credentials:
host: localhost
port: 3306
database: HuskSync
username: root
password: pa55w0rd
# Only change this if you have selected MYSQL, MARIADB or POSTGRES
parameters: ?autoReconnect=true&useSSL=false&useUnicode=true&characterEncoding=UTF-8
# MYSQL / MARIADB database Hikari connection pool properties. Don't modify this unless you know what you're doing!
# MYSQL, MARIADB, POSTGRES database Hikari connection pool properties. Don't modify this unless you know what you're doing!
connection_pool:
maximum_pool_size: 10
minimum_idle: 10
maximum_lifetime: 1800000
keepalive_time: 0
connection_timeout: 5000
# Advanced MongoDB settings. Don't modify unless you know what you're doing!
mongo_settings:
using_atlas: false
parameters: ?retryWrites=true&w=majority&authSource=HuskSync
# Names of tables to use on your database. Don't modify this unless you know what you're doing!
table_names:
users: husksync_users
@ -113,25 +118,26 @@ synchronization:
# Which data types to synchronize.
# Docs: https://william278.net/docs/husksync/sync-features
features:
persistent_data: true
inventory: true
game_mode: true
advancements: true
experience: true
ender_chest: true
potion_effects: true
location: false
statistics: true
health: true
ender_chest: true
experience: true
advancements: true
game_mode: true
inventory: true
persistent_data: true
hunger: true
health: true
statistics: true
location: false
# Commands which should be blocked before a player has finished syncing (Use * to block all commands)
blacklisted_commands_while_locked:
- '*'
# Event priorities for listeners (HIGHEST, NORMAL, LOWEST). Change if you encounter plugin conflicts
event_priorities:
quit_listener: LOWEST
join_listener: LOWEST
quit_listener: LOWEST
death_listener: NORMAL
```
</details>

@ -3,7 +3,7 @@ This will walk you through installing HuskSync on your network of Spigot servers
## Requirements
> **Note:** If the plugin fails to load, please check that you are not running an [incompatible version combination](Unsupported-Versions)
* A MySQL Database (v8.0+) (or MongoDB Database)
* A MySQL Database (v8.0+) (MariaDB, PostgreSQL or MongoDB are also supported)
* A Redis Database (v5.0+) &mdash; see [[FAQs]] for more details.
* Any number of Spigot servers, connected by a BungeeCord or Velocity-based proxy (Minecraft v1.17.1+, running Java 17+)
@ -17,15 +17,25 @@ This will walk you through installing HuskSync on your network of Spigot servers
- Advanced users: If you'd prefer, you can just create one config.yml file and create symbolic links in each `/plugins/HuskSync/` folder to it to make updating it easier.
### 3. Enter MySQL & Redis database credentials
- Navigate to the HuskSync config file on each server (`~/plugins/HuskSync/config.yml`)
- Under `credentials` in the `database` section, enter the credentials of your MySQL Database. You shouldn't touch the `connection_pool` properties.
- Under `credentials` in the `database` section, enter the credentials of your MySQL, MariaDB, PostgreSQL or MongoDB database, as in the example below. You shouldn't touch the `connection_pool` properties.
- Under `credentials` in the `redis` section, enter the credentials of your Redis Database. If your Redis server doesn't have a password, leave the password blank as it is.
- Unless you want to have multiple clusters of servers within your network, each with separate user data, you should not change the value of `cluster_id`.
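For example, the `database` section for a typical MySQL setup mirrors the defaults shipped in `config.yml` (an illustrative sketch; substitute your own host and credentials):
```yaml
database:
  type: MYSQL
  credentials:
    host: localhost
    port: 3306
    database: HuskSync
    username: root
    password: pa55w0rd
```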
<details>
<summary><b>For MongoDB Users</b></summary>
- Navigate to the HuskSync config file on each server (`~/plugins/HuskSync/config.yml`)
- Set `type` in the `database` section to `MONGO`
- Under `credentials` in the `database` section, enter the credentials of your MongoDB Database. You shouldn't touch the `connection_pool` properties.
- Be sure to fill in the `mongo_auth_db` field with the database that the username and password are authenticated against. (In most cases this will, and should, be the same database you are trying to connect to.)
<details>
<summary><b>MongoDB Atlas</b></summary>
- Navigate to the HuskSync config file on each server (`~/plugins/HuskSync/config.yml`)
- Set `using_atlas` in the `mongo_settings` section to `true`.
- Remove `&authSource=HuskSync` from `parameters` in the `mongo_settings`.
(The `port` setting in `credentials` is disregarded when using Atlas.)
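With those changes applied, your `mongo_settings` section should look roughly like this (a sketch based on the default parameters with `authSource` removed):
```yaml
mongo_settings:
  using_atlas: true
  parameters: ?retryWrites=true&w=majority
```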
</details>
</details>
### 4. Set server names in server.yml files

@ -10,5 +10,6 @@ plugin_description=A modern, cross-server player data synchronization system
jedis_version=5.1.0
mysql_driver_version=8.3.0
mariadb_driver_version=3.3.2
mongodb_driver_version=3.12.14
postgres_driver_version=42.7.2
mongodb_driver_version=5.0.0
snappy_version=1.1.10.5

@ -3,5 +3,6 @@ libraries:
- 'redis.clients:jedis:${jedis_version}'
- 'com.mysql:mysql-connector-j:${mysql_driver_version}'
- 'org.mariadb.jdbc:mariadb-java-client:${mariadb_driver_version}'
- 'org.mongodb:mongodb-driver:${mongodb_driver_version}'
- 'org.postgresql:postgresql:${postgres_driver_version}'
- 'org.mongodb:mongodb-driver-sync:${mongodb_driver_version}'
- 'org.xerial.snappy:snappy-java:${snappy_version}'