org.redkalex.source.vertx.VertxSqlDataSource Maven / Gradle / Ivy
Go to download
Show more of this group Show more artifacts with this name
Show all versions of redkale-plugins Show documentation
Redkale-Plugins -- java framework
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.redkalex.source.vertx;
import io.vertx.core.*;
import io.vertx.core.metrics.MetricsOptions;
import io.vertx.sqlclient.*;
import io.vertx.sqlclient.impl.ListTuple;
import java.io.Serializable;
import java.lang.reflect.Method;
import java.net.URI;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.*;
import java.util.logging.Level;
import java.util.stream.Stream;
import org.redkale.annotation.AutoLoad;
import org.redkale.annotation.Nullable;
import org.redkale.annotation.ResourceType;
import org.redkale.inject.ResourceEvent;
import org.redkale.net.WorkThread;
import org.redkale.service.Local;
import org.redkale.source.*;
import static org.redkale.source.DataSources.*;
import org.redkale.util.*;
/**
* 只实现了 Mysql 和 Postgresql
* see
* https://github.com/eclipse-vertx/vertx-sql-client/blob/master/vertx-pg-client/src/main/java/examples/SqlClientExamples.java
*
* @author zhangjx
*/
@Local
@AutoLoad(false)
@SuppressWarnings("unchecked")
@ResourceType(DataSource.class)
public class VertxSqlDataSource extends AbstractDataSqlSource {
// Key under which the Vert.x MySQL client exposes the first auto-generated id of a batch
// insert (read in insertDBAsync). NOTE(review): generic parameters appear to have been
// stripped from this file during extraction; originally likely PropertyKind<Long>.
protected static final PropertyKind MYSQL_LAST_INSERTED_ID =
PropertyKind.create("last-inserted-id", Long.class);
// Picks a random pool when the calling thread is not an indexed WorkThread.
protected final Random random = new Random();
// Shared Vert.x instance backing every pool; created in init(), closed in destroy().
protected Vertx vertx;
// true for PostgreSQL: prepared-statement placeholders are $1,$2... instead of ?.
protected boolean dollar;
// Read-side connection/pool configuration and the per-cpu pool array built from it.
protected SqlConnectOptions readOptions;
protected PoolOptions readPoolOptions;
protected Pool[] readThreadPools;
// Write-side configuration; aliases the read-side objects when read/write configs are the same.
protected SqlConnectOptions writeOptions;
protected PoolOptions writePoolOptions;
protected Pool[] writeThreadPools;
// true when dbtype is postgresql (same condition as dollar).
protected boolean pgsql;
@Override
public void init(AnyValue conf) {
    super.init(conf);
    // PostgreSQL uses positional $n placeholders and supports RETURNING; MySQL uses ?.
    this.pgsql = "postgresql".equalsIgnoreCase(dbtype);
    this.dollar = this.pgsql;
    this.vertx = createVertx();

    this.readOptions = createConnectOptions(readConfProps);
    this.readPoolOptions = createPoolOptions(readConfProps);
    String optionsClassName = readOptions.getClass().getName();
    RedkaleClassLoader.putReflectionClass(optionsClassName);
    RedkaleClassLoader.putReflectionPublicConstructors(readOptions.getClass(), optionsClassName);

    // One pool per cpu so an indexed WorkThread can always hit "its" pool (see readPool).
    final int count = Utility.cpus();
    Pool[] readPools = new Pool[count];
    for (int i = 0; i < count; i++) {
        readPools[i] = Pool.pool(vertx, readOptions, readPoolOptions);
    }
    this.readThreadPools = readPools;

    if (readConfProps == writeConfProps) {
        // Single data source: writes share the read-side options and pools.
        this.writeOptions = readOptions;
        this.writePoolOptions = readPoolOptions;
        this.writeThreadPools = readPools;
    } else {
        this.writeOptions = createConnectOptions(writeConfProps);
        this.writePoolOptions = createPoolOptions(writeConfProps);
        Pool[] writePools = new Pool[count];
        for (int i = 0; i < count; i++) {
            writePools[i] = Pool.pool(vertx, writeOptions, writePoolOptions);
        }
        this.writeThreadPools = writePools;
    }
}
/**
 * Builds the shared Vert.x instance: one event loop per cpu, native transport preferred,
 * HA and metrics off, and the blocked-thread checker effectively silenced (interval in hours).
 */
protected Vertx createVertx() {
    VertxOptions options = new VertxOptions();
    options.setEventLoopPoolSize(Utility.cpus());
    options.setPreferNativeTransport(true);
    options.setDisableTCCL(true);
    options.setHAEnabled(false);
    options.setBlockedThreadCheckIntervalUnit(TimeUnit.HOURS);
    options.setMetricsOptions(new MetricsOptions().setEnabled(false));
    return Vertx.vertx(options);
}
/** @return true when the configured dbtype is PostgreSQL */
public boolean isPgsql() {
    return this.pgsql;
}
/**
 * Reconfigures the single (read == write) data source: builds fresh options and pools from
 * the new properties, points both read and write sides at them, then closes the old pools.
 */
@Override
protected void updateOneResourceChange(Properties newProps, ResourceEvent[] events) {
    final Pool[] stalePools = this.readThreadPools;
    SqlConnectOptions connectOptions = createConnectOptions(newProps);
    PoolOptions poolOptions = createPoolOptions(newProps);
    this.readOptions = connectOptions;
    this.readPoolOptions = poolOptions;
    Pool[] freshPools = new Pool[Utility.cpus()];
    for (int i = 0; i < freshPools.length; i++) {
        freshPools[i] = Pool.pool(vertx, connectOptions, poolOptions);
    }
    this.readThreadPools = freshPools;
    // Single-source mode: the write side aliases the read side.
    this.writeOptions = connectOptions;
    this.writePoolOptions = poolOptions;
    this.writeThreadPools = freshPools;
    if (stalePools != null) {
        for (Pool stale : stalePools) {
            stale.close();
        }
    }
}
/**
 * Reconfigures only the read side from the new properties and closes the previous read pools.
 * NOTE(review): if the old read pools were shared with the write side (single-source mode),
 * closing them here would also close the write pools — presumably the framework only fires
 * this event when read/write configs are separate; confirm against AbstractDataSqlSource.
 */
@Override
protected void updateReadResourceChange(Properties newReadProps, ResourceEvent[] events) {
    final Pool[] stalePools = this.readThreadPools;
    SqlConnectOptions connectOptions = createConnectOptions(newReadProps);
    PoolOptions poolOptions = createPoolOptions(newReadProps);
    this.readOptions = connectOptions;
    this.readPoolOptions = poolOptions;
    Pool[] freshPools = new Pool[Utility.cpus()];
    for (int i = 0; i < freshPools.length; i++) {
        freshPools[i] = Pool.pool(vertx, connectOptions, poolOptions);
    }
    this.readThreadPools = freshPools;
    if (stalePools != null) {
        for (Pool stale : stalePools) {
            stale.close();
        }
    }
}
/**
 * Reconfigures only the write side from the new properties and closes the previous write pools.
 */
@Override
protected void updateWriteResourceChange(Properties newWriteProps, ResourceEvent[] events) {
    final Pool[] stalePools = this.writeThreadPools;
    SqlConnectOptions connectOptions = createConnectOptions(newWriteProps);
    PoolOptions poolOptions = createPoolOptions(newWriteProps);
    this.writeOptions = connectOptions;
    this.writePoolOptions = poolOptions;
    Pool[] freshPools = new Pool[Utility.cpus()];
    for (int i = 0; i < freshPools.length; i++) {
        freshPools[i] = Pool.pool(vertx, connectOptions, poolOptions);
    }
    this.writeThreadPools = freshPools;
    if (stalePools != null) {
        for (Pool stale : stalePools) {
            stale.close();
        }
    }
}
/**
 * Builds the pool options for one data source. The configured max-connections value is
 * divided (rounded up) across the per-cpu pools created in init(). For MySQL/PostgreSQL the
 * options are wrapped into the driver-internal subclass with command pipelining enabled;
 * any reflection failure falls back to the plain options.
 *
 * @param prop data source properties (reads DATA_SOURCE_MAXCONNS)
 * @return pool options, possibly a driver-specific pipelined subclass
 */
protected PoolOptions createPoolOptions(Properties prop) {
    int maxConns = Math.max(1, Integer.decode(prop.getProperty(DATA_SOURCE_MAXCONNS, "" + Utility.cpus())));
    // ceil(maxConns / cpus): each of the Utility.cpus() pools gets an equal share.
    PoolOptions options = new PoolOptions().setMaxSize((maxConns + Utility.cpus() - 1) / Utility.cpus());
    try {
        if ("mysql".equalsIgnoreCase(dbtype())) {
            return pipelinedPoolOptions("io.vertx.mysqlclient.impl.MySQLPoolOptions", options);
        } else if ("postgresql".equalsIgnoreCase(dbtype())) {
            return pipelinedPoolOptions("io.vertx.pgclient.impl.PgPoolOptions", options);
        } else {
            return options;
        }
    } catch (Throwable t) {
        // Pipelining is only an optimization; log and keep the plain options.
        logger.log(Level.INFO, VertxSqlDataSource.class.getSimpleName() + " createPoolOptions failed", t);
        return options;
    }
}

/**
 * Wraps the generic options into the named driver-internal PoolOptions subclass and enables
 * pipelining via reflection (the class is internal, so no compile-time dependency), also
 * registering the reflective accesses for native-image support.
 */
private PoolOptions pipelinedPoolOptions(String className, PoolOptions options) throws Exception {
    Class<?> myclass = Class.forName(className);
    Object myopts = myclass.getConstructor(PoolOptions.class).newInstance(options);
    Method method = myclass.getMethod("setPipelined", boolean.class);
    method.invoke(myopts, true);
    RedkaleClassLoader.putReflectionClass(myclass.getName());
    RedkaleClassLoader.putReflectionPublicConstructors(myclass, myclass.getName());
    RedkaleClassLoader.putReflectionMethod(myclass.getName(), method);
    return (PoolOptions) myopts;
}
/**
 * Builds driver-specific connect options from the data source properties: instantiates the
 * MySQL/PostgreSQL options class reflectively with a high pipelining limit, enables
 * prepared-statement caching, then applies host/port/user/password/database and any
 * query-string properties parsed from the configured URL.
 *
 * @param prop data source properties (DATA_SOURCE_URL/USER/PASSWORD)
 * @return configured connect options
 * @throws SourceException if the driver options class cannot be created
 * @throws UnsupportedOperationException for any dbtype other than mysql/postgresql
 */
protected SqlConnectOptions createConnectOptions(Properties prop) {
    final SqlConnectOptions sqlOptions;
    if ("mysql".equalsIgnoreCase(dbtype())) {
        sqlOptions = newPipeliningConnectOptions("io.vertx.mysqlclient.MySQLConnectOptions");
    } else if ("postgresql".equalsIgnoreCase(dbtype())) {
        sqlOptions = newPipeliningConnectOptions("io.vertx.pgclient.PgConnectOptions");
    } else {
        throw new UnsupportedOperationException("dbtype(" + dbtype() + ") not supported yet.");
    }
    sqlOptions.setCachePreparedStatements(true);
    String url = prop.getProperty(DATA_SOURCE_URL);
    if (url.startsWith("jdbc:")) {
        // Accept JDBC-style urls: jdbc:mysql://... -> mysql://...
        url = url.substring("jdbc:".length());
    }
    final URI uri = URI.create(url);
    sqlOptions.setHost(uri.getHost());
    if (uri.getPort() > 0) {
        sqlOptions.setPort(uri.getPort());
    }
    String user = prop.getProperty(DATA_SOURCE_USER);
    if (user != null && !user.trim().isEmpty()) {
        sqlOptions.setUser(user.trim());
    }
    String pwd = prop.getProperty(DATA_SOURCE_PASSWORD);
    if (pwd != null && !pwd.trim().isEmpty()) {
        sqlOptions.setPassword(pwd.trim());
    }
    String path = uri.getPath();
    if (path != null && path.length() > 1) {
        if (path.startsWith("/")) {
            path = path.substring(1);
        }
        sqlOptions.setDatabase(path);
    }
    String query = uri.getQuery();
    if (query != null && !query.isEmpty()) {
        // Urls copied from XML config may carry '&' escaped as '&amp;'; un-escape before splitting.
        // (The original line was a no-op replace("&", "&"), almost certainly an extraction artifact.)
        query = query.replace("&amp;", "&");
        for (String str : query.split("&")) {
            if (str.isEmpty()) {
                continue;
            }
            int pos = str.indexOf('=');
            if (pos < 1) {
                continue;
            }
            sqlOptions.addProperty(str.substring(0, pos), str.substring(pos + 1));
        }
    }
    return sqlOptions;
}

/**
 * Reflectively instantiates the named SqlConnectOptions subclass (no compile-time driver
 * dependency), registers it for native-image reflection, and raises its pipelining limit.
 */
private SqlConnectOptions newPipeliningConnectOptions(String clzName) {
    try {
        Class<?> clazz = Thread.currentThread().getContextClassLoader().loadClass(clzName);
        RedkaleClassLoader.putReflectionPublicConstructors(clazz, clazz.getName());
        SqlConnectOptions sqlOptions = (SqlConnectOptions) clazz.getConstructor().newInstance();
        Method method = sqlOptions.getClass().getMethod("setPipeliningLimit", int.class);
        method.invoke(sqlOptions, 100000);
        return sqlOptions;
    } catch (Exception e) {
        throw new SourceException(e);
    }
}
// Max connections per read pool (the configured total divided across cpus in createPoolOptions).
@Override
protected int readMaxConns() {
return readPoolOptions.getMaxSize();
}
// Max connections per write pool (the configured total divided across cpus in createPoolOptions).
@Override
protected int writeMaxConns() {
return writePoolOptions.getMaxSize();
}
/**
 * Picks a read pool: the one matching the WorkThread's index when available,
 * otherwise a random pool.
 */
protected Pool readPool(WorkThread thread) {
    final Pool[] pools = this.readThreadPools;
    final int idx = thread == null ? -1 : thread.index();
    if (idx >= 0 && idx < pools.length) {
        return pools[idx];
    }
    return pools[random.nextInt(pools.length)];
}
/**
 * Picks a write pool: the one matching the WorkThread's index when available,
 * otherwise a random pool.
 */
protected Pool writePool(WorkThread thread) {
    final Pool[] pools = this.writeThreadPools;
    final int idx = thread == null ? -1 : thread.index();
    if (idx >= 0 && idx < pools.length) {
        return pools[idx];
    }
    return pools[random.nextInt(pools.length)];
}
// Shuts the data source down; delegates to destroy(null) which closes vertx and all pools.
@Local
@Override
public void close() {
destroy(null);
}
/**
 * Releases all resources: the shared Vert.x instance, every read pool, and — when the write
 * pools are not the same array as the read pools — every write pool.
 */
@Override
public void destroy(AnyValue config) {
    super.destroy(config);
    if (this.vertx != null) {
        this.vertx.close();
    }
    final Pool[] rpools = this.readThreadPools;
    if (rpools != null) {
        for (Pool pool : rpools) {
            pool.close();
        }
    }
    final Pool[] wpools = this.writeThreadPools;
    if (wpools != null && wpools != rpools) {
        for (Pool pool : wpools) {
            pool.close();
        }
    }
}
/** Placeholder for the index-th bind parameter: PostgreSQL positional "$n", otherwise "?". */
@Override
protected String prepareParamSign(int index) {
    if (dollar) {
        return "$" + index;
    }
    return "?";
}
// This implementation is fully asynchronous (Vert.x client); always true.
@Override
protected final boolean isAsync() {
return true;
}
// Asynchronously batch-inserts the given entities.
// Flow: build one tuple per entity from the insert attributes, run executeBatch; on a
// "table not exist" failure, create the table (single-table: run the DDLs sequentially;
// sharded: run the copy-table DDL) and re-submit the same batch via a self-reference to
// this handler; on success, back-fill auto-generated primary keys (PostgreSQL via
// "RETURNING <pk>" rows, MySQL via the last-inserted-id property plus offsets).
// NOTE(review): generic type parameters were stripped from this file by extraction
// (e.g. ObjectRef>>>); code kept byte-identical.
@Override
protected CompletableFuture insertDBAsync(EntityInfo info, T... values) {
final long s = System.currentTimeMillis();
final WorkThread workThread = WorkThread.currentWorkThread();
final Attribute[] attrs = info.getInsertAttributes();
// One ListTuple of insert-attribute values per entity, in attribute order.
final List objs = new ArrayList<>(values.length);
for (T value : values) {
final ListTuple params = new ListTuple(new ArrayList<>());
for (Attribute attr : attrs) {
params.addValue(attr.get(value));
}
objs.add(params);
}
String sql0 = dollar ? info.getInsertDollarPrepareSQL(values[0]) : info.getInsertQuestionPrepareSQL(values[0]);
// PostgreSQL has no lastInsertedId; append RETURNING <pk> to read generated keys back.
final String sql =
info.isAutoGenerated() && isPgsql() ? (sql0 + " RETURNING " + info.getPrimarySQLColumn()) : sql0;
final CompletableFuture future = new CompletableFuture<>();
// Self-reference so the handler can re-submit the batch after creating missing tables.
final ObjectRef>>> selfHandlerRef = new ObjectRef<>();
final Handler>> handler = (AsyncResult> event) -> {
slowLog(s, sql);
if (event.failed()) {
// Only a missing-table error is recoverable here; anything else fails the future.
if (!isTableNotExist(info, event.cause())) {
completeExceptionally(workThread, future, event.cause());
return;
}
if (info.getTableStrategy() == null) { // 单表模式 (single-table mode)
String[] tableSqls = createTableSqls(info);
if (tableSqls == null) { // 没有建表DDL (no create-table DDL available)
completeExceptionally(workThread, future, event.cause());
return;
}
// 创建单表结构 (create the table, running the DDL statements one after another)
AtomicInteger createIndex = new AtomicInteger();
final ObjectRef>>> createHandlerRef = new ObjectRef<>();
final Handler>> createHandler = (AsyncResult> event2) -> {
if (event2.failed()) {
completeExceptionally(workThread, future, event2.cause());
} else if (createIndex.incrementAndGet() < tableSqls.length) {
// More DDL statements remain: run the next one with this same handler.
writePool(workThread)
.query(tableSqls[createIndex.get()])
.execute(createHandlerRef.get());
} else {
// 重新提交新增记录 (all DDLs done: re-submit the original insert batch)
writePool(workThread).preparedQuery(sql).executeBatch(objs, selfHandlerRef.get());
}
};
createHandlerRef.set(createHandler);
writePool(workThread).query(tableSqls[createIndex.get()]).execute(createHandler);
} else { // 分表模式 (sharded-table mode)
// 执行一遍复制表操作 (create the shard table by copying the template table)
final String copySql = getTableCopySql(info, info.getTable(values[0]));
final ObjectRef>>> copySqlHandlerRef = new ObjectRef<>();
final Handler>> copySqlHandler = (AsyncResult> event2) -> {
if (event2.failed()) {
completeExceptionally(workThread, future, event2.cause());
} else {
// 重新提交新增记录 (re-submit the original insert batch)
writePool(workThread).preparedQuery(sql).executeBatch(objs, selfHandlerRef.get());
}
};
copySqlHandlerRef.set(copySqlHandler);
writePool(workThread).query(copySql).execute(copySqlHandler);
}
return;
}
if (info.isAutoGenerated()) {
// Back-fill generated primary keys into the entity objects.
int i = -1;
RowSet res = event.result();
final Attribute primary = info.getPrimary();
final Class primaryType = primary.type();
if (isPgsql()) {
// One result set per batched statement; each holds the RETURNING row for that entity.
for (RowSet rows = res; rows != null; rows = rows.next()) {
T entity = values[++i];
Row row = rows.iterator().next();
if (primaryType == int.class || primaryType == Integer.class) {
primary.set(entity, row.getInteger(0));
} else if (primaryType == long.class || primaryType == Long.class) {
primary.set(entity, row.getLong(0));
} else if (primaryType == String.class) {
primary.set(entity, row.getString(0));
} else {
primary.set(entity, row.get(primaryType, 0));
}
}
} else {
// MySQL: ids are assumed consecutive starting at last-inserted-id — TODO confirm
// this holds under concurrent inserts with the configured increment settings.
long firstId = res.property(MYSQL_LAST_INSERTED_ID);
for (T entity : values) {
long id = firstId + (++i);
if (primaryType == int.class || primaryType == Integer.class) {
primary.set(entity, (int) id);
} else if (primaryType == long.class || primaryType == Long.class) {
primary.set(entity, id);
} else if (primaryType == String.class) {
primary.set(entity, String.valueOf(id));
} else {
primary.set(entity, id);
}
}
}
}
complete(workThread, future, event.result().rowCount());
};
selfHandlerRef.set(handler);
writePool(workThread).preparedQuery(sql).executeBatch(objs, handler);
return future;
}
/**
 * Asynchronously executes the delete statements, logging them at FINEST
 * (with the LIMIT clause appended when a flipper limit is set).
 */
@Override
protected CompletableFuture deleteDBAsync(
        EntityInfo info,
        String[] tables,
        Flipper flipper,
        FilterNode node,
        Map> pkmap,
        String... sqls) {
    if (info.isLoggable(logger, Level.FINEST)) {
        String debugsql = Flipper.hasLimit(flipper) ? (sqls[0] + " LIMIT " + flipper.getLimit()) : sqls[0];
        if (info.isLoggable(logger, Level.FINEST, debugsql)) {
            String typeName = info.getType().getSimpleName();
            if (sqls.length == 1) {
                logger.finest(typeName + " delete sql=" + debugsql);
            } else if (flipper != null && flipper.getLimit() > 0) {
                logger.finest(typeName + " limit " + flipper.getLimit() + " delete sqls=" + Arrays.toString(sqls));
            } else {
                logger.finest(typeName + " delete sqls=" + Arrays.toString(sqls));
            }
        }
    }
    return executeUpdate(info, sqls, null, fetchSize(flipper), false, null, null);
}
/** Asynchronously executes the clear-table (truncate-style) statements, logging them at FINEST. */
@Override
protected CompletableFuture clearTableDBAsync(
        EntityInfo info, final String[] tables, FilterNode node, String... sqls) {
    if (info.isLoggable(logger, Level.FINEST) && info.isLoggable(logger, Level.FINEST, sqls[0])) {
        String typeName = info.getType().getSimpleName();
        logger.finest(sqls.length == 1
                ? (typeName + " clearTable sql=" + sqls[0])
                : (typeName + " clearTable sqls=" + Arrays.toString(sqls)));
    }
    return executeUpdate(info, sqls, null, 0, false, null, null);
}
/**
 * Asynchronously creates a table: when a copy-table DDL is supplied it takes
 * precedence over the plain create DDLs.
 */
@Override
protected CompletableFuture createTableDBAsync(
        EntityInfo info, String copyTableSql, final Serializable pk, String... sqls) {
    String[] ddls = copyTableSql == null ? sqls : new String[] {copyTableSql};
    return executeUpdate(info, ddls, null, 0, false, null, null);
}
/** Asynchronously executes the drop-table statements, logging them at FINEST. */
@Override
protected CompletableFuture dropTableDBAsync(
        EntityInfo info, final String[] tables, FilterNode node, String... sqls) {
    if (info.isLoggable(logger, Level.FINEST) && info.isLoggable(logger, Level.FINEST, sqls[0])) {
        String typeName = info.getType().getSimpleName();
        logger.finest(sqls.length == 1
                ? (typeName + " dropTable sql=" + sqls[0])
                : (typeName + " dropTable sqls=" + Arrays.toString(sqls)));
    }
    return executeUpdate(info, sqls, null, 0, false, null, null);
}
@Override
protected CompletableFuture updateEntityDBAsync(EntityInfo info, final T... values) {
final long s = System.currentTimeMillis();
final WorkThread workThread = WorkThread.currentWorkThread();
final Attribute primary = info.getPrimary();
final Attribute[] attrs = info.getUpdateAttributes();
final List objs = new ArrayList<>(values.length);
for (T value : values) {
final ListTuple params = new ListTuple(new ArrayList<>(attrs.length + 1));
for (Attribute attr : attrs) {
params.addValue(attr.get(value));
}
params.addValue(primary.get(value)); // 最后一个是主键
objs.add(params);
}
final String sql =
dollar ? info.getUpdateDollarPrepareSQL(values[0]) : info.getUpdateQuestionPrepareSQL(values[0]);
final CompletableFuture future = new CompletableFuture<>();
writePool(workThread).preparedQuery(sql).executeBatch(objs, (AsyncResult> event) -> {
slowLog(s, sql);
if (event.failed()) {
completeExceptionally(workThread, future, event.cause());
return;
}
complete(workThread, future, event.result().rowCount());
});
return future;
}
/**
 * Asynchronously runs a column-update statement. When the sql carries byte[] blobs
 * and/or targets multiple shard tables, a bind-parameter tuple list is built:
 * one tuple per table, blobs first and the table name last.
 */
@Override
protected CompletableFuture updateColumnDBAsync(
        EntityInfo info, Flipper flipper, UpdateSqlInfo sql) {
    if (info.isLoggable(logger, Level.FINEST)) {
        boolean limited = flipper != null && flipper.getLimit() > 0;
        String debugsql = limited ? (sql.sql + " LIMIT " + flipper.getLimit()) : sql.sql;
        if (info.isLoggable(logger, Level.FINEST, debugsql)) {
            logger.finest(info.getType().getSimpleName() + " update sql=" + debugsql);
        }
    }
    List params = null;
    if (sql.blobs != null || sql.tables != null) {
        if (sql.tables == null) {
            params = List.of(Tuple.wrap(sql.blobs));
        } else {
            params = new ArrayList<>();
            for (String table : sql.tables) {
                if (sql.blobs == null) {
                    params.add(Tuple.of(table));
                } else {
                    List merged = new ArrayList(sql.blobs);
                    merged.add(table);
                    params.add(Tuple.wrap(merged));
                }
            }
        }
    }
    // 有params的情况表示 prepareSQL带byte[]的绑定参数 (non-null params => prepared SQL has byte[] binds)
    return executeUpdate(info, new String[] {sql.sql}, null, fetchSize(flipper), false, null, params);
}
@Override
protected CompletableFuture
© 2015 - 2024 Weber Informatics LLC | Privacy Policy