package com.github.chengyuxing.sql;
import com.github.chengyuxing.common.DataRow;
import com.github.chengyuxing.common.anno.Alias;
import com.github.chengyuxing.common.io.FileResource;
import com.github.chengyuxing.common.tuple.Pair;
import com.github.chengyuxing.common.utils.StringUtil;
import com.github.chengyuxing.sql.datasource.DataSourceUtil;
import com.github.chengyuxing.sql.exceptions.ConnectionStatusException;
import com.github.chengyuxing.sql.exceptions.IllegalSqlException;
import com.github.chengyuxing.sql.exceptions.UncheckedSqlException;
import com.github.chengyuxing.sql.page.IPageable;
import com.github.chengyuxing.sql.page.PageHelper;
import com.github.chengyuxing.sql.page.PageHelperProvider;
import com.github.chengyuxing.sql.page.impl.Db2PageHelper;
import com.github.chengyuxing.sql.page.impl.MysqlPageHelper;
import com.github.chengyuxing.sql.page.impl.OraclePageHelper;
import com.github.chengyuxing.sql.page.impl.PGPageHelper;
import com.github.chengyuxing.sql.support.*;
import com.github.chengyuxing.sql.support.executor.EntitySaveExecutor;
import com.github.chengyuxing.sql.support.executor.Executor;
import com.github.chengyuxing.sql.support.executor.QueryExecutor;
import com.github.chengyuxing.sql.support.executor.SaveExecutor;
import com.github.chengyuxing.sql.types.Param;
import com.github.chengyuxing.sql.utils.JdbcUtil;
import com.github.chengyuxing.sql.utils.SqlGenerator;
import com.github.chengyuxing.sql.utils.SqlUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.sql.DataSource;
import java.sql.*;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
 * Default implementation of the {@link Baki} interface.
 * If an {@link XQLFileManager} is configured, every method also accepts a sql name
 * ({@code &<alias>.<sqlName>}) in place of a literal sql statement.
 * Example:
 *
 * try ({@link Stream}<{@link DataRow}> s = baki.query("&sys.getUser").stream()) {
 *     s.forEach(System.out::println);
 * }
 */
public class BakiDao extends JdbcSupport implements Baki {
private final static Logger log = LoggerFactory.getLogger(BakiDao.class);
private final DataSource dataSource;
private DatabaseMetaData metaData;
private String databaseId;
private SqlGenerator sqlGenerator;
//---------optional properties------
/**
* Global custom page helper provider.
*/
private PageHelperProvider globalPageHelperProvider;
/**
* Custom sql interceptor.
*/
private SqlInterceptor sqlInterceptor;
/**
* Custom prepared sql statement parameter value handler.
*/
private StatementValueHandler statementValueHandler;
/**
* Do something after parse dynamic sql.
*/
private AfterParseDynamicSql afterParseDynamicSql;
/**
* XQL file manager.
*/
private XQLFileManager xqlFileManager;
/**
* Batch size for execute batch.
*/
private int batchSize = 1000;
/**
* Named parameter prefix symbol.
*/
private char namedParamPrefix = ':';
/**
* If the XQL file has changed, it is reloaded every time {@link #parseSql(String, Map)} is invoked.
*/
private boolean reloadXqlOnGet = false;
/**
* Load {@code xql-file-manager-}{@link #databaseId() databaseId}{@code .yml} first if it exists,
* otherwise fall back to {@code xql-file-manager.yml}.
*/
private boolean autoXFMConfig = false;
/**
* Constructs a new BakiDao with initial datasource.
*
* @param dataSource datasource
*/
public BakiDao(DataSource dataSource) {
this.dataSource = dataSource;
init();
}
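// Usage sketch (not normative): constructing a BakiDao from any javax.sql.DataSource.
// "ds" is an assumed, already configured DataSource; the query reuses the sql name from the class javadoc.
//
//   BakiDao baki = new BakiDao(ds);
//   try (Stream<DataRow> s = baki.query("&sys.getUser").stream()) {
//       s.forEach(System.out::println);
//   }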
/**
* Initialize default configuration properties.
*/
protected void init() {
this.sqlGenerator = new SqlGenerator(namedParamPrefix);
this.statementValueHandler = (ps, index, value, metaData) -> JdbcUtil.setStatementValue(ps, index, value);
this.afterParseDynamicSql = sql -> sql;
using(c -> {
try {
this.metaData = c.getMetaData();
this.databaseId = this.metaData.getDatabaseProductName().toLowerCase();
return 0;
} catch (SQLException e) {
throw new UncheckedSqlException("initialize metadata error.", e);
}
});
}
@Override
public QueryExecutor query(String sql) {
return new QueryExecutor(sql) {
@Override
public Stream<DataRow> stream() {
return executeQueryStream(sql, args);
}
@Override
public List<Map<String, Object>> maps() {
try (Stream<DataRow> s = stream()) {
return s.collect(Collectors.toList());
}
}
@Override
public List<DataRow> rows() {
try (Stream<DataRow> s = stream()) {
return s.collect(Collectors.toList());
}
}
@Override
public <T> List<T> entities(Class<T> entityClass) {
try (Stream<DataRow> s = stream()) {
return s.map(d -> d.toEntity(entityClass)).collect(Collectors.toList());
}
}
@Override
public DataRow zip() {
return DataRow.zip(rows());
}
@Override
public IPageable pageable(int page, int size) {
IPageable iPageable = new SimplePageable(sql, page, size);
return iPageable.args(args);
}
@Override
public DataRow findFirstRow() {
return findFirst().orElseGet(() -> new DataRow(0));
}
@Override
public <T> T findFirstEntity(Class<T> entityClass) {
return findFirst().map(d -> d.toEntity(entityClass)).orElse(null);
}
@Override
public Optional<DataRow> findFirst() {
return findFirst(true);
}
@Override
public Optional<DataRow> findFirst(boolean pageQuery1st) {
if (pageQuery1st) {
PageHelper pageHelper = defaultPager();
pageHelper.init(1, 1, 1);
Map<String, Integer> pagedArgs = pageHelper.pagedArgs();
if (Objects.nonNull(pagedArgs)) {
args.putAll(pagedArgs);
}
Pair<String, Map<String, Object>> result = parseSql(sql, args);
String pagedSql = pageHelper.pagedSql(result.getItem1());
try (Stream<DataRow> s = executeQueryStream(pagedSql, result.getItem2())) {
return s.peek(d -> d.remove(PageHelper.ROW_NUM_KEY)).findFirst();
}
}
try (Stream<DataRow> s = stream()) {
return s.findFirst();
}
}
@Override
public boolean exists() {
return findFirst().isPresent();
}
};
}
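// Sketch of the other query entry points defined above (reusing "baki" and the sql name from the
// constructor example); Function.identity() keeps each paged row as a DataRow.
//
//   Optional<DataRow> first = baki.query("&sys.getUser").findFirst();
//   PagedResource<DataRow> page = baki.query("&sys.getUser")
//           .pageable(1, 10)
//           .collect(Function.identity());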
@Override
public SaveExecutor update(String tableName, String where) {
return new SaveExecutor() {
@Override
public int save(Map<String, ?> data) {
return save(Collections.singletonList(data));
}
@Override
public int save(Collection<? extends Map<String, ?>> data) {
if (data.isEmpty()) {
return 0;
}
String whereStatement = parseSql(where, Collections.emptyMap()).getItem1();
List<String> tableFields = safe ? getTableFields(tableName) : new ArrayList<>();
if (fast) {
String update = sqlGenerator.generateNamedParamUpdate(tableName,
whereStatement,
data.iterator().next(),
tableFields,
ignoreNull);
return executeBatchUpdate(update, data, batchSize);
}
int i = 0;
for (Map<String, ?> item : data) {
String update = sqlGenerator.generateNamedParamUpdate(tableName,
whereStatement,
item,
tableFields,
ignoreNull);
i += executeUpdate(update, item);
}
return i;
}
};
}
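// Sketch: updating a hypothetical "test.user" table. The where fragment goes through parseSql like
// any other sql, so its named parameters are bound from the same data map as the updated columns.
//
//   Map<String, Object> row = new HashMap<>();
//   row.put("name", "cyx");
//   row.put("id", 1);
//   baki.update("test.user", "id = :id").save(row);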
@Override
public SaveExecutor insert(String tableName) {
return new SaveExecutor() {
@Override
public int save(Map<String, ?> data) {
return save(Collections.singletonList(data));
}
@Override
public int save(Collection<? extends Map<String, ?>> data) {
if (data.isEmpty()) {
return 0;
}
List<String> tableFields = safe ? getTableFields(tableName) : new ArrayList<>();
if (fast) {
String insert = sqlGenerator.generateNamedParamInsert(tableName, data.iterator().next(), tableFields, ignoreNull);
return executeBatchUpdate(insert, data, batchSize);
}
int i = 0;
for (Map<String, ?> item : data) {
String insert = sqlGenerator.generateNamedParamInsert(tableName, item, tableFields, ignoreNull);
i += executeUpdate(insert, item);
}
return i;
}
};
}
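// Sketch: inserting into a hypothetical "test.user" table; a collection is saved row by row
// unless the executor's fast (batch) mode is enabled.
//
//   Map<String, Object> a = new HashMap<>();
//   a.put("name", "a");
//   Map<String, Object> b = new HashMap<>();
//   b.put("name", "b");
//   baki.insert("test.user").save(Arrays.asList(a, b));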
@Override
public SaveExecutor delete(String tableName, String where) {
return new SaveExecutor() {
@Override
public int save(Map<String, ?> data) {
return save(Collections.singletonList(data));
}
@Override
public int save(Collection<? extends Map<String, ?>> data) {
String whereSql = parseSql(where, Collections.emptyMap()).getItem1();
String w = StringUtil.startsWithIgnoreCase(whereSql.trim(), "where") ? whereSql : "\nwhere " + whereSql;
String delete = "delete from " + tableName + w;
if (data.isEmpty()) {
return executeUpdate(delete, Collections.emptyMap());
}
if (fast) {
return executeBatchUpdate(delete, data, batchSize);
}
int i = 0;
for (Map<String, ?> item : data) {
i += executeUpdate(delete, item);
}
return i;
}
};
}
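// Sketch: deleting from a hypothetical "test.user" table by a named-parameter where clause;
// an empty collection executes the delete once with no bound arguments.
//
//   baki.delete("test.user", "id = :id").save(Collections.singletonMap("id", 1));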
@Override
public EntitySaveExecutor entity(Class<?> entityClass) {
String tableName = getTableNameByAlias(entityClass);
return new EntitySaveExecutor() {
@Override
public SaveExecutor insert() {
return BakiDao.this.insert(tableName);
}
@Override
public SaveExecutor update(String where) {
return BakiDao.this.update(tableName, where);
}
@Override
public SaveExecutor delete(String where) {
return BakiDao.this.delete(tableName, where);
}
};
}
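// Sketch: entity-based access. The table name comes from @Alias when present, otherwise the simple
// class name; the User class and "t_user" alias are assumed examples.
//
//   @Alias("t_user")
//   class User { /* fields, getters and setters */ }
//
//   baki.entity(User.class).insert().save(Collections.singletonMap("name", "cyx"));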
@Override
public Executor of(String sql) {
return new Executor() {
@Override
public DataRow execute() {
return BakiDao.super.execute(sql, Collections.emptyMap());
}
@Override
public DataRow execute(Map<String, ?> args) {
return BakiDao.super.execute(sql, args);
}
@Override
public int executeBatch(String... moreSql) {
List<String> sqlList = new ArrayList<>(Arrays.asList(moreSql));
sqlList.add(0, sql);
return BakiDao.super.executeBatch(sqlList, batchSize);
}
@Override
public int executeBatch(List<String> moreSql) {
return BakiDao.super.executeBatch(moreSql, batchSize);
}
@Override
public int executeBatch(Collection<? extends Map<String, ?>> data) {
Map<String, ?> arg = data.isEmpty() ? new HashMap<>() : data.iterator().next();
Pair<String, Map<String, Object>> parsed = parseSql(sql, arg);
Collection<? extends Map<String, ?>> newData;
if (parsed.getItem2().containsKey(XQLFileManager.DynamicSqlParser.FOR_VARS_KEY) &&
parsed.getItem1().contains(XQLFileManager.DynamicSqlParser.VAR_PREFIX)) {
List<Map<String, Object>> list = new ArrayList<>();
for (Map<String, ?> item : data) {
list.add(parseSql(sql, item).getItem2());
}
newData = list;
} else {
newData = data;
}
return executeBatchUpdate(parsed.getItem1(), newData, batchSize);
}
@Override
public DataRow call(Map<String, Param> params) {
return executeCallStatement(sql, params);
}
};
}
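// Sketch of the plain executor defined above: a one-off non-query statement and a named-parameter
// batch; the "test.tmp" table is an assumed example.
//
//   baki.of("create table if not exists test.tmp(id int)").execute();
//   baki.of("insert into test.tmp(id) values (:id)")
//           .executeBatch(Arrays.asList(
//                   Collections.singletonMap("id", 1),
//                   Collections.singletonMap("id", 2)));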
@Override
public <T> T using(Function<Connection, T> func) {
Connection connection = null;
try {
connection = getConnection();
return func.apply(connection);
} finally {
releaseConnection(connection, getDataSource());
}
}
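// Sketch: borrowing a raw Connection through using(...), which always releases it afterwards
// (the same mechanism init() relies on to read the database metadata).
//
//   String url = baki.using(c -> {
//       try {
//           return c.getMetaData().getURL();
//       } catch (SQLException e) {
//           throw new UncheckedSqlException("read jdbc url failed.", e);
//       }
//   });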
@Override
public DatabaseMetaData metaData() {
return this.metaData;
}
@Override
public String databaseId() {
return this.databaseId;
}
/**
* Simple page helper implementation.
*/
class SimplePageable extends IPageable {
/**
* Constructs a SimplePageable.
*
* @param recordQuery record query statement
* @param page current page
* @param size page size
*/
public SimplePageable(String recordQuery, int page, int size) {
super(recordQuery, page, size);
}
@Override
public <T> PagedResource<T> collect(Function<DataRow, T> mapper) {
Pair<String, Map<String, Object>> result = parseSql(recordQuery, args);
String query = result.getItem1();
Map<String, Object> data = result.getItem2();
if (count == null) {
String cq = countQuery;
if (cq == null) {
cq = sqlGenerator.generateCountQuery(query);
}
List<DataRow> cnRows = execute(cq, data).getFirstAs();
Object cn = cnRows.get(0).getFirst();
if (cn instanceof Integer) {
count = (Integer) cn;
} else {
count = Integer.parseInt(cn.toString());
}
}
PageHelper pageHelper = null;
if (pageHelperProvider != null) {
pageHelper = pageHelperProvider.customPageHelper(metaData, databaseId, namedParamPrefix);
}
if (pageHelper == null) {
pageHelper = defaultPager();
}
pageHelper.init(page, size, count);
Args<Integer> pagedArgs = pageHelper.pagedArgs();
if (pagedArgs == null) {
pagedArgs = Args.of();
}
data.putAll(rewriteArgsFunc == null ? pagedArgs : rewriteArgsFunc.apply(pagedArgs));
String executeQuery = disablePageSql ? query : pageHelper.pagedSql(query);
try (Stream<DataRow> s = executeQueryStream(executeQuery, data)) {
List<T> list = s.peek(d -> d.remove(PageHelper.ROW_NUM_KEY))
.map(mapper)
.collect(Collectors.toList());
return PagedResource.of(pageHelper, list);
}
}
}
/**
* Built-in default page helper.
*
* @return PageHelper instance
* @throws UnsupportedOperationException if there is no built-in page helper implementation for the current database
* @throws ConnectionStatusException connection status exception
*/
protected PageHelper defaultPager() {
if (Objects.nonNull(globalPageHelperProvider)) {
PageHelper pageHelper = globalPageHelperProvider.customPageHelper(metaData, databaseId, namedParamPrefix);
if (Objects.nonNull(pageHelper)) {
return pageHelper;
}
}
switch (databaseId) {
case "oracle":
return new OraclePageHelper();
case "postgresql":
case "sqlite":
return new PGPageHelper();
case "mysql":
case "mariadb":
return new MysqlPageHelper();
case "z/os":
case "sqlds":
case "iseries":
case "db2 for unix/windows":
case "cloudscape":
case "informix":
return new Db2PageHelper();
default:
throw new UnsupportedOperationException("pager of \"" + databaseId + "\" default not implement currently, see method 'setGlobalPageHelperProvider'.");
}
}
/**
* Get table name by {@link Alias @Alias} .
*
* @param entityClass entity class
* @return table name
*/
protected String getTableNameByAlias(Class<?> entityClass) {
if (entityClass == null) {
throw new IllegalArgumentException("entityClass must not be null.");
}
String tableName = entityClass.getSimpleName();
if (entityClass.isAnnotationPresent(Alias.class)) {
tableName = entityClass.getAnnotation(Alias.class).value();
}
return tableName;
}
/**
* Get all fields from target table.
*
* @param tableName table name
* @return table fields
* @throws UncheckedSqlException query exception
*/
protected List<String> getTableFields(String tableName) {
String sql = parseSql("select * from " + tableName + " where 1 = 2", Collections.emptyMap()).getItem1();
return execute(sql, sc -> {
ResultSet fieldsResultSet = sc.executeQuery();
List<String> fields = Arrays.asList(JdbcUtil.createNames(fieldsResultSet, ""));
JdbcUtil.closeResultSet(fieldsResultSet);
return fields;
});
}
/**
* Reload xql file manager by database id if necessary.
*/
protected void loadXFMConfigByDatabaseId() {
if (Objects.nonNull(xqlFileManager)) {
String pathByDb = "xql-file-manager-" + databaseId + ".yml";
FileResource resource = new FileResource(pathByDb);
if (!resource.exists()) {
resource = new FileResource(XQLFileManager.YML);
}
if (resource.exists()) {
XQLFileManagerConfig config = new XQLFileManagerConfig();
config.loadYaml(resource);
config.copyStateTo(xqlFileManager);
xqlFileManager.init();
log.debug("{} detected by '{}' and loaded!", resource.getFileName(), databaseId);
}
}
}
/**
* Get sql from {@link XQLFileManager} by name if the given sql starts with the symbol ({@code &}).
* Sql name format: {@code &<alias>.<sqlName>}
*
* @param sql sql statement or sql name
* @param args args
* @return sql
* @throws NullPointerException if the given sql starts with the symbol ({@code &}) but no {@link XQLFileManager} is configured
* @throws IllegalSqlException  if the sql interceptor rejects the sql
*/
@Override
protected Pair<String, Map<String, Object>> parseSql(String sql, Map<String, ?> args) {
Map<String, Object> data = new HashMap<>();
if (Objects.nonNull(args)) {
data.putAll(args);
}
String trimSql = SqlUtil.trimEnd(sql.trim());
if (trimSql.startsWith("&")) {
if (Objects.nonNull(xqlFileManager)) {
if (reloadXqlOnGet) {
log.warn("please set 'reloadXqlOnGet' to false in production environment for improve concurrency.");
xqlFileManager.init();
}
Pair<String, Map<String, Object>> result = xqlFileManager.get(trimSql.substring(1), data);
trimSql = afterParseDynamicSql.handle(result.getItem1());
// #for expression temp variables stored in _for variable.
if (!result.getItem2().isEmpty()) {
data.put(XQLFileManager.DynamicSqlParser.FOR_VARS_KEY, result.getItem2());
}
} else {
throw new NullPointerException("can not find property 'xqlFileManager'.");
}
}
if (trimSql.contains("${")) {
trimSql = SqlUtil.formatSql(trimSql, data, sqlGenerator.getTemplateFormatter());
if (Objects.nonNull(xqlFileManager)) {
trimSql = SqlUtil.formatSql(trimSql, xqlFileManager.getConstants(), sqlGenerator.getTemplateFormatter());
}
}
if (Objects.nonNull(sqlInterceptor)) {
boolean request = sqlInterceptor.preHandle(trimSql, data, metaData);
if (!request) {
throw new IllegalSqlException("permission denied, reject to execute sql.\nSQL: " + trimSql + "\nArgs: " + data);
}
}
return Pair.of(trimSql, data);
}
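// Sketch of what parseSql returns, assuming an XQLFileManager constant "tb" with value "test.user":
// the ${tb} template is expanded from the constants while the named parameter is left untouched.
//
//   parseSql("select * from ${tb} where id = :id", Collections.emptyMap())
//   // -> item1: "select * from test.user where id = :id", item2: {}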
@Override
protected SqlGenerator sqlGenerator() {
return sqlGenerator;
}
@Override
protected DataSource getDataSource() {
return dataSource;
}
@Override
protected Connection getConnection() {
try {
return DataSourceUtil.getConnection(dataSource);
} catch (SQLException e) {
throw new ConnectionStatusException("fetch connection failed.", e);
}
}
@Override
protected void releaseConnection(Connection connection, DataSource dataSource) {
DataSourceUtil.releaseConnection(connection, dataSource);
}
@Override
protected void doHandleStatementValue(PreparedStatement ps, int index, Object value) throws SQLException {
statementValueHandler.handle(ps, index, value, metaData);
}
public SqlGenerator getSqlGenerator() {
return sqlGenerator;
}
public void setGlobalPageHelperProvider(PageHelperProvider globalPageHelperProvider) {
this.globalPageHelperProvider = globalPageHelperProvider;
}
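// Sketch: supplying a page helper for a database that defaultPager() does not cover. Written as a
// lambda on the assumption that PageHelperProvider declares only customPageHelper (as invoked above);
// SqlServerPageHelper is an assumed custom PageHelper implementation, and returning null falls back
// to the built-in defaults.
//
//   baki.setGlobalPageHelperProvider((metaData, databaseId, namedParamPrefix) ->
//           "microsoft sql server".equals(databaseId) ? new SqlServerPageHelper() : null);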
public void setSqlInterceptor(SqlInterceptor sqlInterceptor) {
this.sqlInterceptor = sqlInterceptor;
}
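// Sketch: a deny-list interceptor; preHandle returning false makes parseSql throw IllegalSqlException
// (see above). Written as a lambda on the assumption that SqlInterceptor is a functional interface.
//
//   baki.setSqlInterceptor((sql, args, metaData) ->
//           !sql.toLowerCase().contains("drop table"));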
public void setStatementValueHandler(StatementValueHandler statementValueHandler) {
if (Objects.nonNull(statementValueHandler))
this.statementValueHandler = statementValueHandler;
}
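// Sketch: customizing parameter binding while delegating everything else to JdbcUtil.setStatementValue,
// mirroring the default handler installed in init().
//
//   baki.setStatementValueHandler((ps, index, value, metaData) -> {
//       if (value instanceof java.time.LocalDateTime) {
//           ps.setTimestamp(index, java.sql.Timestamp.valueOf((java.time.LocalDateTime) value));
//       } else {
//           JdbcUtil.setStatementValue(ps, index, value);
//       }
//   });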
public void setAfterParseDynamicSql(AfterParseDynamicSql afterParseDynamicSql) {
if (Objects.nonNull(afterParseDynamicSql))
this.afterParseDynamicSql = afterParseDynamicSql;
}
public void setXqlFileManager(XQLFileManager xqlFileManager) {
if (Objects.nonNull(xqlFileManager)) {
this.xqlFileManager = xqlFileManager;
this.xqlFileManager.setDatabaseId(databaseId);
this.xqlFileManager.setTemplateFormatter(sqlGenerator.getTemplateFormatter());
if (autoXFMConfig) {
loadXFMConfigByDatabaseId();
return;
}
if (!this.xqlFileManager.isInitialized()) {
this.xqlFileManager.init();
}
}
}
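// Sketch: wiring an XQLFileManager (a no-arg constructor is assumed). With setAutoXFMConfig(true),
// the xql-file-manager-<databaseId>.yml variant is preferred when it exists, see loadXFMConfigByDatabaseId.
//
//   XQLFileManager xqlFileManager = new XQLFileManager();
//   baki.setXqlFileManager(xqlFileManager);
//   baki.setAutoXFMConfig(true);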
public XQLFileManager getXqlFileManager() {
return xqlFileManager;
}
public int getBatchSize() {
return batchSize;
}
public void setBatchSize(int batchSize) {
this.batchSize = batchSize;
}
public char getNamedParamPrefix() {
return namedParamPrefix;
}
public void setNamedParamPrefix(char namedParamPrefix) {
this.namedParamPrefix = namedParamPrefix;
this.sqlGenerator = new SqlGenerator(this.namedParamPrefix);
}
public boolean isReloadXqlOnGet() {
return reloadXqlOnGet;
}
public void setReloadXqlOnGet(boolean reloadXqlOnGet) {
this.reloadXqlOnGet = reloadXqlOnGet;
}
public boolean isAutoXFMConfig() {
return autoXFMConfig;
}
public void setAutoXFMConfig(boolean autoXFMConfig) {
this.autoXFMConfig = autoXFMConfig;
if (this.autoXFMConfig) {
loadXFMConfigByDatabaseId();
}
}
}