
sf.database.dialect.DBDialect Maven / Gradle / Ivy
Go to download
Show more of this group Show more artifacts with this name
Show all versions of sorm Show documentation
Show all versions of sorm Show documentation
java jpa tool for spring
The newest version!
package sf.database.dialect;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sf.common.exception.SmallOrmException;
import sf.common.log.LogContext;
import sf.common.log.OrmLog;
import sf.database.dbinfo.ColumnDBType;
import sf.database.dbinfo.Constraint;
import sf.database.dbinfo.ConstraintType;
import sf.database.dbinfo.DBMetaData;
import sf.database.dbinfo.DdlGenerator;
import sf.database.dbinfo.DdlGeneratorImpl;
import sf.database.dbinfo.Feature;
import sf.database.dbinfo.SequenceInfo;
import sf.database.meta.CascadeConfig;
import sf.database.meta.ColumnMapping;
import sf.database.meta.MetadataFeature;
import sf.database.meta.TableMapping;
import sf.database.meta.object.Case;
import sf.database.support.DBMS;
import sf.database.util.DBUtils;
import sf.database.util.SQLUtils;
import sf.tools.StringUtils;
import javax.persistence.Index;
import javax.persistence.Table;
import javax.persistence.UniqueConstraint;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.Statement;
import java.sql.Types;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Base class for database dialects.
 *
 * On @Column precision/scale: for a double-typed field, precision is the total
 * number of digits and scale is the number of digits after the decimal point.
 *
 * @see javax.persistence.Column
 */
public abstract class DBDialect implements IDBDialect {
    private static final Logger LOGGER = LoggerFactory.getLogger(DBDialect.class);

    // Free-form textual properties of the dialect. Generic parameters restored:
    // setProperty/getProperty use DBProperty keys and String values. Identity
    // map is safe for enum-like keys — TODO confirm DBProperty is an enum.
    private Map<DBProperty, String> properties = new IdentityHashMap<>();

    // Boolean capability flags of this database (see has()/notHas()).
    protected Set<Feature> features = new HashSet<>();

    // Reserved words of the database; lookups always use name.toUpperCase(),
    // so entries are expected to be stored upper-case (see wrapKeyword).
    protected final Set<String> keywords = new HashSet<>();

    // How unquoted identifiers are folded/compared by the database.
    protected Case caseHandler = Case.MIXED_SENSITIVE;

    // Identifier quote character, learned from live metadata in accept().
    protected char quoteChar;

    public DBDialect() {
    }
/**
 * Applies this dialect's case handling to an object (table/column) name.
 * Names already wrapped in the quote character are returned untouched;
 * null or empty input yields null.
 */
public String getObjectNameToUse(String name) {
    if (name == null || name.isEmpty()) {
        return null;
    }
    boolean alreadyQuoted = name.charAt(0) == quoteChar;
    return alreadyQuoted ? name : caseHandler.getObjectNameToUse(name);
}

/**
 * Applies this dialect's case handling to a mapped column's name.
 */
public String getColumnNameToUse(ColumnMapping column) {
    return caseHandler.getObjectNameToUse(column);
}
/**
 * Catalog to use for metadata lookups. Dialects like Oracle do not use the
 * catalog at all (hence null by default); MySQL has no schema and treats each
 * database as a catalog, so MySQL dialects override this.
 */
@Override
public String getCatalog(String catalog) {
    return null;
}

/** Schema to use for metadata lookups; pass-through by default. */
@Override
public String getSchema(String schema) {
    return schema;
}

/** JDBC driver class for the given URL; unknown (null) by default. */
@Override
public String getDriverClass(String url) {
    return null;
}

/** Default schema of this database, if any; none by default. */
@Override
public String getDefaultSchema() {
    return null;
}
/**
 * Learns runtime characteristics — identifier case handling and the
 * identifier quote character — from the live connection's metadata.
 */
@Override
public void accept(Connection conn) {
    MetadataFeature feature = DBMetaData.getInstance().getMetadataFeature(conn);
    this.caseHandler = feature.getDefaultCase();
    String quote = feature.getQuoteChar();
    if (!quote.isEmpty()) {
        quoteChar = quote.charAt(0);
    }
}

/** Whether unquoted identifiers are case sensitive in this database. */
@Override
public boolean isCaseSensitive() {
    return caseHandler.isCaseSensitive();
}
/** Stores a textual dialect property. */
protected void setProperty(DBProperty key, String value) {
    properties.put(key, value);
}

/**
 * Returns the textual property, or null when unset.
 * Cast keeps this compiling even against the raw (pre-generics) map field.
 */
public String getProperty(DBProperty key) {
    return (String) properties.get(key);
}

/** Returns the textual property, or {@code defaultValue} when unset. */
public String getProperty(DBProperty key, String defaultValue) {
    String value = (String) properties.get(key);
    return value == null ? defaultValue : value;
}

/**
 * Property parsed as int; 0 when unset/empty.
 *
 * @throws NumberFormatException if the stored value is not a valid int
 */
public int getPropertyInt(DBProperty key) {
    String s = (String) properties.get(key);
    if (StringUtils.isEmpty(s)) {
        return 0;
    }
    return Integer.parseInt(s);
}

/**
 * Property parsed as long; 0 when unset/empty.
 *
 * @throws NumberFormatException if the stored value is not a valid long
 */
public long getPropertyLong(DBProperty key) {
    String s = (String) properties.get(key);
    if (StringUtils.isEmpty(s)) {
        return 0;
    }
    return Long.parseLong(s);
}
/**
 * @return the name of the current database type; see the DBMS enum
 * @see DBMS
 */
public abstract String getName();

/**
 * @return the numeric id of the current database type; custom dialects must
 *         not clash with the ids declared in the DBMS enum
 */
public abstract int getNumber();

/**
 * @return the class name of the corresponding Hibernate Dialect
 */
public abstract String getHibernateDialect();

/**
 * Whether ALTER TABLE ... ADD COLUMN may specify the column's position.
 */
public boolean supportsColumnPosition() {
    return false;
}

/**
 * Whether this dialect targets a NoSQL store.
 */
public boolean isNosql() {
    return false;
}
/**
 * Wraps the name in quoted-identifier characters if, and only if, it collides
 * (case-insensitively) with a SQL reserved word of this dialect.
 */
public final String wrapKeyword(String name) {
    if (name == null || !keywords.contains(name.toUpperCase())) {
        return name;
    }
    return wrapQuotedIdentifier(name);
}
/**
 * Variant of {@link #wrapKeyword(String)} that first asks the connection for
 * its identifier quote string (MySQL may report " or `). Falls back to the
 * dialect-configured wrapper when the connection yields nothing.
 */
public final String wrapKeyword(Connection conn, String name) {
    String quote = null;
    if (conn != null) {
        try {
            quote = DBUtils.getIdentifierQuoteString(conn);
        } catch (Exception ignored) {
            // best effort — fall back to the static wrapper below
        }
    }
    if (name == null || !keywords.contains(name.toUpperCase())) {
        return name;
    }
    if (StringUtils.isBlank(quote)) {
        return wrapQuotedIdentifier(name);
    }
    // Two characters mean distinct open/close wrappers (e.g. "[]");
    // a single character is used on both sides.
    if (quote.length() >= 2) {
        return quote.charAt(0) + name + quote.charAt(1);
    }
    return quote + name + quote;
}
/**
 * Wraps a table/column name that collides with a SQL keyword in this
 * dialect's WRAP_FOR_KEYWORD characters.
 * <p>
 * Fixes: the original threw StringIndexOutOfBoundsException when the
 * WRAP_FOR_KEYWORD property was a single character, and when the name was an
 * empty string. Single-character wrappers (e.g. " or `) are now used on both
 * sides; empty names are returned unchanged.
 */
private final String wrapQuotedIdentifier(String name) {
    if (name == null || name.isEmpty()) {
        return name;
    }
    String w = getProperty(DBProperty.WRAP_FOR_KEYWORD);
    if (w == null || w.isEmpty() || name.charAt(0) == w.charAt(0)) {
        // no wrapper configured, or name is already wrapped
        return name;
    }
    char close = w.length() >= 2 ? w.charAt(1) : w.charAt(0);
    return w.charAt(0) + name + close;
}
/**
 * Inverse of the keyword wrapper: strips quoting characters from a name.
 * No-op by default; dialects override as needed.
 */
protected String unwrapper(String name) {
    return name;
}

/**
 * Escapes a string value for inlining into SQL; by default doubles every
 * single quote (') into ('').
 */
protected String escapeSqlValue(String value) {
    return StringUtils.replace(value, "'", "''");
}

/**
 * Separator placed between statements when several SQL statements are run
 * as one batch.
 */
public String getSqlStatmentSeparator() {
    return ";";
}
/** @return SQL dropping the given table. */
public abstract String sqlTableDrop(String table);

/** @return SQL renaming a table. */
public abstract String sqlTableRename(String oldName, String newName);

/** @return SQL renaming a column of the table. */
public abstract String sqlColumnRename(String table, String oldColumnName, String newColumnName);

/** @return SQL adding a column; position may be ignored (see supportsColumnPosition). */
public abstract String sqlColumnAdd(String table, String column_definition, String column_position);

/** @return SQL altering an existing column's definition. */
public abstract String sqlColumnModify(String table, String column_definition, String column_position);

/** @return SQL dropping a column from the table. */
public abstract String sqlColumnDrop(String table, String column);

/**
 * Reads sequence metadata. Generic return type restored (raw List in the
 * scraped source). Unsupported unless a dialect overrides it.
 *
 * @throws SQLFeatureNotSupportedException by default
 */
public List<SequenceInfo> getSequenceInfo(Connection conn, String schema, String seqName) throws SQLException {
    throw new SQLFeatureNotSupportedException("不支持");
}
/**
 * SQL fetching the next value of a sequence; unsupported by default.
 *
 * @param seqName sequence name
 */
public String getSeqNextValSql(String seqName) {
    throw new UnsupportedOperationException("不支持");
}

/** Whether this dialect lacks the given feature. */
public boolean notHas(Feature feature) {
    return !has(feature);
}

/** Whether this dialect supports the given feature. */
public boolean has(Feature feature) {
    return features.contains(feature);
}
/**
 * Appends LIMIT/OFFSET-style pagination to the SQL.
 *
 * @param sql    base query; mutated in place
 * @param offset first row to return, 0-based (= (page-1)*pageSize)
 * @param limit  page size
 * @return the same builder, for chaining
 */
public StringBuilder sqlPageList(StringBuilder sql, long offset, int limit) {
    sql.append(" limit ").append(limit);
    if (offset > 0) {
        sql.append(" offset ").append(offset);
    }
    return sql;
}

/**
 * Query returning no rows but full metadata; used to probe column info.
 */
protected String createResultSetMetaSql(TableMapping en) {
    return "SELECT * FROM " + en.getTableName() + " where 1!=1";
}

/** Whether the name is (case-insensitively) a reserved word of this dialect. */
public boolean containKeyword(String name) {
    return name != null && keywords.contains(name.toUpperCase());
}
/**
 * Fills nullability and JDBC sql-type info on every column of the mapping by
 * inspecting the ResultSetMetaData of a zero-row query against the live table.
 * Best effort: if the table or a column does not exist, the mapping keeps its
 * defaults and the problem is only logged at debug level.
 * <p>
 * Fix: the per-column catch block silently swallowed exceptions; it now logs
 * them at debug level (behavior otherwise unchanged).
 *
 * @param conn open JDBC connection
 * @param en   table mapping to enrich
 */
public void setupEntityField(Connection conn, TableMapping en) {
    Statement stat = null;
    ResultSet rs = null;
    try {
        // "where 1!=1" returns no rows but exposes full column metadata.
        stat = conn.createStatement();
        rs = stat.executeQuery(createResultSetMetaSql(en));
        ResultSetMetaData rsmd = rs.getMetaData();
        for (ColumnMapping mf : en.getSchemas()) {
            if (mf.getColumnDef() == null) {
                mf.setColumnDef(new ColumnDBType());
            }
            try {
                int index = DBUtils.getColumnIndex(rsmd, mf.getRawColumnName());
                mf.getColumnDef().setNullable(ResultSetMetaData.columnNoNulls != rsmd.isNullable(index));
                mf.setSqlType(rsmd.getColumnType(index));
            } catch (Exception e) {
                // Column missing from the physical table — keep the mapping's
                // defaults, but record the cause instead of dropping it.
                LOGGER.debug("Column '{}' of table '{}' could not be inspected",
                        mf.getRawColumnName(), en.getTableName(), e);
            }
        }
    } catch (Exception e) {
        LOGGER.debug("Table '" + en.getTableName() + "' doesn't exist!");
    } finally {
        DBUtils.closeQuietly(rs);
        DBUtils.closeQuietly(stat);
    }
}
/**
 * Creates the table for the given entity mapping.
 *
 * @param conn JDBC connection
 * @param en   entity mapping
 * @return whether creation succeeded
 */
public abstract boolean createEntity(Connection conn, TableMapping en);

/**
 * Produces the full column definition: "TYPE [UNIQUE] [NOT NULL] [DEFAULT x]".
 * An explicit columnDefinition on the @Column annotation wins outright.
 * Final — not meant to be overridden.
 */
public final String evalFieldDefinition(ColumnMapping mf) {
    String overridden = getColumnDefinition(mf);
    if (StringUtils.isNotBlank(overridden)) {
        // explicit columnDefinition replaces everything else
        return overridden;
    }
    StringBuilder def = new StringBuilder(evalFieldType(mf));
    evalFieldDefinitionLine(mf, def);
    return def.toString();
}
/**
 * Appends the UNIQUE / NOT NULL / DEFAULT clauses derived from the column
 * mapping onto the (already type-filled) definition builder.
 */
public void evalFieldDefinitionLine(ColumnMapping mf, StringBuilder type) {
    String defaultValue = getColumnDefaultValue(mf);
    boolean unique = mf.getColumn() != null && mf.getColumn().unique();
    boolean notNull = mf.isPk() || (mf.getColumn() != null && !mf.getColumn().nullable());
    if (unique) {
        type.append(" UNIQUE");
    }
    if (notNull) {
        type.append(" NOT NULL");
    }
    if (StringUtils.isNotBlank(defaultValue)) {
        type.append(" DEFAULT ").append(defaultValue);
    }
}
/**
 * Maps the column's JDBC type to this dialect's DDL type string, honouring
 * length/precision/scale from the @Column annotation.
 * <p>
 * Fix: the default branch threw an exception whose message contained literal,
 * unfilled '%s' placeholders; it is now formatted with the actual type, field
 * and table.
 *
 * @throws UnsupportedOperationException for JDBC types this dialect cannot map
 */
public String evalFieldType(ColumnMapping mf) {
    int length = getColumnLength(mf);
    int scale = getColumnScale(mf);
    int precision = getColumnPrecision(mf);
    switch (mf.getSqlType()) {
        case Types.CHAR:
            return "CHAR(" + length + ")";
        case Types.BOOLEAN:
            return "BOOLEAN";
        case Types.VARCHAR:
            return "VARCHAR(" + length + ")";
        case Types.LONGVARCHAR:
        case Types.LONGNVARCHAR:
        case Types.CLOB:
            return "TEXT";
        case Types.BINARY:
        case Types.VARBINARY:
        case Types.LONGVARBINARY:
        case Types.BLOB:
            return "BLOB";
        case Types.TIME_WITH_TIMEZONE:
        case Types.TIMESTAMP_WITH_TIMEZONE:
        case Types.TIMESTAMP:
            return "TIMESTAMP";
        case Types.DATE:
            return "DATE";
        case Types.TIME:
            return "TIME";
        case Types.INTEGER:
            // user-specified display width, otherwise the database default
            return length > 0 ? "INT(" + length + ")" : "INT";
        case Types.REAL:
        case Types.FLOAT:
        case Types.DOUBLE:
            if (precision > 0 && scale > 0) {
                // user-specified precision
                return "NUMERIC(" + precision + "," + scale + ")";
            }
            if (mf.getClz() == Double.class || mf.getClz() == Double.TYPE) {
                // default precision for doubles
                return "NUMERIC(15,10)";
            }
            return "FLOAT";
        case Types.ARRAY:
            return "ARRAY";
        default:
            throw new UnsupportedOperationException(String.format(
                    "Unsupport colType '%s' of field '%s' in '%s' ",
                    mf.getSqlType(), mf.getFieldName(), mf.getMeta().getTableName()));
    }
}
/**
 * Drops the entity's table. Best effort: any failure (e.g. the table does
 * not exist) yields false instead of an exception.
 */
public boolean dropEntity(Connection conn, TableMapping en) {
    String sql = "DROP TABLE " + wrapKeyword(en.getTableName());
    try {
        return execute(sql, conn);
    } catch (Exception e) {
        return false;
    }
}
/**
 * Creates the middle (join) tables required by the entity's many-to-many
 * fields. Generic parameters restored (raw Set/List in the scraped source).
 */
public void createRelation(Connection conn, TableMapping en) {
    Set<String> sqls = new LinkedHashSet<>(5);
    for (ColumnMapping field : en.getMetaFields()) {
        if (field.getManyToMany() != null) {
            sqls.addAll(createRelation(conn, field));
        }
    }
    for (String sql : sqls) {
        execute(sql, conn);
    }
}
/**
 * Builds the CREATE TABLE + primary-key statements for one many-to-many
 * middle table; returns an empty list when the table name is blank or the
 * table already exists. Generic parameters restored; the entry value type is
 * ColumnMapping (required by evalFieldType below).
 */
protected List<String> createRelation(Connection conn, ColumnMapping en) {
    List<String> sqls = new ArrayList<>();
    List<String> columns = new ArrayList<>();
    CascadeConfig cc = en.getCascadeConfig();
    DBDialect dialect = DBUtils.doGetDialect(conn, false);
    String middleTableName = cc.getMiddleTableName();
    if (DBMS.oracle.getNumber() == dialect.getNumber()) {
        middleTableName = middleTableName.toUpperCase();
    }
    if (StringUtils.isBlank(middleTableName) || existsTables(conn, middleTableName)) {
        return sqls;
    }
    // NOTE(review): the CREATE statement uses the original-case name while
    // the existence check above may use the upper-cased one — confirm intended.
    StringBuilder sql = new StringBuilder();
    sql.append("CREATE TABLE ").append(wrapKeyword(cc.getMiddleTableName())).append("(");
    boolean first = true;
    for (Map.Entry<String, ColumnMapping> entry : cc.getMiddleTableColumns().entrySet()) {
        String key = wrapKeyword(entry.getKey());
        columns.add(key);
        sql.append(first ? "" : ",").append(key).append(" ")
                .append(evalFieldType(entry.getValue())).append(" not null ");
        first = false;
    }
    sql.append(")");
    sqls.add(sql.toString());
    sqls.add(getPrimaryKeySql(cc.getMiddleTableName(), columns));
    return sqls;
}
/**
 * Drops the existing middle (join) tables of the entity's many-to-many
 * fields. Generic parameters restored (raw List in the scraped source).
 */
public void dropRelation(Connection conn, TableMapping en) {
    List<String> sqls = new ArrayList<>(5);
    for (ColumnMapping field : en.getMetaFields()) {
        if (field.getManyToMany() == null) {
            continue;
        }
        CascadeConfig cc = field.getCascadeConfig();
        if (!existsTables(conn, cc.getMiddleTableName())) {
            continue;
        }
        sqls.add("DROP TABLE " + wrapKeyword(cc.getMiddleTableName()));
    }
    for (String sql : sqls) {
        execute(sql, conn);
    }
}
/**
 * Whether the given table exists, delegating to DBMetaData. Errors are logged
 * and treated as "does not exist".
 */
public boolean existsTables(Connection conn, final String tableName) {
    try {
        return DBMetaData.getInstance().existTable(conn, tableName);
    } catch (SQLException e) {
        LOGGER.error("", e);
    }
    return false;
}

// Default COMMENT syntax, supported by PostgreSQL, Oracle and DB2.
// Fix: declared final — these are constants and were mutable statics.
private static final String DEFAULT_COMMENT_TABLE = "comment on table %1$s is '%2$s'";
private static final String DEFAULT_COMMENT_COLUMN = "comment on column %1$s.%2$s is '%3$s'";
/**
 * Builds CREATE [UNIQUE] INDEX statements for every @Index declared on the
 * entity's @Table annotation (kept in line with Hibernate). Each column entry
 * may be a database column name or a java field name, optionally followed by
 * extra keywords (e.g. "name desc"), which are preserved.
 * Generic parameters restored (raw List in the scraped source).
 *
 * @throws SmallOrmException if an entry matches neither a DB column nor a java field
 */
public List<String> createIndexSql(TableMapping tm) {
    List<String> sqls = new ArrayList<>();
    Table table = tm.getTable();
    if (table == null) {
        return sqls;
    }
    for (Index index : table.indexes()) {
        StringBuilder sb = new StringBuilder(
                index.unique() ? "create unique index " : "create index ");
        if (index.name().isEmpty()) {
            // derive a deterministic name: idx_<table>_<col1>_<col2>...
            sb.append("idx_").append(tm.getTableName()).append("_")
                    .append(index.columnList().replace(" ", "").replace(",", "_"));
        } else {
            sb.append(index.name());
        }
        sb.append(" ON ").append(wrapKeyword(tm.getTableName())).append("(");
        String[] columns = StringUtils.split(index.columnList(), ",");
        boolean first = true;
        for (String columnSpec : columns) {
            String[] parts = StringUtils.split(columnSpec.trim(), " ");
            String column = parts[0];
            parts[0] = ""; // keep any trailing keywords for the re-join below
            // resolve: DB column name first, then java field name
            ColumnMapping mapped = SQLUtils.getColumnByDBName(tm, column);
            if (mapped == null) {
                mapped = tm.getJavaFieldColumnMapping(column);
            }
            if (mapped == null) {
                throw new SmallOrmException(tm.getThisType().getName() + " 该字段" + column + " 不是数据库字段名,也不是java字段名!");
            }
            column = mapped.getRawColumnName();
            sb.append(first ? "" : ",").append(wrapKeyword(column)).append(StringUtils.join(parts, " "));
            first = false;
        }
        sb.append(")");
        sqls.add(sb.toString());
    }
    return sqls;
}
/**
 * Builds unique-constraint statements for every @UniqueConstraint declared on
 * the entity's @Table annotation (kept in line with Hibernate). Each listed
 * column may be a database column name or a java field name.
 * Generic parameters restored (raw List in the scraped source).
 *
 * @throws SmallOrmException if an entry matches neither a DB column nor a java field
 */
public final List<String> createUniqueSql(TableMapping tm) {
    List<String> sqls = new ArrayList<>();
    Table table = tm.getTable();
    if (table == null) {
        return sqls;
    }
    for (UniqueConstraint uc : table.uniqueConstraints()) {
        String name = uc.name();
        if (name.isEmpty()) {
            // derive a deterministic name: uk_<table>_<col1>_<col2>...
            name = "uk_" + tm.getTableName() + "_" + StringUtils.join(uc.columnNames(), "_");
        }
        String[] columnNames = new String[uc.columnNames().length];
        for (int j = 0; j < uc.columnNames().length; j++) {
            String column = uc.columnNames()[j];
            // resolve: DB column name first, then java field name
            ColumnMapping mapped = SQLUtils.getColumnByDBName(tm, column);
            if (mapped == null) {
                mapped = tm.getJavaFieldColumnMapping(column);
            }
            if (mapped == null) {
                throw new SmallOrmException("该字段" + column + " 不是数据库字段名,也不是java字段名!");
            }
            columnNames[j] = mapped.getRawColumnName();
        }
        String sql = uniqueSql(tm.getTableName(), name, columnNames);
        if (StringUtils.isNotBlank(sql)) {
            sqls.add(sql);
        }
    }
    return sqls;
}

/**
 * Generates a single unique-constraint statement.
 *
 * @param tableName   table name
 * @param name        constraint name
 * @param columnNames raw database column names
 * @return the statement, or blank when the dialect skips it
 */
public abstract String uniqueSql(String tableName, String name, String[] columnNames);
/**
 * Builds COMMENT statements for the table and every commented column.
 * Generic parameters restored (raw List in the scraped source).
 */
public List<String> addCommentSql(TableMapping en) {
    List<String> sqls = new ArrayList<>();
    // table-level comment
    if (StringUtils.isNotBlank(en.getComment())) {
        sqls.add(addTableCommnetSql(wrapKeyword(en.getTableName()), en.getComment()));
    }
    // column-level comments
    for (ColumnMapping mf : en.getSchemas()) {
        if (StringUtils.isNotBlank(mf.getComment())) {
            sqls.add(addColumnCommnetSql(wrapKeyword(en.getTableName()),
                    wrapKeyword(mf.getRawColumnName()), mf.getComment()));
        }
    }
    return sqls;
}
/**
 * Renders a "comment on table ..." statement (PostgreSQL/Oracle/DB2 syntax
 * by default; dialects override for other databases).
 */
public String addTableCommnetSql(String tableName, String comment) {
    return String.format(DEFAULT_COMMENT_TABLE, tableName, comment);
}

/**
 * Renders a "comment on column ..." statement (PostgreSQL/Oracle/DB2 syntax
 * by default; dialects override for other databases).
 */
public String addColumnCommnetSql(String tableName, String columnName, String comment) {
    return String.format(DEFAULT_COMMENT_COLUMN, tableName, columnName, comment);
}
/**
 * Generates the ADD CONSTRAINT statement adding a primary key to the table.
 * Generic parameter restored (raw List in the scraped source); raw-List
 * callers still compile.
 *
 * @param tablename       table to constrain
 * @param pkColumnsEntity primary-key column names (already wrapped if needed)
 */
protected String getPrimaryKeySql(String tablename, List<String> pkColumnsEntity) {
    Constraint con = new Constraint();
    con.setTableName(wrapKeyword(tablename));
    con.setName("pk_" + tablename);
    con.setType(ConstraintType.P);
    con.setColumns(pkColumnsEntity);
    DdlGenerator ddl = new DdlGeneratorImpl(this);
    return ddl.addConstraint(con);
}
/** Column length from the @Column annotation, defaulting to 255. */
public static int getColumnLength(ColumnMapping mf) {
    return mf.getColumn() != null ? mf.getColumn().length() : 255;
}

/** Numeric precision from the @Column annotation, 0 when unspecified. */
public static int getColumnPrecision(ColumnMapping mf) {
    return mf.getColumn() != null ? mf.getColumn().precision() : 0;
}

/** Numeric scale from the @Column annotation, 0 when unspecified. */
public static int getColumnScale(ColumnMapping mf) {
    return mf.getColumn() != null ? mf.getColumn().scale() : 0;
}

/** Explicit columnDefinition from the @Column annotation, null when absent. */
public static String getColumnDefinition(ColumnMapping mf) {
    return mf.getColumn() != null ? mf.getColumn().columnDefinition() : null;
}

/** Default value from the column info, null when absent. */
public static String getColumnDefaultValue(ColumnMapping mf) {
    return mf.getColumnInfo() != null ? mf.getColumnInfo().defaultVale() : null;
}

/** Fills a {0}=table / {1}=field MessageFormat pattern. */
protected static String gSQL(String ptn, String table, String field) {
    return MessageFormat.format(ptn, table, field);
}
/**
 * Builds an "ALTER TABLE ... ADD COLUMN ..." statement for a single column.
 * <p>
 * Fix: the original unconditionally appended " UNSIGNED NOT NULL NULL DEFAULT
 * NULL" — contradictory (NOT NULL immediately followed by NULL DEFAULT NULL)
 * and assuming an unsigned numeric type for every column. The constraints are
 * now derived from the column mapping via evalFieldDefinitionLine, matching
 * CREATE TABLE generation; the comment text is also quote-escaped.
 */
public String createAddColumnSql(TableMapping en, ColumnMapping mf) {
    StringBuilder sb = new StringBuilder("ALTER TABLE ");
    sb.append(wrapKeyword(en.getTableName())).append(" ADD ");
    sb.append("COLUMN ");
    sb.append(wrapKeyword(mf.getRawColumnName())).append(" ").append(evalFieldType(mf));
    // UNIQUE / NOT NULL / DEFAULT — same rules as table creation
    evalFieldDefinitionLine(mf, sb);
    if (StringUtils.isNotBlank(mf.getComment())) {
        sb.append(" COMMENT '").append(escapeSqlValue(mf.getComment())).append('\'');
    }
    return sb.toString();
}
/**
 * Reads the index names of a table via "show index from ..." (this default
 * implementation appears MySQL-specific — dialects for other databases should
 * override it; column 3 of the result looks like MySQL's Key_name — confirm).
 * <p>
 * Fixes: generic parameter restored (raw List); printStackTrace replaced with
 * proper logging.
 */
public List<String> getIndexNames(TableMapping en, Connection conn) {
    List<String> names = new ArrayList<>();
    String showIndexs = "show index from " + wrapKeyword(en.getTableName());
    PreparedStatement ppstat = null;
    ResultSet rest = null;
    try {
        ppstat = conn.prepareStatement(showIndexs);
        rest = ppstat.executeQuery();
        while (rest.next()) {
            names.add(rest.getString(3));
        }
    } catch (SQLException e) {
        LOGGER.error("Failed to read indexes of table " + en.getTableName(), e);
    } finally {
        DBUtils.closeQuietly(rest);
        DBUtils.closeQuietly(ppstat);
    }
    return names;
}
/**
 * Executes a single statement, routing success and failure through OrmLog so
 * the SQL and outcome are recorded consistently.
 *
 * @return true if the statement executed without error
 * @throws RuntimeException wrapping the SQLException on failure
 */
protected boolean execute(String sql, Connection conn) {
    LogContext log = OrmLog.commonLog(null, sql);
    Statement statement = null;
    try {
        statement = conn.createStatement();
        statement.execute(sql);
        // success path logs with the connection's autocommit state, evaluated lazily
        OrmLog.resultSqlLog(LOGGER, log, null, () -> OrmLog.getAutoCommit(conn));
        return true;
    } catch (SQLException e) {
        OrmLog.resultSqlLog(LOGGER, log, e);
        throw new RuntimeException(e);
    } finally {
        DBUtils.closeQuietly(statement);
    }
}
/**
 * Executes each statement in order; stops at the first failure, since
 * execute(String, Connection) rethrows as RuntimeException.
 * Generic parameter restored (raw List in the scraped source).
 */
protected void execute(List<String> sqls, Connection conn) {
    for (String sql : sqls) {
        execute(sql, conn);
    }
}

/**
 * Next auto-increment value of a column; unsupported by default — dialects
 * that can fetch it override this.
 */
public long getColumnAutoIncreamentValue(ColumnMapping mapping, Connection db) {
    throw new UnsupportedOperationException(mapping.getMeta().getTableName() + "." + mapping.getFieldName() + " is auto-increament, but the database '" + this.getName() + "' doesn't support fetching the next AutoIncreament value.");
}
/** @return the identifier quote character learned in accept(). */
public char getQuoteChar() {
    return quoteChar;
}

/**
 * Renders this dialect's NVL/COALESCE-style null-defaulting function with the
 * two arguments, or null when no NVL_FUNCTION property is configured.
 */
public String getNvlFunction(String var1, String var2) {
    String pattern = getProperty(DBProperty.NVL_FUNCTION);
    return pattern == null ? null : String.format(pattern, var1, var2);
}
}
© 2015 - 2025 Weber Informatics LLC | Privacy Policy