/*
* Copyright 2007 Daniel Spiewak
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.java.ao;
import net.java.ao.schema.Case;
import net.java.ao.schema.IndexNameConverter;
import net.java.ao.schema.NameConverters;
import net.java.ao.schema.TableNameConverter;
import net.java.ao.schema.TriggerNameConverter;
import net.java.ao.schema.UniqueNameConverter;
import net.java.ao.schema.ddl.DDLAction;
import net.java.ao.schema.ddl.DDLActionType;
import net.java.ao.schema.ddl.DDLField;
import net.java.ao.schema.ddl.DDLForeignKey;
import net.java.ao.schema.ddl.DDLIndex;
import net.java.ao.schema.ddl.DDLIndexField;
import net.java.ao.schema.ddl.DDLTable;
import net.java.ao.schema.ddl.DDLValue;
import net.java.ao.schema.ddl.SQLAction;
import net.java.ao.schema.ddl.SchemaReader;
import net.java.ao.sql.SqlUtils;
import net.java.ao.types.TypeInfo;
import net.java.ao.types.TypeManager;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.InputStream;
import java.sql.Blob;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.Deque;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import static java.util.Arrays.stream;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static java.util.Collections.unmodifiableList;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toSet;
import static java.util.stream.Stream.concat;
import static net.java.ao.Common.closeQuietly;
/**
* The superclass parent of all DatabaseProvider
* implementations. Various implementations allow for an abstraction
* around database-specific functionality (such as DDL). DatabaseProvider(s)
* also handle the creation of new {@link Connection} instances and
* fully encapsulate the raw JDBC driver. Any database-specific
* code should be placed in the database provider, rather than embedded
* within the API logic.
*
* This superclass contains a base-line, default implementation of most
* database-specific methods, thus requiring a minimum of work to implement
* a new database provider. For the sake of sanity (read: mine), this
* base-line implementation is basically specific to MySQL. Thus any
* DatabaseProvider implementations are really specifying the
* differences between the database in question and MySQL. To fully
* utilize the default implementations provided in this class, this fact should
* be kept in mind.
*
* This class also handles the implementation details required to ensure
* that only one active {@link Connection} instance is available per thread. This is
* in fact a very basic (and naive) form of connection pooling. It should
* not be relied upon for performance reasons. Instead one should enable
* connection pooling via the {@link javax.sql.DataSource} passed to instances. You can
* also use the net.java.ao.builder.EntityManagerBuilder builder to make it
* easier to configure the pooling. The purpose of the thread-locked connection pooling
* in this class is to satisfy transactions with external SQL statements.
*
* @author Daniel Spiewak
*/
public abstract class DatabaseProvider implements Disposable {
protected final Logger logger = LoggerFactory.getLogger(this.getClass());
protected final Logger sqlLogger = LoggerFactory.getLogger("net.java.ao.sql");
private final Set<SqlListener> sqlListeners;
private final ThreadLocal<Connection> transactionThreadLocal = new ThreadLocal<>();
private final DisposableDataSource dataSource;
protected final TypeManager typeManager;
private final String schema;
protected AtomicReference<String> quoteRef = new AtomicReference<>();
private static final String ORDER_CLAUSE_STRING = "(?:IDENTIFIER_QUOTE_STRING(\\w+)IDENTIFIER_QUOTE_STRING\\.)?(?:IDENTIFIER_QUOTE_STRING(\\w+)IDENTIFIER_QUOTE_STRING)(?:\\s*(?i:(ASC|DESC)))?";
private static final String PROP_TRANSACTION_ISOLATION_LEVEL = "ao.transaction.isolation.level";
private final Pattern ORDER_CLAUSE_PATTERN;
private static final CachingSqlProcessor SHARED_CACHING_SQL_PROCESSOR = new CachingSqlProcessor();
private CachingSqlProcessor cachingSqlProcessor = SHARED_CACHING_SQL_PROCESSOR; // non-final, used in tests.
protected DatabaseProvider(DisposableDataSource dataSource, String schema, TypeManager typeManager) {
this.dataSource = requireNonNull(dataSource, "dataSource can't be null");
this.typeManager = typeManager;
this.schema = StringUtils.isBlank(schema) ? null : schema; // can be null
this.sqlListeners = new CopyOnWriteArraySet<>();
this.sqlListeners.add(new LoggingSqlListener(sqlLogger));
loadQuoteString();
// Exclude quote strings around table / column names in ORDER BY - some plugins put the quote string in themselves.
String identifierQuoteStringPattern = "";
String quote = quoteRef.get();
if (quote != null && !quote.isEmpty()) {
identifierQuoteStringPattern = "(?:" + Pattern.quote(quote) + ")?";
}
ORDER_CLAUSE_PATTERN = Pattern.compile(ORDER_CLAUSE_STRING.replaceAll("IDENTIFIER_QUOTE_STRING", Matcher.quoteReplacement(identifierQuoteStringPattern)));
}
protected DatabaseProvider(DisposableDataSource dataSource, String schema) {
this(dataSource, schema, new TypeManager.Builder().build());
}
public TypeManager getTypeManager() {
return typeManager;
}
public String getSchema() {
return schema;
}
protected void loadQuoteString() {
Connection conn = null;
try {
conn = dataSource.getConnection();
if (conn == null) {
throw new IllegalStateException("Could not get connection to load quote String");
}
quoteRef.set(conn.getMetaData().getIdentifierQuoteString().trim());
} catch (SQLException e) {
throw new RuntimeException("Unable to query the database", e);
} finally {
closeQuietly(conn);
}
}
/**
* Render "SELECT * FROM LIMIT 1" in the database specific dialect
*/
public String renderMetadataQuery(final String tableName) {
return "SELECT * FROM " + withSchema(tableName) + " LIMIT 1";
}
/**
* Generates the DDL fragment required to specify an INTEGER field as
* auto-incremented. For databases which do not support such flags (which
* is just about every database except MySQL), an empty String ("") is an
* acceptable return value. This method should never return null,
* as that would cause the field rendering method to throw a {@link NullPointerException}.
*/
protected String renderAutoIncrement() {
return "AUTO_INCREMENT";
}
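// Illustrative only (not part of this class): a provider for a database that
// expresses auto-increment through a sequence or trigger rather than a column
// flag might override this to render nothing at the field level, e.g.:
//
//     @Override
//     protected String renderAutoIncrement() {
//         return "";   // the sequence/trigger is rendered elsewhere as a field "accessory"
//     }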
/**
* Top level delegating method for the process of rendering a database-agnostic
* {@link DDLAction} into the database-specific SQL actions. If the action is to
* create an entity, then each of the SQL actions may have a corresponding undo
* action to roll back creation of the entity if necessary.
*
* @param nameConverters
* @param action The database-agnostic action to render.
* @return An array of DDL statements specific to the database in question.
* @see #renderTable(net.java.ao.schema.NameConverters, net.java.ao.schema.ddl.DDLTable)
* @see #renderFunctions(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.ddl.DDLTable)
* @see #renderTriggers(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.SequenceNameConverter, net.java.ao.schema.ddl.DDLTable)
* @see #renderSequences(net.java.ao.schema.SequenceNameConverter, net.java.ao.schema.ddl.DDLTable)
* @see #renderDropTriggers(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.ddl.DDLTable)
* @see #renderDropFunctions(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.ddl.DDLTable)
* @see #renderDropSequences(net.java.ao.schema.SequenceNameConverter, net.java.ao.schema.ddl.DDLTable)
* @see #renderDropTableActions(net.java.ao.schema.NameConverters, net.java.ao.schema.ddl.DDLTable)
* @see #renderAlterTableAddColumn(net.java.ao.schema.NameConverters, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField)
* @see #renderAlterTableChangeColumn(net.java.ao.schema.NameConverters, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField, net.java.ao.schema.ddl.DDLField)
* @see #renderDropColumnActions(net.java.ao.schema.NameConverters, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField)
* @see #renderAlterTableAddKey(DDLForeignKey)
* @see #renderAlterTableDropKey(DDLForeignKey)
*/
public final Iterable<SQLAction> renderAction(NameConverters nameConverters, DDLAction action) {
switch (action.getActionType()) {
case CREATE:
return renderCreateTableActions(nameConverters, action.getTable());
case DROP:
return renderDropTableActions(nameConverters, action.getTable());
case ALTER_ADD_COLUMN:
return renderAddColumnActions(nameConverters, action.getTable(), action.getField());
case ALTER_CHANGE_COLUMN:
return renderAlterTableChangeColumn(nameConverters, action.getTable(), action.getOldField(), action.getField());
case ALTER_DROP_COLUMN:
return renderDropColumnActions(nameConverters, action.getTable(), action.getField());
case ALTER_ADD_KEY:
return singletonList(renderAlterTableAddKey(action.getKey())
.withUndoAction(renderAlterTableDropKey(action.getKey())));
case ALTER_DROP_KEY:
return singletonList(renderAlterTableDropKey(action.getKey()));
case CREATE_INDEX:
return singletonList(renderCreateIndex(nameConverters.getIndexNameConverter(), action.getIndex())
.withUndoAction(renderDropIndex(nameConverters.getIndexNameConverter(), action.getIndex())));
case DROP_INDEX:
return singletonList(renderDropForAoManagedIndex(nameConverters.getIndexNameConverter(), action.getIndex()));
case INSERT:
return singletonList(renderInsert(action.getTable(), action.getValues()));
}
throw new IllegalArgumentException("unknown DDLAction type " + action.getActionType());
}
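// Illustrative only: a hypothetical migration driver might consume the rendered
// actions in order, remembering each one so that its undo action (see
// SQLAction#getUndoAction()) can be replayed in reverse if a later step fails.
//
//     Deque<SQLAction> executed = new LinkedList<>();
//     for (SQLAction sql : provider.renderAction(nameConverters, action)) {
//         execute(connection, sql);   // hypothetical executor method
//         executed.push(sql);
//     }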
private SQLAction renderDropForAoManagedIndex(IndexNameConverter indexNameConverter, DDLIndex index) {
final String indexNamePrefix = indexNameConverter.getPrefix(shorten(index.getTable()));
if (index.getIndexName().toLowerCase().startsWith(indexNamePrefix)) {
return Optional.ofNullable(renderDropIndex(indexNameConverter, index)).orElse(SQLAction.of(""));
} else {
logger.debug("Ignoring Drop index {} as index not managed by AO", index.getIndexName());
return SQLAction.of("");
}
}
private Iterable<SQLAction> renderCreateTableActions(NameConverters nameConverters, DDLTable table) {
List<SQLAction> ret = new ArrayList<>();
ret.add(renderTable(nameConverters, table).withUndoAction(renderDropTableStatement(table)));
renderAccessories(nameConverters, table).forEach(ret::add);
return unmodifiableList(ret);
}
private Iterable<SQLAction> renderDropTableActions(NameConverters nameConverters, DDLTable table) {
List<SQLAction> ret = new ArrayList<>();
for (DDLIndex index : table.getIndexes()) {
final SQLAction sqlAction = renderDropIndex(nameConverters.getIndexNameConverter(), index);
if (sqlAction != null) {
ret.add(sqlAction);
}
}
renderDropAccessories(nameConverters, table).forEach(ret::add);
ret.add(renderDropTableStatement(table));
return unmodifiableList(ret);
}
private Iterable<SQLAction> renderAddColumnActions(NameConverters nameConverters, DDLTable table, DDLField field) {
List<SQLAction> ret = new ArrayList<>();
renderAlterTableAddColumn(nameConverters, table, field).forEach(ret::add);
return unmodifiableList(ret);
}
protected Iterable<SQLAction> renderDropColumnActions(NameConverters nameConverters, DDLTable table, DDLField field) {
List<SQLAction> sqlActions = new ArrayList<>();
final List<SQLAction> dropIndexActions = Stream.of(table.getIndexes())
.filter(index -> index.containsFieldWithName(field.getName()))
.map(index -> DDLAction.builder(DDLActionType.DROP_INDEX)
.setIndex(index)
.build())
.map(action -> renderAction(nameConverters, action))
.flatMap(iterable -> StreamSupport.stream(iterable.spliterator(), false))
.collect(toList());
sqlActions.addAll(dropIndexActions);
renderAlterTableDropColumn(nameConverters, table, field).forEach(sqlActions::add);
return unmodifiableList(sqlActions);
}
/**
* Top level delegating method for rendering a database-agnostic
* {@link Query} object into its (potentially) database-specific
* query statement. This method invokes the various renderQuery*
* methods to construct its output, thus it is doubtful that any subclasses
* will have to override it. Rather, one of the delegate methods
* should be considered.
*
* An example of a database-specific query rendering would be the
* following Query:
*
* Query.select().from(Person.class).limit(10)
*
* On MySQL, this would render to: SELECT id FROM people LIMIT 10
* However, on SQL Server, this same Query would render as:
* SELECT TOP 10 id FROM people
*
* @param query The database-agnostic Query object to be rendered in a
* potentially database-specific way.
* @param converter Used to convert {@link Entity} classes into table names.
* @param count If true, render the Query as a SELECT COUNT(*)
* rather than a standard field-data query.
* @return A syntactically complete SQL statement potentially specific to the
* database.
* @see #renderQuerySelect(Query, TableNameConverter, boolean)
* @see #renderQueryJoins(Query, TableNameConverter)
* @see #renderQueryWhere(Query)
* @see #renderQueryGroupBy(Query)
* @see #renderQueryOrderBy(Query)
* @see #renderQueryLimit(Query)
*/
public String renderQuery(Query query, TableNameConverter converter, boolean count) {
StringBuilder sql = new StringBuilder();
sql.append(renderQuerySelect(query, converter, count));
sql.append(renderQueryJoins(query, converter));
sql.append(renderQueryWhere(query));
sql.append(renderQueryGroupBy(query));
sql.append(renderQueryHaving(query));
sql.append(renderQueryOrderBy(query));
sql.append(renderQueryLimit(query));
return sql.toString();
}
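// Illustrative only, restating the example from the javadoc above: rendering
// Query.select().from(Person.class).limit(10) through this method with a
// MySQL-style provider and an assumed lowercasing/pluralising table name
// converter yields "SELECT id FROM people LIMIT 10".
//
//     String sql = provider.renderQuery(
//             Query.select().from(Person.class).limit(10), converter, false);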
/**
* Parses the database-agnostic String value relevant to the specified SQL
* type in int form (as defined by {@link Types}) and returns
* the corresponding Java value. This method is completely database-agnostic, as are
* all of its delegate methods.
*
* WARNING: This method is being considered for removal to another
* class (perhaps {@link TypeManager}?) as it is not a database-specific function and thus
* confuses the purpose of this class. Do not rely upon it heavily. (better yet, don't rely on it
* at all from external code. It's not designed to be part of the public API)
*
* @param type The JDBC integer type of the database field against which to parse the
* value.
* @param value The database-agnostic String value to parse into a proper Java object
* with respect to the specified SQL type.
* @return A Java value which corresponds to the specified String.
*/
public Object parseValue(int type, String value) {
if (value == null || value.equals("NULL")) {
return null;
}
try {
switch (type) {
case Types.BIGINT:
return Long.parseLong(value);
case Types.BIT:
try {
return Byte.parseByte(value) != 0;
} catch (Throwable t) {
return Boolean.parseBoolean(value);
}
case Types.BOOLEAN:
try {
return Integer.parseInt(value) != 0;
} catch (Throwable t) {
return Boolean.parseBoolean(value);
}
case Types.CHAR:
return value.charAt(0);
case Types.DATE:
try {
return new SimpleDateFormat(getDateFormat()).parse(value);
} catch (ParseException e) {
return null;
}
case Types.DECIMAL:
return Double.parseDouble(value);
case Types.DOUBLE:
return Double.parseDouble(value);
case Types.FLOAT:
return Float.parseFloat(value);
case Types.INTEGER:
return Integer.parseInt(value);
case Types.NUMERIC:
return Integer.parseInt(value);
case Types.REAL:
return Double.parseDouble(value);
case Types.SMALLINT:
return Short.parseShort(value);
case Types.TIMESTAMP:
try {
return new SimpleDateFormat(getDateFormat()).parse(value);
} catch (ParseException e) {
return null;
}
case Types.TINYINT:
return Short.parseShort(value);
case Types.VARCHAR:
case Types.NVARCHAR:
return value;
}
} catch (Throwable t) {
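// swallow parse failures; unparseable values fall through to return null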
}
return null;
}
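// Illustrative only: a few inputs and the values this default implementation yields.
//
//     parseValue(Types.INTEGER, "42")     ->  Integer 42
//     parseValue(Types.BOOLEAN, "1")      ->  Boolean.TRUE
//     parseValue(Types.VARCHAR, "NULL")   ->  null (the literal string "NULL" maps to null)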
/**
* Allows the provider to set database-specific options on a
* {@link Statement} instance prior to its usage in a SELECT
* query. This is to allow things like emulation of the
* LIMIT feature on databases which don't support it within
* the SQL implementation.
*
* This method is only called on SELECTs.
*
* @param stmt The instance against which the properties
* should be set.
* @param query The query which is being executed against
* the statement instance.
*/
public void setQueryStatementProperties(Statement stmt, Query query) throws SQLException {
}
/**
* Allows the provider to set database-specific options on a
* {@link ResultSet} instance prior to its use by the library.
* This allows for features such as row offsetting even on
* databases that don't support it (such as Oracle, Derby,
* etc).
*
* @param res The ResultSet to modify.
* @param query The query instance which was run to produce
* the result set.
*/
public void setQueryResultSetProperties(ResultSet res, Query query) throws SQLException {
}
/**
* Returns a result set of all of the tables (and associated
* meta) in the database. The fields of the result set must
* correspond with those specified in the
* DatabaseMetaData#getTables(String, String, String, String[])
* method. In fact, the default implementation merely calls
* this method, passing the connection's catalog, the configured schema,
* a "%" table-name pattern and the "TABLE" type.
* For databases (such as PostgreSQL) where this is unsuitable,
* different parameters can be specified to the getTables
* method in the override, or an entirely new implementation
* written, as long as the result set corresponds in fields to
* the JDBC spec.
*
* @param conn The connection to use in retrieving the database tables.
* @return A result set of tables (and meta) corresponding in fields
* to the JDBC specification.
* @see java.sql.DatabaseMetaData#getTables(String, String, String, String[])
*/
public ResultSet getTables(Connection conn) throws SQLException {
return conn.getMetaData().getTables(conn.getCatalog(), schema, "%", new String[]{"TABLE"});
}
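// Illustrative only: callers typically read the standard JDBC "TABLE_NAME"
// column from the returned result set, for example:
//
//     try (ResultSet tables = provider.getTables(connection)) {
//         while (tables.next()) {
//             String name = tables.getString("TABLE_NAME");
//             // collect or compare the name as needed
//         }
//     }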
public ResultSet getSequences(Connection conn) throws SQLException {
return conn.getMetaData().getTables(conn.getCatalog(), schema, "%", new String[]{"SEQUENCE"});
}
public ResultSet getIndexes(Connection conn, String tableName) throws SQLException {
return conn.getMetaData().getIndexInfo(conn.getCatalog(), schema, tableName, false, false);
}
public ResultSet getImportedKeys(Connection connection, String tableName) throws SQLException {
return connection.getMetaData().getImportedKeys(connection.getCatalog(), schema, tableName);
}
/**
* Renders the SELECT portion of a given {@link Query} instance in the
* manner required by the database-specific SQL implementation. Usually,
* this is as simple as "SELECT id FROM table"
or "SELECT DISTINCT
* * FROM table"
. However, some databases require the limit and offset
* parameters to be specified as part of the SELECT clause. For example,
* on HSQLDB, a Query for the "id" field limited to 10 rows would render
* SELECT like this: SELECT TOP 10 id FROM table
.
*
* There is usually no need to call this method directly. Under normal
* operations it functions as a delegate for {@link #renderQuery(Query, TableNameConverter, boolean)}.
*
* @param query The Query instance from which to determine the SELECT properties.
* @param converter The name converter to allow conversion of the query entity
* interface into a proper table name.
* @param count Whether or not the query should be rendered as a SELECT COUNT(*).
* @return The database-specific SQL rendering of the SELECT portion of the query.
*/
protected String renderQuerySelect(Query query, TableNameConverter converter, boolean count) {
StringBuilder sql = new StringBuilder();
switch (query.getType()) {
case SELECT:
sql.append("SELECT ");
if (query.isDistinct()) {
sql.append("DISTINCT ");
}
if (count) {
sql.append("COUNT(*)");
} else {
sql.append(querySelectFields(query, converter));
}
sql.append(" FROM ").append(queryTableName(query, converter));
break;
}
return sql.toString();
}
protected final String queryTableName(Query query, TableNameConverter converter) {
final String queryTable = query.getTable();
final String tableName = queryTable != null ? queryTable : converter.getName(query.getTableType());
final StringBuilder queryTableName = new StringBuilder().append(withSchema(tableName));
if (query.getAlias(query.getTableType()) != null) {
queryTableName.append(" ").append(query.getAlias(query.getTableType()));
}
return queryTableName.toString();
}
protected final String querySelectFields(final Query query, final TableNameConverter converter) {
return query.getFieldMetadata().stream()
.map(fieldName -> withAlias(query, fieldName, converter))
.collect(Collectors.joining(","));
}
private String withAlias(Query query, Query.FieldMetadata field, final TableNameConverter converter) {
final StringBuilder withAlias = new StringBuilder();
if (query.getAlias(query.getTableType()) != null) {
withAlias.append(query.getAlias(query.getTableType())).append(".");
} else if (!query.getJoins().isEmpty()) {
String queryTable = query.getTable();
String tableName = queryTable != null ? queryTable : converter.getName(query.getTableType());
withAlias.append(processID(tableName)).append(".");
}
return withAlias.append(processID(field)).toString();
}
/**
* Renders the JOIN portion of the query in the database-specific SQL
* dialect. Very few databases deviate from the standard in this matter,
* thus the default implementation is usually sufficient.
*
* An example return value: " JOIN table1 ON table.id = table1.value"
*
* There is usually no need to call this method directly. Under normal
* operations it functions as a delegate for {@link #renderQuery(Query, TableNameConverter, boolean)}.
*
* @param query The Query instance from which to determine the JOIN properties.
* @param converter The name converter to allow conversion of the query entity
* interface into a proper table name.
* @return The database-specific SQL rendering of the JOIN portion of the query.
*/
protected String renderQueryJoins(Query query, TableNameConverter converter) {
final StringBuilder sql = new StringBuilder();
for (Map.Entry<Class<? extends RawEntity<?>>, String> joinEntry : query.getJoins().entrySet()) {
sql.append(" JOIN ").append(withSchema(converter.getName(joinEntry.getKey())));
if (query.getAlias(joinEntry.getKey()) != null) {
sql.append(" ").append(query.getAlias(joinEntry.getKey()));
}
if (joinEntry.getValue() != null) {
sql.append(" ON ").append(processOnClause(joinEntry.getValue()));
}
}
return sql.toString();
}
/**
* Renders the WHERE portion of the query in the database-specific SQL
* dialect. Very few databases deviate from the standard in this matter,
* thus the default implementation is usually sufficient.
*
* An example return value: " WHERE name = ? OR age < 20"
*
* There is usually no need to call this method directly. Under normal
* operations it functions as a delegate for {@link #renderQuery(Query, TableNameConverter, boolean)}.
*
* @param query The Query instance from which to determine the WHERE properties.
* @return The database-specific SQL rendering of the WHERE portion of the query.
*/
protected String renderQueryWhere(Query query) {
StringBuilder sql = new StringBuilder();
String whereClause = query.getWhereClause();
if (whereClause != null) {
sql.append(" WHERE ");
sql.append(processWhereClause(whereClause));
}
return sql.toString();
}
/**
* Renders the GROUP BY portion of the query in the database-specific SQL
* dialect. Very few databases deviate from the standard in this matter,
* thus the default implementation is usually sufficient.
*
* An example return value: " GROUP BY name"
*
* There is usually no need to call this method directly. Under normal
* operations it functions as a delegate for {@link #renderQuery(Query, TableNameConverter, boolean)}.
*
* @param query The Query instance from which to determine the GROUP BY properties.
* @return The database-specific SQL rendering of the GROUP BY portion of the query.
*/
protected String renderQueryGroupBy(Query query) {
StringBuilder sql = new StringBuilder();
String groupClause = query.getGroupClause();
if (groupClause != null) {
sql.append(" GROUP BY ");
sql.append(processGroupByClause(groupClause));
}
return sql.toString();
}
private String processGroupByClause(String groupBy) {
return SqlUtils.processGroupByClause(groupBy, this::processID, this::processTableName);
}
/**
* Renders the HAVING portion of the query in the database-specific SQL
* dialect. Very few databases deviate from the standard in this matter,
* thus the default implementation is usually sufficient.
*
* An example return value: " HAVING COUNT(name) > 2"
*
* There is usually no need to call this method directly. Under normal
* operations it functions as a delegate for {@link #renderQuery(Query, TableNameConverter, boolean)}.
*
* @param query The Query instance from which to determine the HAVING properties.
* @return The database-specific SQL rendering of the HAVING portion of the query.
*/
protected String renderQueryHaving(Query query) {
StringBuilder sql = new StringBuilder();
String havingClause = query.getHavingClause();
if (havingClause != null) {
sql.append(" HAVING ");
sql.append(processHavingClause(havingClause));
}
return sql.toString();
}
private String processHavingClause(String having) {
return SqlUtils.processHavingClause(having, this::processID, this::processTableName);
}
/**
* Renders the ORDER BY portion of the query in the database-specific SQL
* dialect. Very few databases deviate from the standard in this matter,
* thus the default implementation is usually sufficient.
*
* An example return value: " ORDER BY name ASC"
*
* There is usually no need to call this method directly. Under normal
* operations it functions as a delegate for {@link #renderQuery(Query, TableNameConverter, boolean)}.
*
* @param query The Query instance from which to determine the ORDER BY properties.
* @return The database-specific SQL rendering of the ORDER BY portion of the query.
*/
protected String renderQueryOrderBy(Query query) {
StringBuilder sql = new StringBuilder();
String orderClause = query.getOrderClause();
if (orderClause != null) {
sql.append(" ORDER BY ");
sql.append(processOrderClause(orderClause));
}
return sql.toString();
}
public final String processOrderClause(String order) {
final Matcher matcher = ORDER_CLAUSE_PATTERN.matcher(order);
final int ORDER_CLAUSE_PATTERN_GROUP_TABLE_NAME = 1;
final int ORDER_CLAUSE_PATTERN_GROUP_COL_NAME = 2;
final int ORDER_CLAUSE_PATTERN_GROUP_DIRECTION = 3;
final StringBuffer sql = new StringBuffer();
while (matcher.find()) {
final StringBuilder repl = new StringBuilder();
// ORDER_CLAUSE_PATTERN_GROUP_TABLE_NAME signifies the (optional) table name to potentially quote
if (matcher.group(ORDER_CLAUSE_PATTERN_GROUP_TABLE_NAME) != null) {
repl.append(processTableName(matcher.group(ORDER_CLAUSE_PATTERN_GROUP_TABLE_NAME)));
repl.append(".");
}
// ORDER_CLAUSE_PATTERN_GROUP_COL_NAME signifies the (mandatory) column name to potentially quote
repl.append(processID(matcher.group(ORDER_CLAUSE_PATTERN_GROUP_COL_NAME)));
// ORDER_CLAUSE_PATTERN_GROUP_DIRECTION signifies the ASC/DESC option
if (matcher.group(ORDER_CLAUSE_PATTERN_GROUP_DIRECTION) != null) {
repl.append(" ").append(matcher.group(ORDER_CLAUSE_PATTERN_GROUP_DIRECTION));
}
matcher.appendReplacement(sql, Matcher.quoteReplacement(repl.toString()));
}
matcher.appendTail(sql);
return sql.toString();
}
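// Illustrative only: assuming a provider whose processID/processTableName wrap
// identifiers in double quotes, an order clause is rewritten like so:
//
//     processOrderClause("person.lastName DESC, firstName")
//     // -> "\"person\".\"lastName\" DESC, \"firstName\""
//
// Identifier quotes already present in the input are tolerated by the pattern.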
/**
* Renders the LIMIT portion of the query in the database-specific SQL
* dialect. There is wide variety in database implementations of this
* particular SQL clause. In fact, many databases do not support it at all.
*
* Unfortunately, we live in the real world of proprietary database
* implementations that requires us to use database specific keywords or
* semantics to achieve these outcomes. Appropriate methods should be
* overridden in such cases.
*
* An example return value: " LIMIT 10,2"
*
* There is usually no need to call this method directly. Under normal
* operations it functions as a delegate for {@link #renderQuery(Query, TableNameConverter, boolean)}.
*
* @param query The Query instance from which to determine the LIMIT properties.
* @return The database-specific SQL rendering of the LIMIT portion of the query.
*/
protected String renderQueryLimit(Query query) {
StringBuilder sql = new StringBuilder();
int limit = query.getLimit();
if (limit >= 0) {
sql.append(" LIMIT ");
sql.append(limit);
}
int offset = query.getOffset();
if (offset > 0) {
sql.append(" OFFSET ").append(offset);
}
return sql.toString();
}
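// Illustrative only: a provider for a database that uses the SQL:2008
// OFFSET ... FETCH syntax instead of LIMIT could override this along these lines:
//
//     @Override
//     protected String renderQueryLimit(Query query) {
//         StringBuilder sql = new StringBuilder();
//         if (query.getOffset() > 0) {
//             sql.append(" OFFSET ").append(query.getOffset()).append(" ROWS");
//         }
//         if (query.getLimit() >= 0) {
//             sql.append(" FETCH NEXT ").append(query.getLimit()).append(" ROWS ONLY");
//         }
//         return sql.toString();
//     }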
/**
* Retrieves a JDBC {@link Connection} instance which corresponds
* to the database represented by the provider instance. This Connection
* can be used to execute arbitrary JDBC operations against the database.
* Also, this is the method used by the whole of ActiveObjects itself to
* get database connections when required.
*
*
* All {@link Connection} instances are pooled internally by thread.
* Thus, there is never more than one connection per thread. This is
* necessary to allow arbitrary JDBC operations within a transaction
* without breaking transaction integrity. Developers using this method
* should bear this fact in mind and consider the {@link Connection}
* instance immutable. The only exception is if one is absolutely
* certain that the JDBC code in question is not being executed within
* a transaction.
*
*
* Despite the fact that there is only a single connection per thread,
* the {@link Connection} instances returned from this method should
* still be treated as bona fide JDBC connections. They can and
* should be closed when their usage is complete. This is
* especially important when actual connection pooling is in use and
* non-disposal of connections can lead to a crash as the connection
* pool runs out of resources. The developer need not concern themselves
* with the single-connection-per-thread issue when closing the connection
* as the call to close() will be intercepted and ignored
* if necessary.
*
*
* @return A new connection to the database
*/
public final Connection getConnection() throws SQLException {
Connection c = transactionThreadLocal.get();
if (c != null) {
if (!c.isClosed()) {
return c;
} else {
transactionThreadLocal.remove(); // remove the reference to the connection
}
}
final Connection connectionImpl = dataSource.getConnection();
if (connectionImpl == null) {
throw new SQLException("Unable to create connection");
}
c = DelegateConnectionHandler.newInstance(
connectionImpl,
isExtraLoggingEnabled()
);
setPostConnectionProperties(c);
return c;
}
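// Illustrative only: connections obtained here can be used and closed like any
// other JDBC connection; inside a transaction the close() call is intercepted.
//
//     try (Connection conn = provider.getConnection();
//          PreparedStatement stmt = conn.prepareStatement("SELECT 1")) {
//         stmt.executeQuery();
//     }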
private boolean isExtraLoggingEnabled() {
return Boolean.getBoolean("net.java.ao.sql.logging.extra");
}
public final Connection startTransaction() throws SQLException {
final Connection c = getConnection();
setCloseable(c, false);
//noinspection MagicConstant
c.setTransactionIsolation(getTransactionIsolationLevel().getLevel());
c.setAutoCommit(false);
transactionThreadLocal.set(c);
return c;
}
private TransactionIsolationLevel getTransactionIsolationLevel() {
final TransactionIsolationLevel defaultLevel = TransactionIsolationLevel.TRANSACTION_SERIALIZABLE;
final String isolationLevelProperty = System.getProperty(PROP_TRANSACTION_ISOLATION_LEVEL, defaultLevel.toString());
try {
return TransactionIsolationLevel.valueOf(isolationLevelProperty);
} catch (IllegalArgumentException e) {
final Object[] warningArgs = {isolationLevelProperty, PROP_TRANSACTION_ISOLATION_LEVEL, defaultLevel};
logger.warn("Invalid value '{}' for {}, using default value '{}'", warningArgs);
return defaultLevel;
}
}
public final Connection commitTransaction(Connection c) throws SQLException {
Validate.validState(c == transactionThreadLocal.get(), "There are two concurrently open transactions!");
Validate.validState(c != null, "Tried to commit a transaction that is not started!");
c.commit();
transactionThreadLocal.remove();
return c;
}
public final void rollbackTransaction(Connection c) throws SQLException {
Validate.validState(c == transactionThreadLocal.get(), "There are two concurrently open transactions!");
Validate.validState(c != null, "Tried to rollback a transaction that is not started!");
c.rollback();
}
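// Illustrative only: the intended pairing of the transaction methods above.
// This sketch omits restoring auto-commit and releasing the connection, which
// the calling code must still take care of.
//
//     Connection conn = provider.startTransaction();
//     try {
//         // ... arbitrary JDBC work on conn ...
//         provider.commitTransaction(conn);
//     } catch (SQLException e) {
//         provider.rollbackTransaction(conn);
//         throw e;
//     }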
void setCloseable(Connection connection, boolean closeable) {
if (connection instanceof DelegateConnection) {
((DelegateConnection) connection).setCloseable(closeable);
}
}
/**
* Frees any resources held by the database provider or delegate
* libraries (such as connection pools). This method should be called
* once usage of the provider is complete to ensure that all
* connections are committed and closed.
*/
public void dispose() {
dataSource.dispose();
}
/**
* Called to make any post-creation modifications to a new
* {@link Connection} instance. This is used for databases
* such as Derby which require the schema to be set after
* the connection is created.
*
* @param conn The connection to modify according to the database
* requirements.
*/
protected void setPostConnectionProperties(Connection conn) throws SQLException {
}
/**
* Renders the foreign key constraints in database-specific DDL for
* the table in question. Actually, this method only loops through
* the foreign keys and renders indentation and line-breaks. The
* actual rendering is done in a second delegate method.
*
* @param uniqueNameConverter
* @param table The database-agnostic DDL representation of the table
* in question.
* @return The String rendering of all of the foreign keys for
* the table.
* @see #renderForeignKey(DDLForeignKey)
*/
protected String renderConstraintsForTable(UniqueNameConverter uniqueNameConverter, DDLTable table) {
StringBuilder back = new StringBuilder();
for (DDLForeignKey key : table.getForeignKeys()) {
back.append(" ").append(renderForeignKey(key)).append(",\n");
}
return back.toString();
}
protected String renderConstraints(NameConverters nameConverters, List<String> primaryKeys, DDLTable table) {
StringBuilder back = new StringBuilder();
back.append(renderConstraintsForTable(
nameConverters.getUniqueNameConverter(), table));
if (primaryKeys.size() > 0) {
back.append(renderPrimaryKey(table.getName(), primaryKeys.get(0)));
}
return back.toString();
}
/**
* Renders the specified foreign key representation into the
* database-specific DDL. The implementation must name the
* foreign key according to the DDLForeignKey#getFKName()
* value, otherwise migrations will no longer function appropriately.
*
* @param key The database-agnostic foreign key representation.
* @return The database-specific DDL fragment corresponding to the
* foreign key in question.
*/
protected String renderForeignKey(DDLForeignKey key) {
StringBuilder back = new StringBuilder();
back.append("CONSTRAINT ").append(processID(key.getFKName()));
back.append(" FOREIGN KEY (").append(processID(key.getField())).append(") REFERENCES ");
back.append(withSchema(key.getTable())).append('(').append(processID(key.getForeignField())).append(")");
return back.toString();
}
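// Illustrative only: for a key named "fk_comment_post" on comment.postId
// referencing post.id, this renders roughly
//
//     CONSTRAINT fk_comment_post FOREIGN KEY (postId) REFERENCES post(id)
//
// with identifier quoting and schema prefixes depending on the concrete provider.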
/**
* Converts the specified type into the database-specific DDL String
* value. By default, this delegates to the TypeInfo#getSqlTypeIdentifier()
* method. Subclass implementations should be sure to make a super
* call in order to ensure that both default naming and future special
* cases are handled appropriately.
*
* @param type The type instance to convert to a DDL string.
* @return The database-specific DDL representation of the type (e.g. "VARCHAR").
*/
protected String convertTypeToString(TypeInfo<?> type) {
return type.getSqlTypeIdentifier();
}
/**
* Renders the specified table representation into the corresponding
* database-specific DDL statement. For legacy reasons, this only allows
* single-statement table creation. Additional statements (triggers,
* functions, etc) must be created in one of the other delegate methods
* for DDL creation. This method does a great deal of delegation to
* other DatabaseProvider methods for functions such as
* field rendering, foreign key rendering, etc.
*
* @param nameConverters
* @param table The database-agnostic table representation.
* @return The database-specific DDL statements which correspond to the
* specified table creation.
*/
protected final SQLAction renderTable(NameConverters nameConverters, DDLTable table) {
StringBuilder back = new StringBuilder("CREATE TABLE ");
back.append(withSchema(table.getName()));
back.append(" (\n");
List<String> primaryKeys = new LinkedList<>();
for (DDLField field : table.getFields()) {
back.append(" ").append(renderField(nameConverters, table, field, new RenderFieldOptions(true, true, true))).append(",\n");
if (field.isPrimaryKey()) {
primaryKeys.add(field.getName());
}
}
if (primaryKeys.size() > 1) {
throw new RuntimeException("Entities may only have one primary key");
}
back.append(renderConstraints(nameConverters, primaryKeys, table));
back.append(")");
String tailAppend = renderAppend();
if (tailAppend != null) {
back.append(' ');
back.append(tailAppend);
}
return SQLAction.of(back);
}
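// Illustrative only: for a simple auto-incremented entity this default
// implementation produces DDL along the lines of
//
//     CREATE TABLE person (
//         id INTEGER NOT NULL AUTO_INCREMENT,
//         name VARCHAR(255),
//         PRIMARY KEY(id)
//     )
//
// with quoting, schema prefix and the renderAppend() tail varying by provider.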
protected String renderPrimaryKey(String tableName, String pkFieldName) {
StringBuilder b = new StringBuilder();
b.append(" PRIMARY KEY(");
b.append(processID(pkFieldName));
b.append(")\n");
return b.toString();
}
protected SQLAction renderInsert(DDLTable ddlTable, DDLValue[] ddlValues) {
final StringBuilder columns = new StringBuilder();
final StringBuilder values = new StringBuilder();
for (DDLValue v : ddlValues) {
columns.append(processID(v.getField().getName())).append(",");
values.append(renderValue(v.getValue())).append(",");
}
columns.deleteCharAt(columns.length() - 1);
values.deleteCharAt(values.length() - 1);
return SQLAction.of(new StringBuilder()
.append("INSERT INTO ").append(withSchema(ddlTable.getName()))
.append("(").append(columns).append(")")
.append(" VALUES (").append(values).append(")"));
}
/**
* Generates the appropriate database-specific DDL statement to
* drop the specified table representation. The default implementation
* is merely "DROP TABLE tablename"
. This is suitable
* for every database that I am aware of. Any dependent database
* objects (such as triggers, functions, etc) must be rendered in
* one of the other delegate methods (such as renderDropTriggers(DDLTable)
).
*
* @param table The table representation which is to be dropped.
* @return A database-specific DDL statement which drops the specified
* table.
*/
protected SQLAction renderDropTableStatement(DDLTable table) {
return SQLAction.of("DROP TABLE " + withSchema(table.getName()));
}
/**
* Generates the database-specific DDL statements required to create
* all of the functions, sequences, and triggers necessary for the given table,
* by calling {@link #renderAccessoriesForField(NameConverters, DDLTable, DDLField)}
* for each of the table's fields. Each returned {@link SQLAction} has a
* corresponding {@link SQLAction#getUndoAction() undo action} that deletes
* the corresponding function, sequence, or trigger.
*
* @param nameConverters
* @param table The table for which the objects must be generated.
* @return An ordered list of {@link SQLAction}s.
*/
protected final Iterable<SQLAction> renderAccessories(final NameConverters nameConverters, final DDLTable table) {
return renderFields(
table,
ddlField -> true,
ddlField -> renderAccessoriesForField(nameConverters, table, ddlField));
}
/**
* Generates the database-specific DDL statements required to drop
* all of the functions, sequences, and triggers associated with the given table,
* by calling {@link #renderDropAccessoriesForField(NameConverters, DDLTable, DDLField)}
* for each of the table's fields.
*
* @param nameConverters
* @param table The table for which the objects must be dropped.
* @return An ordered list of {@link SQLAction}s.
*/
protected final Iterable<SQLAction> renderDropAccessories(final NameConverters nameConverters, final DDLTable table) {
return renderFields(
table,
ddlField -> true,
ddlField -> renderDropAccessoriesForField(nameConverters, table, ddlField));
}
/**
* Generates database-specific DDL statements required to create any functions,
* sequences, or triggers required for the given field. Each returned {@link SQLAction}
* should have a corresponding {@link SQLAction#getUndoAction() undo action} that deletes
* the corresponding function, sequence, or trigger. The default implementation returns
* an empty list.
*
* @param nameConverters
* @param table
* @param field
* @return an ordered list of {@link SQLAction}s
*/
protected Iterable<SQLAction> renderAccessoriesForField(NameConverters nameConverters, DDLTable table, DDLField field) {
return emptyList();
}
/**
* Generates database-specific DDL statements required to drop any functions,
* sequences, or triggers associated with the given field. The default implementation
* returns an empty list.
*
* @param nameConverters
* @param table
* @param field
* @return an ordered list of {@link SQLAction}s
*/
protected Iterable<SQLAction> renderDropAccessoriesForField(NameConverters nameConverters, DDLTable table, DDLField field) {
return emptyList();
}
protected final Iterable<SQLAction> renderFields(final DDLTable table,
final Predicate<DDLField> filter,
final Function<DDLField, Iterable<SQLAction>> render) {
return stream(table.getFields())
.filter(filter)
.map(render)
.flatMap(it -> StreamSupport.stream(it.spliterator(), false))
.collect(toList());
}
/**
* Generates the database-specific DDL statements required to add
* a column to an existing table. Included in the return value
* should be the statements required to add all necessary functions
* and triggers to ensure that the column acts appropriately. For
* example, if the field is tagged with an @OnUpdate
* annotation, chances are there will be a trigger and possibly a
* function along with the ALTER statement. These "extra"
* functions are properly ordered and will only be appended if
* their values are not null. Because of this, very
* few database providers will need to override this method.
*
* Each {@link SQLAction} should have a corresponding undo action;
* these will be executed in reverse order if the action needs to
* be rolled back.
*
* @param nameConverters
* @param table The table which should receive the new column.
* @param field The column to add to the specified table.
* @return An array of DDL statements to execute.
* @see #renderFunctionForField(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField)
* @see #renderTriggerForField(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.SequenceNameConverter, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField)
*/
protected Iterable<SQLAction> renderAlterTableAddColumn(NameConverters nameConverters, DDLTable table, DDLField field) {
List<SQLAction> back = new ArrayList<>();
back.add(renderAlterTableAddColumnStatement(nameConverters, table, field)
.withUndoAction(renderAlterTableDropColumnStatement(table, field)));
for (DDLForeignKey foreignKey : findForeignKeysForField(table, field)) {
back.add(renderAlterTableAddKey(foreignKey).withUndoAction(renderAlterTableDropKey(foreignKey)));
}
renderAccessoriesForField(nameConverters, table, field).forEach(back::add);
return unmodifiableList(back);
}
/**
* Generates the database-specific DDL statement for adding a column,
* but not including any corresponding sequences, triggers, etc.
*
* @param nameConverters
* @param table The table which should receive the new column.
* @param field The column to add to the specified table.
* @return A DDL statement to execute.
*/
protected SQLAction renderAlterTableAddColumnStatement(NameConverters nameConverters, DDLTable table, DDLField field) {
String addStmt = "ALTER TABLE " + withSchema(table.getName()) + " ADD COLUMN " + renderField(nameConverters, table, field, new RenderFieldOptions(true, true, true));
return SQLAction.of(addStmt);
}
/**
* Generates the database-specific DDL statements required to change
* the given column from its old specification to the given DDL value.
* This method will also generate the appropriate statements to remove
* old triggers and functions, as well as add new ones according to the
* requirements of the new field definition.
*
* The default implementation of this method functions in the manner
* specified by the MySQL database. Some databases will have to perform
* more complicated actions (such as dropping and re-adding the field)
* in order to satisfy the same use-case. Such databases should print
* a warning to stderr to ensure that the end-developer is aware of
* such restrictions.
*
* Thus, the specification for this method allows for data
* loss. Nevertheless, if the database supplies a mechanism to
* accomplish the task without data loss, it should be applied.
*
* For maximum flexibility, the default implementation of this method
* only deals with the dropping and addition of functions and triggers.
* The actual generation of the ALTER TABLE statement is done in the
* {@link #renderAlterTableChangeColumnStatement(net.java.ao.schema.NameConverters, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField, net.java.ao.schema.ddl.DDLField, net.java.ao.DatabaseProvider.RenderFieldOptions)}
* method.
*
* @param nameConverters
* @param table The table containing the column to change.
* @param oldField The old column definition.
* @param field The new column definition (defining the resultant DDL).
* @return An array of DDL statements to be executed.
* @see #getTriggerNameForField(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField)
* @see #getFunctionNameForField(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField)
* @see #renderFunctionForField(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField)
* @see #renderTriggerForField(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.SequenceNameConverter, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField)
*/
protected Iterable<SQLAction> renderAlterTableChangeColumn(NameConverters nameConverters, DDLTable table, DDLField oldField, DDLField field) {
final List<SQLAction> back = new ArrayList<>();
renderDropAccessoriesForField(nameConverters, table, oldField).forEach(back::add);
back.add(renderAlterTableChangeColumnStatement(nameConverters, table, oldField, field, renderFieldOptionsInAlterColumn()));
renderAccessoriesForField(nameConverters, table, field).forEach(back::add);
return unmodifiableList(back);
}
protected RenderFieldOptions renderFieldOptionsInAlterColumn() {
return new RenderFieldOptions(true, true, true, true);
}
/**
* Generates the database-specific DDL statement only for altering a table and
* changing a column. This method must only generate a single statement as it
* does not need to concern itself with functions or triggers associated with
* the column. This method is only to be called as a delegate for the
* {@link #renderAlterTableChangeColumn(net.java.ao.schema.NameConverters, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField, net.java.ao.schema.ddl.DDLField)} method,
* for which it is a primary delegate. The default implementation of this
* method functions according to the MySQL specification.
*
* @param nameConverters
* @param table The table containing the column to change.
* @param oldField The old column definition.
* @param field The new column definition (defining the resultant DDL).
* @param options
* @return A single DDL statement which is to be executed.
* @see #renderField(net.java.ao.schema.NameConverters, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField, net.java.ao.DatabaseProvider.RenderFieldOptions)
*/
protected SQLAction renderAlterTableChangeColumnStatement(NameConverters nameConverters, DDLTable table, DDLField oldField, DDLField field, RenderFieldOptions options) {
StringBuilder current = new StringBuilder();
current.append("ALTER TABLE ").append(withSchema(table.getName())).append(" CHANGE COLUMN ");
current.append(processID(oldField.getName())).append(' ');
current.append(renderField(nameConverters, table, field, options));
return SQLAction.of(current);
}
/**
* Generates the database-specific DDL statements required to remove
* the specified column from the given table. This should also
* generate the necessary statements to drop all triggers and functions
* associated with the column in question. If the database being
* implemented has a non-standard syntax for dropping functions and/or
* triggers, it may be required to override this method, even if the
* syntax to drop columns is standard.
*
* @param nameConverters
* @param table The table from which to drop the column.
* @param field The column definition to remove from the table.
* @return An array of DDL statements to be executed.
* @see #getTriggerNameForField(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField)
* @see #getFunctionNameForField(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField)
*/
protected Iterable<SQLAction> renderAlterTableDropColumn(NameConverters nameConverters, DDLTable table, DDLField field) {
List<SQLAction> back = new ArrayList<>();
for (DDLForeignKey foreignKey : findForeignKeysForField(table, field)) {
back.add(renderAlterTableDropKey(foreignKey));
}
renderDropAccessoriesForField(nameConverters, table, field).forEach(back::add);
back.add(renderAlterTableDropColumnStatement(table, field));
return unmodifiableList(back);
}
protected SQLAction renderAlterTableDropColumnStatement(DDLTable table, DDLField field) {
String dropStmt = "ALTER TABLE " + withSchema(table.getName()) + " DROP COLUMN " + processID(field.getName());
return SQLAction.of(dropStmt);
}
/**
* Generates the database-specific DDL statement required to add a
* foreign key to a table. For databases which do not support such
* a statement, a warning should be printed to stderr and a
* null value returned.
*
* @param key The foreign key to be added. As this instance contains
* all necessary data (such as domestic table, field, etc), no
* additional parameters are required.
* @return A DDL statement to be executed, or null.
* @see #renderForeignKey(DDLForeignKey)
*/
protected SQLAction renderAlterTableAddKey(DDLForeignKey key) {
return SQLAction.of("ALTER TABLE " + withSchema(key.getDomesticTable()) + " ADD " + renderForeignKey(key));
}
/**
* Generates the database-specific DDL statement required to remove a
* foreign key from a table. For databases which do not support such
* a statement, a warning should be printed to stderr and a
* null value returned. This method assumes that the
* {@link #renderForeignKey(DDLForeignKey)} method properly names
* the foreign key according to the {@link DDLForeignKey#getFKName()}
* method.
*
* @param key The foreign key to be removed. As this instance contains
* all necessary data (such as domestic table, field, etc), no
* additional parameters are required.
* @return A DDL statement to be executed, or null.
*/
protected SQLAction renderAlterTableDropKey(DDLForeignKey key) {
return SQLAction.of("ALTER TABLE " + withSchema(key.getDomesticTable()) + " DROP FOREIGN KEY " + processID(key.getFKName()));
}
/**
* Generates the database-specific DDL statement required to create
* a new index. The syntax for this operation is highly standardized
* and thus it is unlikely this method will be overridden. If the
* database in question does not support indexes, a warning should
* be printed to stderr and null returned.
*
* @param indexNameConverter
* @param index The index to create. This single instance contains all
* of the data necessary to create the index, thus no separate
* parameters (such as a DDLTable) are required.
* @return A DDL statement to be executed.
*/
protected SQLAction renderCreateIndex(IndexNameConverter indexNameConverter, DDLIndex index) {
String statement = "CREATE INDEX " + withSchema(index.getIndexName())
+ " ON " + withSchema(index.getTable()) +
Stream.of(index.getFields())
.map(DDLIndexField::getFieldName)
.map(this::processID)
.collect(joining(",", "(", ")"));
return SQLAction.of(statement);
}
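// Illustrative only: for an index named "index_person_name" on person(name)
// this renders roughly
//
//     CREATE INDEX index_person_name ON person(name)
//
// with withSchema/processID applying any schema prefix and identifier quoting.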
/**
* Generates the database-specific DDL statement required to create
* a new composite index. This is only used by AO integration tests
* to create a composite index for testing. AO does not provide clients with
* a feature to manage composite indexes in any way.
*
* @param tableName The name of the database table
* @param indexName The name of the new index
* @param fields List of fields that make up the index
* @return A DDL statement to be executed.
* @deprecated Use {@link #renderCreateIndex(IndexNameConverter, DDLIndex)} for creating indexes.
*/
@Deprecated
public SQLAction renderCreateCompositeIndex(String tableName, String indexName, List<String> fields) {
StringBuilder statement = new StringBuilder();
statement.append("CREATE INDEX " + processID(indexName));
statement.append(" ON " + withSchema(tableName));
statement.append(" (");
boolean needDelimiter = false;
for (String field : fields) {
if (needDelimiter) {
statement.append(",");
}
statement.append(processID(field));
needDelimiter = true;
}
statement.append(")");
return SQLAction.of(statement);
}
/**
* Generates the database-specific DDL statement required to drop
* an index. The syntax for this operation is highly standardized
* and thus it is unlikely this method will be overridden. If the
* database in question does not support indexes, a warning should
* be printed to stderr and null returned.
*
* @param indexNameConverter
* @param index The index to drop. This single instance contains all
* of the data necessary to drop the index, thus no separate
* parameters (such as a DDLTable) are required.
* @return A DDL statement to be executed, or null.
*/
protected SQLAction renderDropIndex(IndexNameConverter indexNameConverter, DDLIndex index) {
final String indexName = index.getIndexName();
final String tableName = index.getTable();
if (hasIndex(tableName, indexName)) {
return SQLAction.of("DROP INDEX " + withSchema(indexName) + " ON " + withSchema(tableName));
} else {
return null;
}
}
protected boolean hasIndex(IndexNameConverter indexNameConverter, DDLIndex index) {
final String indexName = index.getIndexName();
return hasIndex(index.getTable(), indexName);
}
protected boolean hasIndex(String tableName, String indexName) {
Connection connection = null;
try {
connection = getConnection();
ResultSet indexes = getIndexes(connection, tableName);
while (indexes.next()) {
if (indexName.equalsIgnoreCase(indexes.getString("INDEX_NAME"))) {
return true;
}
}
return false;
} catch (SQLException e) {
throw new ActiveObjectsException(e);
} finally {
closeQuietly(connection);
}
}
/**
* Generates any database-specific options which must be appended
* to the end of a table definition. The only database I am aware
* of which requires this is MySQL. For example:
*
* CREATE TABLE test (
* id INTEGER NOT NULL AUTO_INCREMENT,
* name VARCHAR(45),
* PRIMARY KEY(id)
* ) ENGINE=InnoDB;
*
* The "ENGINE=InnoDB
" clause is what is returned by
* this method. The default implementation simply returns
* null
, signifying that no append should be rendered.
*
* @return A DDL clause to be appended to the CREATE TABLE DDL, or null
*/
protected String renderAppend() {
return null;
}
/**
* Generates the database-specific DDL fragment required to render the
* field and its associated type. This includes all field attributes,
* such as @NotNull, @AutoIncrement (if
* supported by the database at the field level) and so on. Sample
* return value:
*
* name VARCHAR(255) DEFAULT "Skye" NOT NULL
*
* Certain databases don't allow defined precision for certain types
* (such as Derby and INTEGER). The logic for whether or not to render
* precision should not be within this method, but delegated to the
* {@link #considerPrecision(DDLField)} method.
*
* Almost all functionality within this method is delegated to other
* methods within the implementation. As such, it is almost never
* necessary to override this method directly. An exception to this
* would be a database like PostgreSQL which requires a different type
* for auto-incremented fields.
*
* @param nameConverters
* @param table
* @param field The field to be rendered.
* @param options
* @return A DDL fragment to be embedded in a statement elsewhere.
*/
protected final String renderField(NameConverters nameConverters, DDLTable table, DDLField field, RenderFieldOptions options) {
StringBuilder back = new StringBuilder();
back.append(processID(field.getName()));
back.append(" ");
back.append(renderFieldType(field));
if (field.isAutoIncrement()) {
String autoIncrementValue = renderAutoIncrement();
if (!autoIncrementValue.trim().equals("")) {
back.append(' ').append(autoIncrementValue);
}
} else if ((options.forceNull && !field.isNotNull() && !field.isUnique() && !field.isPrimaryKey()) || (options.renderDefault && field.getDefaultValue() != null)) {
back.append(renderFieldDefault(table, field));
}
if (options.renderUnique && field.isUnique()) {
final String renderUniqueString = renderUnique(nameConverters.getUniqueNameConverter(), table, field);
if (!renderUniqueString.trim().equals("")) {
back.append(' ').append(renderUniqueString);
}
}
if (options.renderNotNull && (field.isNotNull() || field.isUnique())) {
back.append(" NOT NULL");
}
return back.toString();
}
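// Worked example (assuming processID() returns the column name unchanged and
// convertTypeToString() renders the type as VARCHAR(255)): for a NOT NULL
// String field "name" with default value "Skye", and options with renderDefault
// and renderNotNull enabled, this method produces the fragment
//
//     name VARCHAR(255) DEFAULT 'Skye' NOT NULL
//
// Note that renderValue() quotes the default with single quotes rather than the
// double quotes shown in the older Javadoc sample above.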
protected String renderFieldDefault(DDLTable table, DDLField field) {
return new StringBuilder().append(" DEFAULT ").append(renderValue(field.getDefaultValue())).toString();
}
/**
* Renders the given Java instance in a database-specific way. This
* method handles special cases such as {@link Calendar},
* {@link Boolean} (which is always rendered as 0/1), functions,
* <code>null</code> and numbers. All other values are rendered (by
* default) as <code>'value.toString()'</code> (the String value
* enclosed within single quotes). Implementations are encouraged to
* override this method as necessary.
*
* @param value The Java instance to be rendered as a database literal.
* @return The database-specific String rendering of the instance in
* question.
* @see #renderDate(Date)
*/
protected String renderValue(Object value) {
if (value == null) {
return "NULL";
} else if (value instanceof Date) {
return "'" + renderDate((Date) value) + "'";
} else if (value instanceof Boolean) {
return ((Boolean) value ? "1" : "0");
} else if (value instanceof Number) {
return value.toString();
}
return "'" + value.toString() + "'";
}
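// Examples of the default rendering (input values are arbitrary):
//
//     renderValue(null)          -> NULL
//     renderValue(Boolean.TRUE)  -> 1
//     renderValue(42)            -> 42
//     renderValue("Skye")        -> 'Skye'
//     renderValue(aDate)         -> the getDateFormat()-formatted date in single quotes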
/**
* Renders the provided {@link Date} instance as a DATETIME literal
* in the database-specific format. The return value should not
* be enclosed within quotes, as this is accomplished within other
* functions when rendering is required. This method is actually a
* boiler-plate usage of the {@link SimpleDateFormat} class, using the
* date format defined within the {@link #getDateFormat()} method.
*
* @param date The time instance to be rendered.
* @return The database-specific String representation of the time.
*/
protected String renderDate(Date date) {
return new SimpleDateFormat(getDateFormat()).format(date);
}
/**
* Renders the <code>UNIQUE</code> constraint as defined by the
* database-specific DDL syntax. This method is a delegate of other, more
* complex methods such as {@link #renderField(net.java.ao.schema.NameConverters, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField, net.java.ao.DatabaseProvider.RenderFieldOptions)}. The default
* implementation just returns <code>UNIQUE</code>. Implementations may
* override this method to return an empty {@link String} if the database
* in question does not support the constraint.
*
* @param uniqueNameConverter
* @param table
* @param field
* @return The database-specific rendering of <code>UNIQUE</code>.
*/
protected String renderUnique(UniqueNameConverter uniqueNameConverter, DDLTable table, DDLField field) {
return "UNIQUE";
}
/**
* Returns the database-specific TIMESTAMP text format as defined by
* the {@link SimpleDateFormat} syntax. This format should include
* the time down to the second (or even more precise, if allowed by
* the database). The default implementation returns the format for
* MySQL, which is: yyyy-MM-dd HH:mm:ss
*
* @return The database-specific TIMESTAMP text format
*/
protected String getDateFormat() {
return "yyyy-MM-dd HH:mm:ss";
}
/**
* Renders the database-specific DDL type for the field in question.
* This method merely delegates to the {@link #convertTypeToString(TypeInfo)}
* method, passing the field type. Thus, it is rarely necessary
* (if ever) to override this method. It may be deprecated in a
* future release.
*
* @param field The field which contains the type to be rendered.
* @return The database-specific type DDL rendering.
*/
protected String renderFieldType(DDLField field) {
return convertTypeToString(field.getType());
}
public Object handleBlob(ResultSet res, Class<?> type, String field) throws SQLException {
final Blob blob = res.getBlob(field);
if (blob == null) {
return null;
}
if (type.equals(InputStream.class)) {
return blob.getBinaryStream();
} else if (type.equals(byte[].class)) {
return blob.getBytes(1, (int) blob.length());
} else {
return null;
}
}
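// Usage sketch (the result set, provider reference and column name are
// illustrative): a caller materialising a BLOB column either as a byte array or
// as a stream might do
//
//     byte[] bytes = (byte[]) provider.handleBlob(res, byte[].class, "attachment");
//     InputStream stream = (InputStream) provider.handleBlob(res, InputStream.class, "attachment");
//
// Any other requested type, or a NULL column value, yields null.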
/**
* Retrieves the name of the trigger which corresponds to the field
* in question (if any). If no trigger will be automatically created
* for the specified field, <code>null</code> should be returned.
* This function is to allow for databases which require the use of
* triggers on a field to allow for certain functionality (like
* ON UPDATE). The default implementation returns <code>null</code>.
*
* @param triggerNameConverter
* @param table The table which contains the field for which a trigger
* may or may not exist.
* @param field The field for which a previous migration may have
* created a trigger.
* @return The unique name of the trigger which was created for the
* field, or <code>null</code> if none.
* @see #renderTriggerForField(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.SequenceNameConverter, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField)
*/
protected String _getTriggerNameForField(TriggerNameConverter triggerNameConverter, DDLTable table, DDLField field) {
return null;
}
/**
* Renders the trigger which corresponds to the specified field, or
* <code>null</code> if none. This is to allow for databases which
* require the use of triggers to provide functionality such as ON
* UPDATE. The default implementation returns <code>null</code>.
*
* @param nameConverters
* @param table The table containing the field for which a trigger
* may need to be rendered.
* @param field The field for which the trigger should be rendered,
* if any.
* @return A database-specific DDL statement creating a trigger for
* the field in question, or <code>null</code>.
* @see #getTriggerNameForField(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField)
*/
protected SQLAction _renderTriggerForField(NameConverters nameConverters, DDLTable table, DDLField field) {
return null;
}
/**
* Renders SQL statement(s) to drop the trigger which corresponds to the
* specified field, or <code>null</code> if none.
*
* @param nameConverters
* @param table The table containing the field for which a trigger
* may need to be rendered.
* @param field The field for which the trigger should be rendered,
* if any.
* @return A database-specific DDL statement to drop a trigger for
* the field in question, or <code>null</code>.
*/
protected SQLAction _renderDropTriggerForField(NameConverters nameConverters, DDLTable table, DDLField field) {
final String trigger = _getTriggerNameForField(nameConverters.getTriggerNameConverter(), table, field);
if (trigger != null) {
return SQLAction.of("DROP TRIGGER " + processID(trigger));
}
return null;
}
/**
* Retrieves the name of the function which corresponds to the field
* in question (if any). If no function will be automatically created
* for the specified field, <code>null</code> should be returned.
* This method is to allow for databases which require the use of
* explicitly created functions which correspond to triggers (e.g.
* PostgreSQL). Few providers will need to override the default
* implementation of this method, which returns <code>null</code>.
*
* @param triggerNameConverter
* @param table The table which contains the field for which a function
* may or may not exist.
* @param field The field for which a previous migration may have
* created a function.
* @return The unique name of the function which was created for the
* field, or <code>null</code> if none.
*/
protected String _getFunctionNameForField(TriggerNameConverter triggerNameConverter, DDLTable table, DDLField field) {
final String triggerName = _getTriggerNameForField(triggerNameConverter, table, field);
return triggerName != null ? triggerName + "()" : null;
}
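// Worked example of the default naming scheme (the trigger name is
// illustrative): if _getTriggerNameForField() returns "person_age_onupdate",
// this method reports the function name "person_age_onupdate()", matching the
// PostgreSQL convention of pairing each trigger with an explicitly created
// function.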
/**
* Renders the function which corresponds to the specified field, or
* <code>null</code> if none. This is to allow for databases which
* require the use of triggers and explicitly created functions to
* provide functionality such as ON UPDATE (e.g. PostgreSQL). The
* default implementation returns <code>null</code>.
*
* @param nameConverters
* @param table The table containing the field for which a function
* may need to be rendered.
* @param field The field for which the function should be rendered,
* if any.
* @return A database-specific DDL statement creating a function for
* the field in question, or <code>null</code>.
* @see #getFunctionNameForField(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField)
*/
protected SQLAction _renderFunctionForField(NameConverters nameConverters, DDLTable table, DDLField field) {
return null;
}
/**
* Renders SQL statement(s) to drop the function which corresponds to the
* specified field, if applicable, or <code>null</code> otherwise.
*
* @param nameConverters
* @param table The table containing the field for which a function
* may need to be rendered.
* @param field The field for which the function should be rendered,
* if any.
* @return A database-specific DDL statement to drop a function for
* the field in question, or <code>null</code>.
*/
protected SQLAction _renderDropFunctionForField(NameConverters nameConverters, DDLTable table, DDLField field) {
final String functionName = _getFunctionNameForField(nameConverters.getTriggerNameConverter(), table, field);
if (functionName != null) {
return SQLAction.of("DROP FUNCTION " + processID(functionName));
}
return null;
}
/**
* Renders the SQL for creating a sequence for the specified field, or
* <code>null</code> if none. The default implementation returns <code>null</code>.
*
* @param nameConverters
* @param table The table containing the field for which a sequence
* may need to be rendered.
* @param field The field for which the sequence should be rendered,
* if any.
* @return A database-specific DDL statement creating a sequence for
* the field in question, or <code>null</code>.
*/
protected SQLAction _renderSequenceForField(NameConverters nameConverters, DDLTable table, DDLField field) {
return null;
}
/**
* Renders SQL statement(s) to drop the sequence which corresponds to the
* specified field, or <code>null</code> if none.
*/
protected SQLAction _renderDropSequenceForField(NameConverters nameConverters, DDLTable table, DDLField field) {
return null;
}
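// Sketch of how a sequence-based provider might override the hook above (the
// naming scheme and syntax are assumptions modelled on PostgreSQL-style
// databases; a real provider would use the configured SequenceNameConverter):
//
//     @Override
//     protected SQLAction _renderSequenceForField(NameConverters nameConverters, DDLTable table, DDLField field) {
//         if (!field.isAutoIncrement()) {
//             return null;
//         }
//         return SQLAction.of("CREATE SEQUENCE "
//                 + processID(table.getName() + "_" + field.getName() + "_seq"));
//     }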
/**
* Generates an INSERT statement to be used to create a new row in the
* database, returning the primary key value. This method also invokes
* the delegate method, {@link #executeInsertReturningKey(EntityManager, java.sql.Connection, Class, String, String, DBParam...)}
* passing the appropriate parameters and query. This method is required
* because some databases do not support the JDBC parameter
* <code>RETURN_GENERATED_KEYS</code> (such as HSQLDB and PostgreSQL).
* Also, some databases (such as MS SQL Server) require odd tricks to
* support explicit value passing to auto-generated fields. This method
* should take care of any extra queries or odd SQL generation required
* to implement both auto-generated primary key returning, as well as
* explicit primary key value definition.
*
* Overriding implementations of this method should be sure to use the
* {@link Connection} instance passed to the method, not a new
* instance generated using the {@link #getConnection()} method. This is
* because this method is in fact a delegate invoked by {@link EntityManager}
* as part of the entity creation process and may be part of a transaction,
* a bulk creation or some more complicated operation. Both optimization
* and usage patterns on the API dictate that the specified connection
* instance be used. Implementations may assume that the given connection
* instance is never <code>null</code>.
*
* The default implementation of this method should be sufficient for any
* fully compliant ANSI SQL database with a properly implemented JDBC
* driver. Note that this method should not actually execute
* the SQL it generates, but pass it on to the {@link #executeInsertReturningKey(EntityManager, java.sql.Connection, Class, String, String, DBParam...)}
* method, allowing for functional delegation and better extensibility.
* However, this method may execute any additional statements required to
* prepare for the INSERTion (as in the case of MS SQL Server which requires
* some config parameters to be set on the database itself prior to INSERT).
*
* @param manager The <code>EntityManager</code> which was used to dispatch
* the INSERT in question.
* @param conn The connection to be used in the eventual execution of the
* generated SQL statement.
* @param entityType The Java class of the entity.
* @param pkType The Java type of the primary key value. Can be used to
* perform a linear search for a specified primary key value in the
* <code>params</code> list. The return value of the method must be of
* the same type.
* @param pkField The database field which is the primary key for the
* table in question. Can be used to perform a linear search for a
* specified primary key value in the <code>params</code> list.
* @param pkIdentity Flag indicating whether or not the primary key field
* is auto-incremented by the database (IDENTITY field).
* @param table The name of the table into which the row is to be INSERTed.
* @param params A varargs array of parameters to be passed to the
* INSERT statement. This may include a specified value for the
* primary key.
* @throws SQLException If the INSERT fails in the delegate method, or
* if any additional statements fail with an exception.
* @see #executeInsertReturningKey(EntityManager, java.sql.Connection, Class, String, String, DBParam...)
*/
@SuppressWarnings("unused")
public <T extends RawEntity<K>, K> K insertReturningKey(EntityManager manager, Connection conn,
                                                         Class<T> entityType, Class<K> pkType,
                                                         String pkField, boolean pkIdentity, String table, DBParam... params) throws SQLException {
final String[] fieldNames = Stream.of(params)
.map(DBParam::getField)
.toArray(String[]::new);
final String sql = generateInsertSql(pkField, table, fieldNames);
return executeInsertReturningKey(manager, conn, entityType, pkType, pkField, sql, params);
}
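// Conceptual sketch (the exact text comes from generateInsertSql(), defined
// elsewhere in this class): for params targeting the fields "name" and "age",
// the generated statement is a plain parameterized INSERT along the lines of
//
//     INSERT INTO person (name,age) VALUES (?,?)
//
// which is passed, unexecuted, to executeInsertReturningKey() together with the
// original params so that the delegate can bind the values and fetch the key.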
/**
* Inserts a batch of rows.
*
* @param manager The <code>EntityManager</code> which was used to dispatch
* the INSERT in question.
* @param conn The connection to be used in the eventual execution of the
* generated SQL statement.
* @param entityType The Java class of the entity.
* @param pkType The Java type of the primary key value.
* @param pkField The database field which is the primary key for the
* table in question.
* @param pkIdentity Flag indicating whether or not the primary key field
* is auto-incremented by the database (IDENTITY field).
* @param table The name of the table into which the rows are to be INSERTed.
* @param rows A list of rows to be INSERTed. A row is represented as a map from column name to its value.
* All rows must have the same columns.
*/
public <T extends RawEntity<K>, K> void insertBatch(EntityManager manager, Connection conn,
                                                     Class<T> entityType, Class<K> pkType,
String pkField, boolean pkIdentity, String table,
List