// net.java.ao.DatabaseProvider (ActiveObjects) — non-source repository-page header removed.
/*
* Copyright 2007 Daniel Spiewak
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.java.ao;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import net.java.ao.schema.IndexNameConverter;
import net.java.ao.schema.NameConverters;
import net.java.ao.schema.SequenceNameConverter;
import net.java.ao.schema.TableNameConverter;
import net.java.ao.schema.TriggerNameConverter;
import net.java.ao.schema.UniqueNameConverter;
import net.java.ao.schema.ddl.DDLAction;
import net.java.ao.schema.ddl.DDLActionType;
import net.java.ao.schema.ddl.DDLField;
import net.java.ao.schema.ddl.DDLForeignKey;
import net.java.ao.schema.ddl.DDLIndex;
import net.java.ao.schema.ddl.DDLTable;
import net.java.ao.schema.ddl.DDLValue;
import net.java.ao.schema.ddl.SchemaReader;
import net.java.ao.sql.SqlUtils;
import net.java.ao.types.TypeInfo;
import net.java.ao.types.TypeManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.InputStream;
import java.sql.Blob;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArraySet;
import static com.google.common.base.Preconditions.*;
import static com.google.common.collect.Iterables.transform;
import static com.google.common.collect.Lists.newArrayList;
import static net.java.ao.Common.*;
/**
* The superclass parent of all DatabaseProvider
* implementations. Various implementations allow for an abstraction
* around database-specific functionality (such as DDL). DatabaseProvider(s)
* also handle the creation of new {@link Connection} instances and
* fully encapsulate the raw JDBC driver. Any database-specific
* code should be placed in the database provider, rather than embedded
* within the API logic.
*
* This superclass contains a base-line, default implementation of most
* database-specific methods, thus requiring a minimum of work to implement
* a new database provider. For the sake of sanity (read: mine), this
* base-line implementation is basically specific to MySQL. Thus any
* DatabaseProvider implementations are really specifying the
* differences between the database in question and MySQL. To fully
* utilize the default implementations provided in this class, this fact should
* be kept in mind.
*
* This class also handles the implementation details required to ensure
* that only one active {@link Connection} instance is available per thread. This is
* in fact a very basic (and naive) form of connection pooling. It should
* not be relied upon for performance reasons. Instead one should enable
* connection pooling via the {@link javax.sql.DataSource} passed to instances. You can
* also use the net.java.ao.builder.EntityManagerBuilder builder to make it
* easier to configure the pooling. The purpose of the thread-locked connection pooling
* in this class is to satisfy transactions with external SQL statements.
*
* @author Daniel Spiewak
*/
public abstract class DatabaseProvider
{
// Per-instance logger, named after the concrete provider class.
protected final Logger logger = LoggerFactory.getLogger(this.getClass());
// Dedicated logger for emitted SQL statements (fed by LoggingSqlListener).
protected final Logger sqlLogger = LoggerFactory.getLogger("net.java.ao.sql");
// Listeners notified of executed SQL. NOTE(review): left raw because the element
// type was stripped from this copy of the source (presumably Set<SqlListener>) —
// verify against upstream before parameterizing.
private final Set sqlListeners;
// Holds the connection of the currently open transaction on this thread, if any.
// Fixed: restored ThreadLocal<Connection> — the raw type (generics stripped in this
// copy) would not compile against getConnection()/startTransaction() usage.
private final ThreadLocal<Connection> transactionThreadLocal = new ThreadLocal<Connection>();
// Underlying (possibly pooled) data source; released in dispose().
private final DisposableDataSource dataSource;
// Maps Java types to database types; exposed via getTypeManager().
protected final TypeManager typeManager;
// Configured schema name, or null when none was supplied.
private final String schema;
// Identifier quote string reported by the JDBC driver; set once in loadQuoteString().
private String quote;
/**
 * Creates a provider backed by the given data source and type manager.
 *
 * @param dataSource source of JDBC connections; must not be null
 * @param schema optional schema name used to qualify identifiers; blank is normalised to null
 * @param typeManager maps Java types to database types; not null-checked here
 */
protected DatabaseProvider(DisposableDataSource dataSource, String schema, TypeManager typeManager)
{
    this.dataSource = checkNotNull(dataSource);
    this.typeManager = typeManager;
    this.schema = isBlank(schema) ? null : schema; // can be null
    this.sqlListeners = new CopyOnWriteArraySet();
    this.sqlListeners.add(new LoggingSqlListener(sqlLogger));
    // Eagerly caches the driver's identifier quote string; fails fast when no
    // connection can be obtained.
    loadQuoteString();
}
/**
 * Convenience constructor that uses a default {@link TypeManager}.
 *
 * @param dataSource source of JDBC connections; must not be null
 * @param schema optional schema name; blank is normalised to null
 */
protected DatabaseProvider(DisposableDataSource dataSource, String schema)
{
    this(dataSource, schema, new TypeManager.Builder().build());
}
/**
 * @return the type manager used by this provider; may be null if null was passed
 *         to the three-argument constructor (which does not null-check it)
 */
public final TypeManager getTypeManager()
{
    return typeManager;
}
/**
 * @return the configured database schema name, or null when none was configured
 */
public String getSchema()
{
    return schema;
}
/**
 * Loads and caches the identifier quote string reported by the JDBC driver.
 * No-op once the value has been loaded; synchronized, so the check-then-load
 * sequence is race-free.
 *
 * @throws IllegalStateException when no connection could be obtained
 * @throws RuntimeException wrapping any SQLException raised while reading metadata
 */
private synchronized void loadQuoteString()
{
    if (quote != null)
    {
        // Already cached.
        return;
    }
    Connection conn = null;
    try
    {
        conn = getConnection();
        if (conn == null)
        {
            throw new IllegalStateException("Could not get connection to load quote String");
        }
        // trim() because some drivers report a single space to mean "no quoting".
        quote = conn.getMetaData().getIdentifierQuoteString().trim();
    }
    catch (SQLException e)
    {
        throw new RuntimeException("Unable to query the database", e);
    }
    finally
    {
        closeQuietly(conn);
    }
}
/**
 * Generates the DDL fragment required to specify an INTEGER field as
 * auto-incremented. For databases which do not support such flags (which
 * is just about every database except MySQL), an empty string is an
 * acceptable return value. This method should never return null,
 * as that would cause the field rendering method to throw a {@link NullPointerException}.
 *
 * @return the database-specific auto-increment keyword(s); never null
 */
protected String renderAutoIncrement()
{
    return "AUTO_INCREMENT";
}
/**
 * Top level delegating method for the process of rendering a database-agnostic
 * {@link DDLAction} into the database-specific DDL statement(s). It is
 * doubtful that any implementations will have to override this method as the
 * default implementation is database-agnostic.
 *
 * @param nameConverters supplies the trigger/sequence/index/unique name converters used by the delegates
 * @param action The database-agnostic action to render.
 * @return An array of DDL statements specific to the database in question.
 * @see #renderTable(net.java.ao.schema.NameConverters, net.java.ao.schema.ddl.DDLTable)
 * @see #renderFunctions(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.ddl.DDLTable)
 * @see #renderTriggers(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.SequenceNameConverter, net.java.ao.schema.ddl.DDLTable)
 * @see #renderSequences(net.java.ao.schema.SequenceNameConverter, net.java.ao.schema.ddl.DDLTable)
 * @see #renderDropTriggers(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.ddl.DDLTable)
 * @see #renderDropFunctions(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.ddl.DDLTable)
 * @see #renderDropSequences(net.java.ao.schema.SequenceNameConverter, net.java.ao.schema.ddl.DDLTable)
 * @see #renderDropTable(DDLTable)
 * @see #renderAlterTableAddColumn(net.java.ao.schema.NameConverters, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField)
 * @see #renderAlterTableChangeColumn(net.java.ao.schema.NameConverters, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField, net.java.ao.schema.ddl.DDLField)
 * @see #renderAlterTableDropColumn(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField)
 * @see #renderAlterTableAddKey(DDLForeignKey)
 * @see #renderAlterTableDropKey(DDLForeignKey)
 */
public final String[] renderAction(NameConverters nameConverters, DDLAction action)
{
    // Fixed: restored List<String> — the raw List (generics stripped in this copy)
    // produced unchecked warnings and an Object-typed toArray contract.
    final List<String> back = new ArrayList<String>();
    switch (action.getActionType())
    {
        case CREATE:
            // Table first, then its supporting functions/sequences/triggers, then one
            // recursive CREATE_INDEX action per declared index.
            back.add(renderTable(nameConverters, action.getTable()));
            back.addAll(renderFunctions(nameConverters.getTriggerNameConverter(), action.getTable()));
            back.addAll(renderSequences(nameConverters.getSequenceNameConverter(), action.getTable()));
            back.addAll(renderTriggers(nameConverters.getTriggerNameConverter(), nameConverters.getSequenceNameConverter(), action.getTable()));
            for (DDLIndex index : action.getTable().getIndexes())
            {
                DDLAction newAction = new DDLAction(DDLActionType.CREATE_INDEX);
                newAction.setIndex(index);
                back.addAll(Arrays.asList(renderAction(nameConverters, newAction)));
            }
            break;
        case DROP:
            // Reverse order of CREATE: indexes, triggers, sequences, functions, table.
            for (DDLIndex index : action.getTable().getIndexes())
            {
                back.add(renderDropIndex(nameConverters.getIndexNameConverter(), index));
            }
            back.addAll(renderDropTriggers(nameConverters.getTriggerNameConverter(), action.getTable()));
            back.addAll(renderDropSequences(nameConverters.getSequenceNameConverter(), action.getTable()));
            back.addAll(Arrays.asList(renderDropFunctions(nameConverters.getTriggerNameConverter(), action.getTable())));
            back.add(renderDropTable(action.getTable()));
            break;
        case ALTER_ADD_COLUMN:
            back.addAll(renderAlterTableAddColumn(
                nameConverters, action.getTable(),
                action.getField()));
            // Create any indexes that are declared on the newly added column.
            for (DDLIndex index : action.getTable().getIndexes())
            {
                if (index.getField().equals(action.getField().getName()))
                {
                    DDLAction newAction = new DDLAction(DDLActionType.CREATE_INDEX);
                    newAction.setIndex(index);
                    back.addAll(Arrays.asList(renderAction(nameConverters, newAction)));
                }
            }
            break;
        case ALTER_CHANGE_COLUMN:
            back.addAll(renderAlterTableChangeColumn(nameConverters, action.getTable(), action.getOldField(), action.getField()));
            break;
        case ALTER_DROP_COLUMN:
            // Drop the column's indexes before dropping the column itself.
            for (DDLIndex index : action.getTable().getIndexes())
            {
                if (index.getField().equals(action.getField().getName()))
                {
                    DDLAction newAction = new DDLAction(DDLActionType.DROP_INDEX);
                    newAction.setIndex(index);
                    back.addAll(Arrays.asList(renderAction(nameConverters, newAction)));
                }
            }
            back.addAll(Arrays.asList(renderAlterTableDropColumn(nameConverters.getTriggerNameConverter(), action.getTable(), action.getField())));
            break;
        case ALTER_ADD_KEY:
            back.add(renderAlterTableAddKey(action.getKey()));
            break;
        case ALTER_DROP_KEY:
            back.add(renderAlterTableDropKey(action.getKey()));
            break;
        case CREATE_INDEX:
            back.add(renderCreateIndex(nameConverters.getIndexNameConverter(), action.getIndex()));
            break;
        case DROP_INDEX:
            back.add(renderDropIndex(nameConverters.getIndexNameConverter(), action.getIndex()));
            break;
        case INSERT:
            back.add(renderInsert(action.getTable(), action.getValues()));
    }
    return back.toArray(new String[back.size()]);
}
/**
 * Renders a database-agnostic {@link Query} into a complete (potentially
 * database-specific) SQL statement by concatenating the SELECT, JOIN, WHERE,
 * GROUP BY, ORDER BY and LIMIT fragments, each produced by its dedicated
 * renderQuery* delegate. Subclasses normally override the delegates rather
 * than this method. For example, Query.select().from(Person.class).limit(10)
 * renders on MySQL as "SELECT id FROM people LIMIT 10" but on SQL Server as
 * "SELECT TOP 10 id FROM people".
 *
 * @param query The database-agnostic Query object to be rendered in a
 *              potentially database-specific way.
 * @param converter Used to convert {@link Entity} classes into table names.
 * @param count If true, render the Query as a SELECT COUNT(*) rather than a
 *              standard field-data query.
 * @return A syntactically complete SQL statement potentially specific to the database.
 * @see #renderQuerySelect(Query, TableNameConverter, boolean)
 * @see #renderQueryJoins(Query, TableNameConverter)
 * @see #renderQueryWhere(Query)
 * @see #renderQueryGroupBy(Query)
 * @see #renderQueryOrderBy(Query)
 * @see #renderQueryLimit(Query)
 */
public String renderQuery(Query query, TableNameConverter converter, boolean count)
{
    return new StringBuilder()
            .append(renderQuerySelect(query, converter, count))
            .append(renderQueryJoins(query, converter))
            .append(renderQueryWhere(query))
            .append(renderQueryGroupBy(query))
            .append(renderQueryOrderBy(query))
            .append(renderQueryLimit(query))
            .toString();
}
/**
 * Parses the database-agnostic String value relevant to the specified SQL
 * type in int form (as defined by {@link Types}) and returns the Java value
 * which corresponds. This method is completely database-agnostic, as are all
 * of its delegate methods.
 *
 * WARNING: This method is being considered for removal to another
 * class (perhaps {@link TypeManager}?) as it is not a database-specific function and thus
 * confuses the purpose of this class. Do not rely upon it heavily. (better yet, don't rely on it
 * at all from external code. It's not designed to be part of the public API)
 *
 * @param type The JDBC integer type of the database field against which to parse the
 *             value.
 * @param value The database-agnostic String value to parse into a proper Java object
 *              with respect to the specified SQL type.
 * @return A Java value which corresponds to the specified String, or null when the
 *         value is null/"NULL", cannot be parsed, or the type is unhandled.
 */
public Object parseValue(int type, String value)
{
    if (value == null || value.equals("NULL"))
    {
        return null;
    }
    try
    {
        switch (type)
        {
            case Types.BIGINT:
                return Long.parseLong(value);
            case Types.BIT:
                return Byte.parseByte(value);
            case Types.BOOLEAN:
                try
                {
                    // SQL convention: zero is false, any non-zero value is true.
                    // Fixed: the comparison was previously inverted ("0" parsed as
                    // true, "1" as false).
                    return Integer.parseInt(value) != 0;
                }
                catch (NumberFormatException e)
                {
                    // Not numeric; fall back to textual true/false parsing.
                    return Boolean.parseBoolean(value);
                }
            case Types.CHAR:
                // Fixed: the parsed character was previously discarded (the case
                // broke out of the switch and fell through to the final return null).
                return value.charAt(0);
            case Types.DATE:
            case Types.TIMESTAMP:
                // Both temporal types share the provider's date format.
                return parseCalendar(value);
            case Types.DECIMAL:
                return Double.parseDouble(value);
            case Types.DOUBLE:
                return Double.parseDouble(value);
            case Types.FLOAT:
                return Float.parseFloat(value);
            case Types.INTEGER:
                return Integer.parseInt(value);
            case Types.NUMERIC:
                return Integer.parseInt(value);
            case Types.REAL:
                return Double.parseDouble(value);
            case Types.SMALLINT:
                return Short.parseShort(value);
            case Types.TINYINT:
                return Short.parseShort(value);
            case Types.VARCHAR:
                return value;
        }
    }
    catch (Throwable t)
    {
        // Deliberate best-effort parse: any failure yields null below rather than
        // propagating to the caller.
    }
    return null;
}

/**
 * Parses a temporal value with {@link SimpleDateFormat} using the provider's
 * getDateFormat() pattern. Returns null when the text does not match the format.
 */
private Calendar parseCalendar(String value)
{
    try
    {
        Calendar back = Calendar.getInstance();
        back.setTime(new SimpleDateFormat(getDateFormat()).parse(value));
        return back;
    }
    catch (ParseException e)
    {
        return null;
    }
}
/**
 * Allows the provider to set database-specific options on a
 * {@link Statement} instance prior to its usage in a SELECT
 * query. This is to allow things like emulation of the
 * LIMIT feature on databases which don't support it within
 * the SQL implementation. Only called on SELECTs.
 * The default implementation does nothing.
 *
 * @param stmt The instance against which the properties
 *             should be set.
 * @param query The query which is being executed against
 *              the statement instance.
 */
public void setQueryStatementProperties(Statement stmt, Query query) throws SQLException
{
}
/**
 * Allows the provider to set database-specific options on a
 * {@link ResultSet} instance prior to its use by the library.
 * This allows for features such as row offsetting even on
 * databases that don't support it (such as Oracle, Derby, etc).
 * The default implementation does nothing.
 *
 * @param res The ResultSet to modify.
 * @param query The query instance which was run to produce
 *              the result set.
 */
public void setQueryResultSetProperties(ResultSet res, Query query) throws SQLException
{
}
/**
 * Returns a result set of all of the tables (and associated meta) in the
 * database. The fields of the result set must correspond with those specified
 * in {@link java.sql.DatabaseMetaData#getTables(String, String, String, String[])}.
 * This implementation calls that method with the configured schema and a
 * {"TABLE"} type filter. For databases (such as PostgreSQL) where this is
 * unsuitable, different parameters can be specified to the getTables
 * method in an override, or an entirely new implementation written, as long
 * as the result set corresponds in fields to the JDBC spec.
 *
 * @param conn The connection to use in retrieving the database tables.
 * @return A result set of tables (and meta) corresponding in fields
 *         to the JDBC specification.
 * @see java.sql.DatabaseMetaData#getTables(String, String, String, String[])
 */
public ResultSet getTables(Connection conn) throws SQLException
{
    return conn.getMetaData().getTables(null, schema, "", new String[]{"TABLE"});
}
/**
 * Returns a result set of the sequences visible in the configured schema,
 * using DatabaseMetaData#getTables with a {"SEQUENCE"} type filter.
 *
 * @param conn The connection to use in retrieving the sequences.
 * @return A result set corresponding in fields to the JDBC getTables specification.
 */
public ResultSet getSequences(Connection conn) throws SQLException
{
    return conn.getMetaData().getTables(null, schema, "", new String[]{"SEQUENCE"});
}
/**
 * Returns index information for the given table via DatabaseMetaData#getIndexInfo
 * (non-unique indexes included, approximate statistics not accepted).
 *
 * @param conn The connection to use.
 * @param tableName The table whose indexes are retrieved.
 * @return A result set corresponding in fields to the JDBC getIndexInfo specification.
 */
public ResultSet getIndexes(Connection conn, String tableName) throws SQLException
{
    return conn.getMetaData().getIndexInfo(null, schema, tableName, false, false);
}
/**
 * Returns the foreign key columns referenced by the given table via
 * DatabaseMetaData#getImportedKeys.
 *
 * @param connection The connection to use.
 * @param tableName The table whose imported keys are retrieved.
 * @return A result set corresponding in fields to the JDBC getImportedKeys specification.
 */
public ResultSet getImportedKeys(Connection connection, String tableName) throws SQLException
{
    return connection.getMetaData().getImportedKeys(null, schema, tableName);
}
/**
 * Renders the SELECT portion of a given {@link Query} instance in the
 * manner required by the database-specific SQL implementation. Usually
 * this is as simple as "SELECT id FROM table" or "SELECT DISTINCT * FROM table".
 * However, some databases require the limit and offset parameters to be
 * specified as part of the SELECT clause — for example, on HSQLDB a Query for
 * the "id" field limited to 10 rows renders as "SELECT TOP 10 id FROM table".
 *
 * There is usually no need to call this method directly. Under normal
 * operations it functions as a delegate for {@link #renderQuery(Query, TableNameConverter, boolean)}.
 *
 * @param query The Query instance from which to determine the SELECT properties.
 * @param converter The name converter to allow conversion of the query entity
 *                  interface into a proper table name.
 * @param count Whether or not the query should be rendered as a SELECT COUNT(*).
 * @return The database-specific SQL rendering of the SELECT portion of the query.
 */
protected String renderQuerySelect(Query query, TableNameConverter converter, boolean count)
{
    final StringBuilder sql = new StringBuilder();
    switch (query.getType())
    {
        case SELECT:
            sql.append("SELECT ");
            if (query.isDistinct())
            {
                sql.append("DISTINCT ");
            }
            // COUNT queries replace the field list entirely.
            sql.append(count ? "COUNT(*)" : querySelectFields(query));
            sql.append(" FROM ").append(queryTableName(query, converter));
            break;
    }
    return sql.toString();
}
/**
 * Builds the schema-qualified table reference for the query's primary table,
 * preferring an explicitly set table name over the converted entity name, and
 * appending the table alias when one is defined.
 *
 * @param query the query whose table reference is rendered
 * @param converter converts the query's entity type into a table name
 * @return the schema-qualified (and possibly aliased) table reference
 */
protected final String queryTableName(Query query, TableNameConverter converter)
{
    final String explicitTable = query.getTable();
    final String tableName = (explicitTable != null)
            ? explicitTable
            : converter.getName(query.getTableType());
    final String alias = query.getAlias(query.getTableType());
    String result = withSchema(tableName);
    if (alias != null)
    {
        result = result + " " + alias;
    }
    return result;
}
/**
 * Renders the comma-separated SELECT field list, applying the table alias
 * (when present) and identifier quoting to each field via withAlias.
 *
 * @param query the query whose fields are rendered
 * @return the comma-joined, alias-qualified field list
 */
protected final String querySelectFields(final Query query)
{
    // Fixed: the anonymous Function was raw (type arguments stripped in this copy
    // of the source), which does not compile against Guava's Function interface;
    // restored Function<String, String>.
    return Joiner.on(',').join(transform(query.getFields(), new Function<String, String>()
    {
        @Override
        public String apply(String fieldName)
        {
            return withAlias(query, fieldName);
        }
    }));
}
/**
 * Prefixes the processed (quoted) field name with the query's table alias when
 * one is defined, e.g. "t.field"; otherwise returns the processed field alone.
 */
private String withAlias(Query query, String field)
{
    final String alias = query.getAlias(query.getTableType());
    final String processedField = processID(field);
    return (alias == null) ? processedField : alias + "." + processedField;
}
/**
* Renders the JOIN portion of the query in the database-specific SQL
* dialect. Very few databases deviate from the standard in this matter,
* thus the default implementation is usually sufficient.
*
* An example return value: " JOIN table1 ON table.id = table1.value"
*
* There is usually no need to call this method directly. Under normal
* operations it functions as a delegate for {@link #renderQuery(Query, TableNameConverter, boolean)}.
*
* @param query The Query instance from which to determine the JOIN properties.
* @param converter The name converter to allow conversion of the query entity
* interface into a proper table name.
* @return The database-specific SQL rendering of the JOIN portion of the query.
*/
protected String renderQueryJoins(Query query, TableNameConverter converter)
{
final StringBuilder sql = new StringBuilder();
for (Map.Entry>, String> joinEntry : query.getJoins().entrySet())
{
sql.append(" JOIN ").append(withSchema(converter.getName(joinEntry.getKey())));
if (query.getAlias(joinEntry.getKey()) != null)
{
sql.append(" ").append(query.getAlias(joinEntry.getKey()));
}
if (joinEntry.getValue() != null)
{
sql.append(" ON ").append(processOnClause(joinEntry.getValue()));
}
}
return sql.toString();
}
/**
 * Renders the WHERE portion of the query in the database-specific SQL
 * dialect. Very few databases deviate from the standard in this matter,
 * thus the default implementation is usually sufficient.
 *
 * An example return value: " WHERE name = ? OR age < 20"
 *
 * There is usually no need to call this method directly. Under normal
 * operations it functions as a delegate for {@link #renderQuery(Query, TableNameConverter, boolean)}.
 *
 * @param query The Query instance from which to determine the WHERE properties.
 * @return The database-specific SQL rendering of the WHERE portion of the query,
 *         or the empty string when no where clause is set.
 */
protected String renderQueryWhere(Query query)
{
    final String whereClause = query.getWhereClause();
    if (whereClause == null)
    {
        return "";
    }
    return " WHERE " + processWhereClause(whereClause);
}
/**
 * Renders the GROUP BY portion of the query in the database-specific SQL
 * dialect. Very few databases deviate from the standard in this matter,
 * thus the default implementation is usually sufficient.
 *
 * An example return value: " GROUP BY name"
 *
 * There is usually no need to call this method directly. Under normal
 * operations it functions as a delegate for {@link #renderQuery(Query, TableNameConverter, boolean)}.
 *
 * @param query The Query instance from which to determine the GROUP BY properties.
 * @return The database-specific SQL rendering of the GROUP BY portion of the query,
 *         or the empty string when no group clause is set.
 */
protected String renderQueryGroupBy(Query query)
{
    final String groupClause = query.getGroupClause();
    if (groupClause == null)
    {
        return "";
    }
    return " GROUP BY " + processGroupByClause(groupClause);
}
/**
 * Applies identifier quoting (processID) to each field named in a GROUP BY clause.
 */
private String processGroupByClause(String groupBy)
{
    // Fixed: the anonymous Function was raw (type arguments stripped in this copy
    // of the source), which does not compile against Guava's Function interface;
    // restored Function<String, String>.
    return SqlUtils.processGroupByClause(groupBy, new Function<String, String>()
    {
        @Override
        public String apply(String field)
        {
            return processID(field);
        }
    });
}
/**
 * Renders the ORDER BY portion of the query in the database-specific SQL
 * dialect. Very few databases deviate from the standard in this matter,
 * thus the default implementation is usually sufficient.
 *
 * An example return value: " ORDER BY name ASC"
 *
 * There is usually no need to call this method directly. Under normal
 * operations it functions as a delegate for {@link #renderQuery(Query, TableNameConverter, boolean)}.
 *
 * @param query The Query instance from which to determine the ORDER BY properties.
 * @return The database-specific SQL rendering of the ORDER BY portion of the query,
 *         or the empty string when no order clause is set.
 */
protected String renderQueryOrderBy(Query query)
{
    final String orderClause = query.getOrderClause();
    if (orderClause == null)
    {
        return "";
    }
    return " ORDER BY " + processOrderClause(orderClause);
}
// Quotes the identifier captured by SqlUtils.ORDER_CLAUSE group 1.
// NOTE(review): processID is applied to the literal template "$1" and the result is
// used as the replacement string, so the quoting surrounds the back-reference when
// substituted — confirm that processID passes "$1" through unmodified. Also note
// replaceFirst only rewrites the first match; verify multi-field ORDER BY clauses
// are handled by ORDER_CLAUSE itself.
private String processOrderClause(String order)
{
    return SqlUtils.ORDER_CLAUSE.matcher(order).replaceFirst(processID("$1"));
}
/**
 * Renders the LIMIT portion of the query in the database-specific SQL
 * dialect. There is wide variety in database implementations of this
 * particular SQL clause; many databases do not support it at all. If the
 * database in question does not support LIMIT, this method should be
 * overridden to return an empty String, and LIMIT should instead be
 * implemented via {@link #setQueryResultSetProperties(ResultSet, Query)}
 * and {@link #setQueryStatementProperties(Statement, Query)}.
 *
 * An example return value: " LIMIT 10,2"
 *
 * There is usually no need to call this method directly. Under normal
 * operations it functions as a delegate for {@link #renderQuery(Query, TableNameConverter, boolean)}.
 *
 * @param query The Query instance from which to determine the LIMIT properties.
 * @return The database-specific SQL rendering of the LIMIT portion of the query.
 */
protected String renderQueryLimit(Query query)
{
    final StringBuilder sql = new StringBuilder();
    final int limit = query.getLimit();
    final int offset = query.getOffset();
    // A negative limit means "no limit"; zero is a legal (empty) result limit.
    if (limit >= 0)
    {
        sql.append(" LIMIT ").append(limit);
    }
    if (offset > 0)
    {
        sql.append(" OFFSET ").append(offset);
    }
    return sql.toString();
}
/**
 * Retrieves a JDBC {@link Connection} instance which corresponds
 * to the database represented by the provider instance. This Connection
 * can be used to execute arbitrary JDBC operations against the database.
 * Also, this is the method used by the whole of ActiveObjects itself to
 * get database connections when required.
 *
 * While a transaction is open, the same {@link Connection} instance is
 * returned for the current thread (a very basic, naive form of per-thread
 * pooling). This is necessary to allow arbitrary JDBC operations within a
 * transaction without breaking transaction integrity; it should not be
 * relied upon for performance — enable real pooling via the
 * {@link javax.sql.DataSource} passed to instances.
 *
 * Despite this, the {@link Connection} instances returned from this method
 * should still be treated as bona fide JDBC connections: they can and should
 * be closed when their usage is complete. This is especially important when
 * actual connection pooling is in use. The call to close() will be
 * intercepted and ignored if necessary (see DelegateConnectionHandler).
 *
 * @return A new connection to the database
 */
public final Connection getConnection() throws SQLException
{
    Connection c = transactionThreadLocal.get();
    if (c != null)
    {
        if (!c.isClosed())
        {
            // An open transaction owns this thread's connection; reuse it.
            return c;
        }
        else
        {
            transactionThreadLocal.remove(); // remove the reference to the connection
        }
    }
    final Connection connectionImpl = dataSource.getConnection();
    if (connectionImpl == null)
    {
        throw new SQLException("Unable to create connection");
    }
    // Wrap so close() can be intercepted while a transaction owns the connection.
    c = DelegateConnectionHandler.newInstance(connectionImpl);
    setPostConnectionProperties(c);
    return c;
}
/**
 * Opens a transaction on this thread's connection: the connection is made
 * non-closeable, switched to SERIALIZABLE isolation with auto-commit off, and
 * stored in the thread-local so subsequent getConnection() calls return it.
 *
 * @return the connection on which the transaction was started
 */
public final Connection startTransaction() throws SQLException
{
    final Connection c = getConnection();
    // Prevent user code from closing the connection mid-transaction.
    setCloseable(c, false);
    c.setTransactionIsolation(Connection.TRANSACTION_SERIALIZABLE);
    c.setAutoCommit(false);
    transactionThreadLocal.set(c);
    return c;
}
/**
 * Commits the transaction previously opened via {@link #startTransaction()}
 * and clears this thread's transaction reference.
 *
 * @param c the connection returned by startTransaction(); must be this
 *          thread's current transaction connection
 * @return the same connection
 * @throws SQLException if the commit fails
 */
public final Connection commitTransaction(Connection c) throws SQLException
{
    // Fixed check order: validate non-null first, so committing with no started
    // transaction reports "not started" rather than the misleading
    // concurrent-transactions message.
    checkState(c != null, "Tried to commit a transaction that is not started!");
    checkState(c == transactionThreadLocal.get(), "There are two concurrently open transactions!");
    c.commit();
    transactionThreadLocal.remove();
    return c;
}
/**
 * Rolls back the transaction previously opened via {@link #startTransaction()}.
 * NOTE(review): unlike commitTransaction, this does not clear the thread-local
 * reference — presumably the caller subsequently commits or re-ends the
 * transaction; confirm against the transaction-management call sites.
 *
 * @param c the connection returned by startTransaction(); must be this
 *          thread's current transaction connection
 * @throws SQLException if the rollback fails
 */
public final void rollbackTransaction(Connection c) throws SQLException
{
    // Fixed check order: validate non-null first (see commitTransaction).
    checkState(c != null, "Tried to rollback a transaction that is not started!");
    checkState(c == transactionThreadLocal.get(), "There are two concurrently open transactions!");
    c.rollback();
}
/**
 * Marks a wrapped connection as (non-)closeable, so close() calls made while a
 * transaction owns the connection can be ignored. No-op for connections that
 * are not DelegateConnection wrappers.
 *
 * @param connection the connection to flag; may be null
 * @param closeable whether close() should actually close the connection
 */
void setCloseable(Connection connection, boolean closeable)
{
    // instanceof already yields false for null, so the former explicit null
    // check was redundant and has been removed.
    if (connection instanceof DelegateConnection)
    {
        ((DelegateConnection) connection).setCloseable(closeable);
    }
}
/**
 * Frees any resources held by the database provider or delegate
 * libraries (such as connection pools). This method should be called
 * once usage of the provider is complete to ensure that all
 * connections are committed and closed.
 */
public void dispose()
{
    dataSource.dispose();
}
/**
 * Called to make any post-creation modifications to a new
 * {@link Connection} instance. This is used for databases
 * such as Derby which require the schema to be set after
 * the connection is created. The default implementation does nothing.
 *
 * @param conn The connection to modify according to the database
 *             requirements.
 */
protected void setPostConnectionProperties(Connection conn) throws SQLException
{
}
/**
 * Renders the foreign key constraints in database-specific DDL for the table
 * in question. Only iteration, indentation and separators happen here; each
 * key is rendered by the {@link #renderForeignKey(DDLForeignKey)} delegate.
 *
 * @param uniqueNameConverter converter for unique-constraint names (unused by
 *                            this default implementation)
 * @param table The database-agnostic DDL representation of the table in question.
 * @return The String rendering of all of the foreign keys for the table.
 * @see #renderForeignKey(DDLForeignKey)
 */
protected String renderConstraintsForTable(UniqueNameConverter uniqueNameConverter, DDLTable table)
{
    final StringBuilder constraints = new StringBuilder();
    for (DDLForeignKey foreignKey : table.getForeignKeys())
    {
        constraints.append(" ").append(renderForeignKey(foreignKey)).append(",\n");
    }
    return constraints.toString();
}
/**
 * Renders the specified foreign key representation into the
 * database-specific DDL. The implementation must name the
 * foreign key according to the DDLForeignKey#getFKName()
 * value, otherwise migrations will no longer function appropriately.
 *
 * @param key The database-agnostic foreign key representation.
 * @return The database-specific DDL fragment corresponding to the
 *         foreign key in question.
 */
protected String renderForeignKey(DDLForeignKey key)
{
    return "CONSTRAINT " + processID(key.getFKName())
            + " FOREIGN KEY (" + processID(key.getField()) + ") REFERENCES "
            + withSchema(key.getTable()) + "(" + processID(key.getForeignField()) + ")";
}
/**
 * Converts the specified type into the database-specific DDL String
 * value. This default implementation delegates to
 * TypeInfo#getSqlTypeIdentifier(). Subclass implementations should be
 * sure to make a super call in order to ensure that both default naming
 * and future special cases are handled appropriately.
 *
 * @param type The type instance to convert to a DDL string.
 * @return The database-specific DDL representation of the type (e.g. "VARCHAR").
 */
protected String convertTypeToString(TypeInfo> type)
{
    return type.getSqlTypeIdentifier();
}
/**
 * Renders the specified table representation into the corresponding
 * database-specific CREATE TABLE statement. For legacy reasons, this only
 * allows single-statement table creation; additional statements (triggers,
 * functions, etc.) must be created in one of the other delegate methods for
 * DDL creation. Field and constraint rendering are delegated to renderField
 * and renderConstraintsForTable respectively.
 *
 * @param nameConverters supplies the unique-name converter used for constraints
 * @param table The database-agnostic table representation.
 * @return The database-specific DDL statement which corresponds to the
 *         specified table creation.
 * @throws RuntimeException when the table declares more than one primary key
 */
protected String renderTable(NameConverters nameConverters, DDLTable table)
{
    StringBuilder back = new StringBuilder("CREATE TABLE ");
    back.append(withSchema(table.getName()));
    back.append(" (\n");
    // Fixed: restored List<String> (raw type in this copy) and replaced LinkedList
    // with ArrayList (append + positional get access pattern); also removed the
    // intermediate StringBuilder that only buffered the constraint text.
    List<String> primaryKeys = new ArrayList<String>();
    for (DDLField field : table.getFields())
    {
        back.append(" ").append(renderField(nameConverters, table, field, new RenderFieldOptions(true, true))).append(",\n");
        if (field.isPrimaryKey())
        {
            primaryKeys.add(field.getName());
        }
    }
    back.append(renderConstraintsForTable(nameConverters.getUniqueNameConverter(), table));
    if (primaryKeys.size() > 1)
    {
        throw new RuntimeException("Entities may only have one primary key");
    }
    if (primaryKeys.size() > 0)
    {
        back.append(" PRIMARY KEY(");
        back.append(processID(primaryKeys.get(0)));
        back.append(")\n");
    }
    back.append(")");
    // Database-specific trailing clause (e.g. storage-engine options); may be null.
    String tailAppend = renderAppend();
    if (tailAppend != null)
    {
        back.append(' ');
        back.append(tailAppend);
    }
    return back.toString();
}
/**
 * Renders an INSERT statement for the given table populating the given
 * DDL values. Column identifiers and literal values are rendered via
 * {@link #processID(String)} and {@code renderValue(Object)} respectively.
 *
 * @param ddlTable The table to INSERT into.
 * @param ddlValues The column/value pairs to insert; must be non-empty.
 * @return The database-specific INSERT statement.
 */
protected String renderInsert(DDLTable ddlTable, DDLValue[] ddlValues)
{
    final StringBuilder columnList = new StringBuilder();
    final StringBuilder valueList = new StringBuilder();
    for (DDLValue ddlValue : ddlValues)
    {
        columnList.append(processID(ddlValue.getField().getName())).append(",");
        valueList.append(renderValue(ddlValue.getValue())).append(",");
    }
    // strip the trailing comma from each list
    columnList.setLength(columnList.length() - 1);
    valueList.setLength(valueList.length() - 1);

    return "INSERT INTO " + withSchema(ddlTable.getName())
            + "(" + columnList + ")"
            + " VALUES (" + valueList + ")";
}
/**
 * Generates the appropriate database-specific DDL statement to drop the
 * specified table representation. The default implementation is merely
 * {@code "DROP TABLE tablename"}, which is suitable for virtually every
 * database. Any dependent database objects (such as triggers, functions,
 * etc) must be rendered in one of the other delegate methods (such as
 * {@code renderDropTriggers(DDLTable)}).
 *
 * @param table The table representation which is to be dropped.
 * @return A database-specific DDL statement which drops the specified
 *          table.
 */
protected String renderDropTable(DDLTable table)
{
    return new StringBuilder("DROP TABLE ").append(withSchema(table.getName())).toString();
}
/**
 * Generates the database-specific DDL statements required to drop all
 * functions associated with the given table representation. The default
 * implementation returns an empty array. Some databases (such as
 * PostgreSQL) require triggers to fire explicitly declared functions,
 * unlike most databases in which triggers act almost like standalone
 * functions themselves. For such databases, dropping a table means not
 * only dropping the table and its associated triggers, but also the
 * functions backing those triggers.
 *
 * @param triggerNameConverter The naming strategy for triggers (and thus
 *          their backing functions).
 * @param table The table representation against which all functions which
 *          correspond (directly or indirectly) must be dropped.
 * @return An array of database-specific DDL statement(s) which drop the
 *          required functions.
 */
protected String[] renderDropFunctions(TriggerNameConverter triggerNameConverter, DDLTable table)
{
return new String[0];
}
/**
 * Generates the database-specific DDL statements required to drop all
 * triggers associated with the given table representation. The default
 * implementation returns an empty list.
 *
 * @param triggerNameConverter The naming strategy for trigger names.
 * @param table The table representation against which all triggers which
 *          correspond (directly or indirectly) must be dropped.
 * @return The database-specific DDL statement(s) which drop the required
 *          triggers.
 */
protected List<String> renderDropTriggers(TriggerNameConverter triggerNameConverter, DDLTable table)
{
    return ImmutableList.of();
}
/**
 * Generates the database-specific DDL statements required to drop all
 * sequences associated with the given table representation. The default
 * implementation returns an empty list. This is an Oracle-specific hook
 * used for primary key management.
 *
 * @param sequenceNameConverter The naming strategy for sequence names.
 * @param table The table representation against which all sequences which
 *          correspond (directly or indirectly) must be dropped.
 * @return The database-specific DDL statement(s) which drop the required
 *          sequences.
 */
protected List<String> renderDropSequences(SequenceNameConverter sequenceNameConverter, DDLTable table)
{
    return ImmutableList.of();
}
/**
 * Generates the database-specific DDL statements required to create all
 * of the functions necessary for the given table. For most databases,
 * this will simply return an empty list. The functionality is required
 * for databases such as PostgreSQL, which require a function to be
 * explicitly declared and associated when a trigger is created.
 *
 * Most of the work is delegated to the
 * {@link #renderFunctionForField(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField)}
 * method; fields for which it returns {@code null} are skipped.
 *
 * @param triggerNameConverter The naming strategy for trigger/function names.
 * @param table The table for which the functions must be generated.
 * @return The DDL statements to execute.
 */
protected List<String> renderFunctions(final TriggerNameConverter triggerNameConverter, final DDLTable table)
{
    return renderFields(table, Predicates.<DDLField>alwaysTrue(), new Function<DDLField, String>()
    {
        @Override
        public String apply(DDLField field)
        {
            return renderFunctionForField(triggerNameConverter, table, field);
        }
    });
}
/**
 * Applies the given rendering function to every field of the table which
 * matches the filter, discarding {@code null} renderings. Shared helper
 * for the function/trigger rendering methods.
 *
 * @param table The table whose fields are rendered.
 * @param filter Selects which fields to render.
 * @param render Produces a DDL statement for a field, or {@code null} to skip it.
 * @return The non-null rendered statements, in field order.
 */
protected final List<String> renderFields(DDLTable table, Predicate<DDLField> filter, Function<DDLField, String> render)
{
    final Iterable<DDLField> fields = Lists.newArrayList(table.getFields());
    return ImmutableList.copyOf(
            Iterables.filter(
                    Iterables.transform(
                            Iterables.filter(fields, filter), render),
                    Predicates.<String>notNull()));
}
/**
 * Generates the database-specific DDL statements required to create all
 * of the triggers necessary for the given table. For MySQL, this will
 * likely return an empty list.
 *
 * Most of the work is delegated to the
 * {@link #renderTriggerForField(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.SequenceNameConverter, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField)}
 * method; fields for which it returns {@code null} are skipped.
 *
 * @param triggerNameConverter The naming strategy for trigger names.
 * @param sequenceNameConverter The naming strategy for sequence names.
 * @param table The table for which the triggers must be generated.
 * @return The DDL statements to execute.
 */
protected List<String> renderTriggers(final TriggerNameConverter triggerNameConverter,
                                      final SequenceNameConverter sequenceNameConverter,
                                      final DDLTable table)
{
    return renderFields(
            table,
            Predicates.<DDLField>alwaysTrue(),
            new Function<DDLField, String>()
            {
                @Override
                public String apply(DDLField field)
                {
                    return renderTriggerForField(triggerNameConverter, sequenceNameConverter, table, field);
                }
            });
}
/**
 * Generates the database-specific DDL statements required to create all
 * of the sequences necessary for the given table. The default
 * implementation returns an empty list.
 *
 * @param sequenceNameConverter The naming strategy for sequences.
 * @param table The table for which the sequences must be generated.
 * @return The DDL statements to execute.
 */
protected List<String> renderSequences(SequenceNameConverter sequenceNameConverter, DDLTable table)
{
    return ImmutableList.of();
}
/**
 * Generates the database-specific DDL statements required to add a column
 * to an existing table. Included in the return value are the statements
 * required to add all necessary functions and triggers to ensure that the
 * column acts appropriately. For example, if the field is tagged with an
 * {@code @OnUpdate} annotation, chances are there will be a trigger and
 * possibly a function along with the ALTER statement. These "extra"
 * statements are properly ordered and are only appended when their
 * renderings are not {@code null}. Because of this, very few database
 * providers will need to override this method.
 *
 * @param nameConverters The naming strategies for triggers and sequences.
 * @param table The table which should receive the new column.
 * @param field The column to add to the specified table.
 * @return The DDL statements to execute.
 * @see #renderFunctionForField(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField)
 * @see #renderTriggerForField(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.SequenceNameConverter, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField)
 */
protected List<String> renderAlterTableAddColumn(NameConverters nameConverters, DDLTable table, DDLField field)
{
    List<String> back = new ArrayList<String>();
    back.add("ALTER TABLE " + withSchema(table.getName()) + " ADD COLUMN " + renderField(nameConverters, table, field, new RenderFieldOptions(true, true)));

    // Foreign keys referencing the new column must be (re-)added as well.
    for (DDLForeignKey foreignKey : findForeignKeysForField(table, field))
    {
        back.add(renderAlterTableAddKey(foreignKey));
    }

    // The function (if any) must be created before the trigger that uses it.
    String function = renderFunctionForField(nameConverters.getTriggerNameConverter(), table, field);
    if (function != null)
    {
        back.add(function);
    }
    final String trigger = renderTriggerForField(nameConverters.getTriggerNameConverter(), nameConverters.getSequenceNameConverter(), table, field);
    if (trigger != null)
    {
        back.add(trigger);
    }
    return back;
}
/**
 * Generates the database-specific DDL statements required to change the
 * given column from its old specification to the given DDL value. This
 * method also generates the appropriate statements to remove old triggers
 * and functions, as well as add new ones according to the requirements of
 * the new field definition.
 *
 * The default implementation of this method functions in the manner
 * specified by the MySQL database. Some databases will have to perform
 * more complicated actions (such as dropping and re-adding the field) in
 * order to satisfy the same use-case. Such databases should print a
 * warning to stderr to ensure that the end-developer is aware of such
 * restrictions. Thus, the specification for this method allows for data
 * loss; nevertheless, if the database supplies a mechanism to accomplish
 * the task without data loss, it should be applied.
 *
 * For maximum flexibility, the default implementation of this method only
 * deals with the dropping and addition of functions and triggers. The
 * actual generation of the ALTER TABLE statement is done in the
 * {@link #renderAlterTableChangeColumnStatement(net.java.ao.schema.NameConverters, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField, net.java.ao.schema.ddl.DDLField, net.java.ao.DatabaseProvider.RenderFieldOptions)}
 * method.
 *
 * @param nameConverters The naming strategies for triggers and sequences.
 * @param table The table containing the column to change.
 * @param oldField The old column definition.
 * @param field The new column definition (defining the resultant DDL).
 * @return The DDL statements to be executed, in order.
 * @see #getTriggerNameForField(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField)
 * @see #getFunctionNameForField(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField)
 * @see #renderFunctionForField(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField)
 * @see #renderTriggerForField(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.SequenceNameConverter, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField)
 */
protected List<String> renderAlterTableChangeColumn(NameConverters nameConverters, DDLTable table, DDLField oldField, DDLField field)
{
    final List<String> back = new ArrayList<String>();

    // Drop the old trigger/function pair (if any) before altering the column.
    final String trigger = getTriggerNameForField(nameConverters.getTriggerNameConverter(), table, oldField);
    if (trigger != null)
    {
        back.add("DROP TRIGGER " + processID(trigger));
    }
    final String function = getFunctionNameForField(nameConverters.getTriggerNameConverter(), table, oldField);
    if (function != null)
    {
        back.add("DROP FUNCTION " + processID(function));
    }

    back.add(renderAlterTableChangeColumnStatement(nameConverters, table, oldField, field, renderFieldOptionsInAlterColumn()));

    // Re-create the function (if any) before the trigger that uses it.
    final String toRenderFunction = renderFunctionForField(nameConverters.getTriggerNameConverter(), table, field);
    if (toRenderFunction != null)
    {
        back.add(toRenderFunction);
    }
    final String toRenderTrigger = renderTriggerForField(nameConverters.getTriggerNameConverter(), nameConverters.getSequenceNameConverter(), table, field);
    if (toRenderTrigger != null)
    {
        back.add(toRenderTrigger);
    }
    return back;
}
/**
 * Returns the field-rendering options used when generating the
 * ALTER TABLE ... CHANGE COLUMN statement. The default enables all three
 * rendering options; subclasses may override to suit database-specific
 * ALTER syntax.
 *
 * @return The options passed to
 *          {@link #renderAlterTableChangeColumnStatement(NameConverters, DDLTable, DDLField, DDLField, RenderFieldOptions)}.
 */
protected RenderFieldOptions renderFieldOptionsInAlterColumn()
{
return new RenderFieldOptions(true, true, true);
}
/**
 * Generates the database-specific DDL statement only for altering a table
 * and changing a column. This method must only generate a single
 * statement as it does not need to concern itself with functions or
 * triggers associated with the column. This method is only to be called
 * as a delegate for the
 * {@link #renderAlterTableChangeColumn(net.java.ao.schema.NameConverters, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField, net.java.ao.schema.ddl.DDLField)}
 * method, for which it is a primary delegate. The default implementation
 * of this method functions according to the MySQL specification.
 *
 * @param nameConverters The naming strategies used for field rendering.
 * @param table The table containing the column to change.
 * @param oldField The old column definition.
 * @param field The new column definition (defining the resultant DDL).
 * @param options Flags controlling which field attributes are rendered.
 * @return A single DDL statement which is to be executed.
 * @see #renderField(net.java.ao.schema.NameConverters, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField, net.java.ao.DatabaseProvider.RenderFieldOptions)
 */
protected String renderAlterTableChangeColumnStatement(NameConverters nameConverters, DDLTable table, DDLField oldField, DDLField field, RenderFieldOptions options)
{
    return "ALTER TABLE " + withSchema(table.getName()) + " CHANGE COLUMN "
            + processID(oldField.getName()) + ' '
            + renderField(nameConverters, table, field, options);
}
/**
 * Generates the database-specific DDL statements required to remove the
 * specified column from the given table. This also generates the
 * necessary statements to drop all triggers and functions associated with
 * the column in question. If the database being implemented has a
 * non-standard syntax for dropping functions and/or triggers, it may be
 * required to override this method, even if the syntax to drop columns is
 * standard.
 *
 * @param triggerNameConverter The naming strategy for triggers/functions.
 * @param table The table from which to drop the column.
 * @param field The column definition to remove from the table.
 * @return An array of DDL statements to be executed, in order.
 * @see #getTriggerNameForField(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField)
 * @see #getFunctionNameForField(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField)
 */
protected String[] renderAlterTableDropColumn(TriggerNameConverter triggerNameConverter, DDLTable table, DDLField field)
{
    List<String> back = new ArrayList<String>();

    // Foreign keys referencing the column must be dropped first.
    for (DDLForeignKey foreignKey : findForeignKeysForField(table, field))
    {
        back.add(renderAlterTableDropKey(foreignKey));
    }

    String trigger = getTriggerNameForField(triggerNameConverter, table, field);
    if (trigger != null)
    {
        back.add("DROP TRIGGER " + processID(trigger));
    }
    String function = getFunctionNameForField(triggerNameConverter, table, field);
    if (function != null)
    {
        back.add("DROP FUNCTION " + processID(function));
    }

    back.add("ALTER TABLE " + withSchema(table.getName()) + " DROP COLUMN " + processID(field.getName()));
    return back.toArray(new String[back.size()]);
}
/**
 * Generates the database-specific DDL statement required to add a foreign
 * key to a table. For databases which do not support such a statement, a
 * warning should be printed to stderr and a {@code null} value returned.
 *
 * @param key The foreign key to be added. As this instance contains all
 *          necessary data (such as domestic table, field, etc), no
 *          additional parameters are required.
 * @return A DDL statement to be executed, or {@code null}.
 * @see #renderForeignKey(DDLForeignKey)
 */
protected String renderAlterTableAddKey(DDLForeignKey key)
{
    return "ALTER TABLE " + withSchema(key.getDomesticTable()) + " ADD " + renderForeignKey(key);
}
/**
 * Generates the database-specific DDL statement required to remove a
 * foreign key from a table. For databases which do not support such a
 * statement, a warning should be printed to stderr and a {@code null}
 * value returned. This method assumes that the
 * {@link #renderForeignKey(DDLForeignKey)} method properly names the
 * foreign key according to the {@link DDLForeignKey#getFKName()} method.
 *
 * @param key The foreign key to be removed. As this instance contains all
 *          necessary data (such as domestic table, field, etc), no
 *          additional parameters are required.
 * @return A DDL statement to be executed, or {@code null}.
 */
protected String renderAlterTableDropKey(DDLForeignKey key)
{
    StringBuilder statement = new StringBuilder("ALTER TABLE ");
    statement.append(withSchema(key.getDomesticTable()));
    statement.append(" DROP FOREIGN KEY ");
    statement.append(processID(key.getFKName()));
    return statement.toString();
}
/**
 * Generates the database-specific DDL statement required to create a new
 * index. The syntax for this operation is highly standardized and thus it
 * is unlikely this method will be overridden. If the database in question
 * does not support indexes, a warning should be printed to stderr and
 * {@code null} returned.
 *
 * @param indexNameConverter The naming strategy for index names.
 * @param index The index to create. This single instance contains all of
 *          the data necessary to create the index, thus no separate
 *          parameters (such as a {@code DDLTable}) are required.
 * @return A DDL statement to be executed, or {@code null}.
 */
protected String renderCreateIndex(IndexNameConverter indexNameConverter, DDLIndex index)
{
    final String indexName = indexNameConverter.getName(shorten(index.getTable()), shorten(index.getField()));
    return "CREATE INDEX " + withSchema(indexName)
            + " ON " + withSchema(index.getTable())
            + '(' + processID(index.getField()) + ')';
}
/**
 * Generates the database-specific DDL statement required to drop an
 * index. The syntax for this operation is highly standardized and thus it
 * is unlikely this method will be overridden. If the database in question
 * does not support indexes, a warning should be printed to stderr and
 * {@code null} returned. If the index does not exist in the database, an
 * empty string is returned.
 *
 * @param indexNameConverter The naming strategy for index names.
 * @param index The index to drop. This single instance contains all of
 *          the data necessary to drop the index, thus no separate
 *          parameters (such as a {@code DDLTable}) are required.
 * @return A DDL statement to be executed, or the empty string if the
 *          index is absent.
 */
protected String renderDropIndex(IndexNameConverter indexNameConverter, DDLIndex index)
{
    if (!hasIndex(indexNameConverter, index))
    {
        return "";
    }
    final String indexName = indexNameConverter.getName(shorten(index.getTable()), shorten(index.getField()));
    return "DROP INDEX " + withSchema(indexName) + " ON " + withSchema(index.getTable());
}
/**
 * Checks (via JDBC metadata) whether the named index currently exists on
 * the index's table. Index name comparison is case-insensitive.
 *
 * @param indexNameConverter The naming strategy used to derive the index name.
 * @param index The index to look up.
 * @return {@code true} if an index with the derived name exists.
 * @throws ActiveObjectsException If the metadata lookup fails.
 */
protected boolean hasIndex(IndexNameConverter indexNameConverter, DDLIndex index)
{
    final String indexName = indexNameConverter.getName(shorten(index.getTable()), shorten(index.getField()));
    Connection connection = null;
    ResultSet indexes = null;
    try
    {
        connection = getConnection();
        indexes = getIndexes(connection, index.getTable());
        while (indexes.next())
        {
            if (indexName.equalsIgnoreCase(indexes.getString("INDEX_NAME")))
            {
                return true;
            }
        }
        return false;
    }
    catch (SQLException e)
    {
        throw new ActiveObjectsException(e);
    }
    finally
    {
        // Close the ResultSet as well as the connection: the original only
        // closed the connection, leaking the metadata ResultSet.
        if (indexes != null)
        {
            try
            {
                indexes.close();
            }
            catch (SQLException ignored)
            {
                // best effort cleanup; nothing useful to do here
            }
        }
        closeQuietly(connection);
    }
}
/**
 * Generates any database-specific options which must be appended to the
 * end of a table definition. The only database I am aware of which
 * requires this is MySQL. For example:
 *
 * <pre>CREATE TABLE test (
 *     id INTEGER NOT NULL AUTO_INCREMENT,
 *     name VARCHAR(45),
 *     PRIMARY KEY(id)
 * ) ENGINE=InnoDB;</pre>
 *
 * The {@code "ENGINE=InnoDB"} clause is what is returned by this method.
 * The default implementation simply returns {@code null}, signifying that
 * no append should be rendered.
 *
 * @return A DDL clause to be appended to the CREATE TABLE DDL, or
 *          {@code null}.
 */
protected String renderAppend()
{
return null;
}
/**
 * Generates the database-specific DDL fragment required to render the
 * field and its associated type. This includes field attributes such as
 * {@code NOT NULL}, auto-increment (if supported by the database at the
 * field level) and so on. Sample return value:
 *
 * <pre>name VARCHAR(255) DEFAULT "Skye" NOT NULL</pre>
 *
 * Certain databases don't allow defined precision for certain types (such
 * as Derby and INTEGER). The logic for whether or not to render precision
 * should not be within this method, but delegated to the
 * {@link #considerPrecision(DDLField)} method.
 *
 * Almost all functionality within this method is delegated to other
 * methods within the implementation. As such, it is almost never
 * necessary to override this method directly. An exception to this would
 * be a database like PostgreSQL which requires a different type for
 * auto-incremented fields.
 *
 * @param nameConverters The naming strategies; used here for unique
 *          constraint naming.
 * @param table The table containing the field.
 * @param field The field to be rendered.
 * @param options Flags controlling which attributes (unique constraint,
 *          default value, forced NULL default) are rendered.
 * @return A DDL fragment to be embedded in a statement elsewhere.
 */
protected String renderField(NameConverters nameConverters, DDLTable table, DDLField field, RenderFieldOptions options)
{
StringBuilder back = new StringBuilder();
back.append(processID(field.getName()));
back.append(" ");
back.append(renderFieldType(field));
// Auto-increment fields get the increment clause (if any) and never a
// DEFAULT clause; the two branches are mutually exclusive.
if (field.isAutoIncrement())
{
String autoIncrementValue = renderAutoIncrement();
if (!autoIncrementValue.trim().equals(""))
{
back.append(' ').append(autoIncrementValue);
}
}
else if ((options.forceNull && !field.isNotNull() && !field.isUnique() && !field.isPrimaryKey()) || (options.renderDefault && field.getDefaultValue() != null))
{
// With forceNull and no explicit default this renders " DEFAULT NULL",
// since renderValue(null) returns "NULL".
back.append(renderFieldDefault(table, field));
}
if (options.renderUnique && field.isUnique())
{
final String renderUniqueString = renderUnique(nameConverters.getUniqueNameConverter(), table, field);
if (!renderUniqueString.trim().equals(""))
{
back.append(' ').append(renderUniqueString);
}
}
// Unique columns are rendered NOT NULL as well, regardless of options.
if (field.isNotNull() || field.isUnique())
{
back.append(" NOT NULL");
}
return back.toString();
}
/**
 * Renders the {@code DEFAULT} clause (with a leading space) for the given
 * field, delegating literal rendering to {@code renderValue(Object)}.
 *
 * @param table The table containing the field (unused by the default
 *          implementation, available for overrides).
 * @param field The field whose default value is rendered.
 * @return The DDL fragment, e.g. {@code " DEFAULT 'Skye'"}.
 */
protected String renderFieldDefault(DDLTable table, DDLField field)
{
    return " DEFAULT " + renderValue(field.getDefaultValue());
}
/**
 * Renders the given Java instance in a database-specific way. This method
 * handles special cases such as {@link Date} (delegated to
 * {@link #renderDate(Date)} and quoted), {@link Boolean} (always rendered
 * as 0/1), {@code null} (rendered as {@code NULL}) and numbers (rendered
 * bare). All other values are rendered (by default) as
 * {@code 'value.toString()'} — the String value enclosed within single
 * quotes. Implementations are encouraged to override this method as
 * necessary.
 *
 * @param value The Java instance to be rendered as a database literal.
 * @return The database-specific String rendering of the instance in
 *          question.
 * @see #renderDate(Date)
 */
protected String renderValue(Object value)
{
    if (value == null)
    {
        return "NULL";
    }
    if (value instanceof Date)
    {
        return "'" + renderDate((Date) value) + "'";
    }
    if (value instanceof Boolean)
    {
        return Boolean.TRUE.equals(value) ? "1" : "0";
    }
    if (value instanceof Number)
    {
        return value.toString();
    }
    return "'" + value + "'";
}
/**
 * Renders the provided {@link Date} instance as a DATETIME literal in the
 * database-specific format. The return value is not enclosed within
 * quotes, as this is accomplished within other functions when rendering
 * is required. This is a boiler-plate usage of the
 * {@link SimpleDateFormat} class, using the date format defined within
 * the {@link #getDateFormat()} method.
 *
 * @param date The time instance to be rendered.
 * @return The database-specific String representation of the time.
 */
protected String renderDate(Date date)
{
    final SimpleDateFormat formatter = new SimpleDateFormat(getDateFormat());
    return formatter.format(date);
}
/**
 * Renders the {@code UNIQUE} constraint as defined by the
 * database-specific DDL syntax. This method is a delegate of other, more
 * complex methods such as
 * {@link #renderField(net.java.ao.schema.NameConverters, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField, net.java.ao.DatabaseProvider.RenderFieldOptions)}.
 * The default implementation just returns {@code UNIQUE}. Implementations
 * may override this method to return an empty {@link String} if the
 * database in question does not support the constraint.
 *
 * @param uniqueNameConverter The naming strategy for unique constraints
 *          (unused by the default implementation).
 * @param table The table containing the unique field.
 * @param field The field for which the constraint is rendered.
 * @return The database-specific rendering of {@code UNIQUE}.
 */
protected String renderUnique(UniqueNameConverter uniqueNameConverter, DDLTable table, DDLField field)
{
return "UNIQUE";
}
/**
 * Returns the database-specific TIMESTAMP text format as defined by the
 * {@link SimpleDateFormat} syntax. This format should include the time
 * down to the second (or even more precise, if allowed by the database).
 * The default implementation returns the format for MySQL, which is:
 * {@code yyyy-MM-dd HH:mm:ss}
 *
 * @return The database-specific TIMESTAMP text format.
 */
protected String getDateFormat()
{
return "yyyy-MM-dd HH:mm:ss";
}
/**
 * Renders the database-specific DDL type for the field in question. This
 * method merely delegates to the {@link #convertTypeToString(TypeInfo)}
 * method, passing the field type. Thus, it is rarely necessary (if ever)
 * to override this method. It may be deprecated in a future release.
 *
 * @param field The field which contains the type to be rendered.
 * @return The database-specific type DDL rendering.
 */
protected String renderFieldType(DDLField field)
{
return convertTypeToString(field.getType());
}
public Object handleBlob(ResultSet res, Class> type, String field) throws SQLException
{
final Blob blob = res.getBlob(field);
if (type.equals(InputStream.class))
{
return blob.getBinaryStream();
}
else if (type.equals(byte[].class))
{
return blob.getBytes(1, (int) blob.length());
}
else
{
return null;
}
}
/**
 * Retrieves the name of the trigger which corresponds to the field in
 * question (if any). If no trigger will be automatically created for the
 * specified field, {@code null} should be returned. This function is to
 * allow for databases which require the use of triggers on a field to
 * allow for certain functionality (like ON UPDATE). The default
 * implementation returns {@code null}.
 *
 * @param triggerNameConverter The naming strategy for trigger names.
 * @param table The table which contains the field for which a trigger may
 *          or may not exist.
 * @param field The field for which a previous migration may have created
 *          a trigger.
 * @return The unique name of the trigger which was created for the field,
 *          or {@code null} if none.
 * @see #renderTriggerForField(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.SequenceNameConverter, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField)
 */
protected String getTriggerNameForField(TriggerNameConverter triggerNameConverter, DDLTable table, DDLField field)
{
return null;
}
/**
 * Renders the trigger which corresponds to the specified field, or
 * {@code null} if none. This is to allow for databases which require the
 * use of triggers to provide functionality such as ON UPDATE. The default
 * implementation returns {@code null}.
 *
 * @param triggerNameConverter The naming strategy for trigger names.
 * @param sequenceNameConverter The naming strategy for sequence names.
 * @param table The table containing the field for which a trigger may
 *          need to be rendered.
 * @param field The field for which the trigger should be rendered, if any.
 * @return A database-specific DDL statement creating a trigger for the
 *          field in question, or {@code null}.
 * @see #getTriggerNameForField(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField)
 */
protected String renderTriggerForField(TriggerNameConverter triggerNameConverter, SequenceNameConverter sequenceNameConverter, DDLTable table, DDLField field)
{
return null;
}
/**
 * Retrieves the name of the function which corresponds to the field in
 * question (if any). If no function will be automatically created for the
 * specified field, {@code null} should be returned. This method is to
 * allow for databases which require the use of explicitly created
 * functions which correspond to triggers (e.g. PostgreSQL). Few providers
 * will need to override the default implementation, which derives the
 * function name from the trigger name by appending {@code "()"}.
 *
 * @param triggerNameConverter The naming strategy for trigger names.
 * @param table The table which contains the field for which a function
 *          may or may not exist.
 * @param field The field for which a previous migration may have created
 *          a function.
 * @return The unique name of the function which was created for the
 *          field, or {@code null} if none.
 */
protected String getFunctionNameForField(TriggerNameConverter triggerNameConverter, DDLTable table, DDLField field)
{
    final String triggerName = getTriggerNameForField(triggerNameConverter, table, field);
    if (triggerName == null)
    {
        return null;
    }
    return triggerName + "()";
}
/**
 * Renders the function which corresponds to the specified field, or
 * {@code null} if none. This is to allow for databases which require the
 * use of triggers and explicitly created functions to provide
 * functionality such as ON UPDATE (e.g. PostgreSQL). The default
 * implementation returns {@code null}.
 *
 * @param triggerNameConverter The naming strategy for trigger/function names.
 * @param table The table containing the field for which a function may
 *          need to be rendered.
 * @param field The field for which the function should be rendered, if any.
 * @return A database-specific DDL statement creating a function for the
 *          field in question, or {@code null}.
 * @see #getFunctionNameForField(net.java.ao.schema.TriggerNameConverter, net.java.ao.schema.ddl.DDLTable, net.java.ao.schema.ddl.DDLField)
 */
protected String renderFunctionForField(TriggerNameConverter triggerNameConverter, DDLTable table, DDLField field)
{
return null;
}
/**
* Generates an INSERT statement to be used to create a new row in the
* database, returning the primary key value. This method also invokes
* the delegate method, {@link #executeInsertReturningKey(EntityManager, java.sql.Connection, Class, String, String, DBParam...)}
* passing the appropriate parameters and query. This method is required
* because some databases do not support the JDBC parameter
* RETURN_GENERATED_KEYS
(such as HSQLDB and PostgreSQL).
* Also, some databases (such as MS SQL Server) require odd tricks to
* support explicit value passing to auto-generated fields. This method
* should take care of any extra queries or odd SQL generation required
* to implement both auto-generated primary key returning, as well as
* explicit primary key value definition.
*
* Overriding implementations of this method should be sure to use the
* {@link Connection} instance passed to the method, not a new
* instance generated using the {@link #getConnection()} method. This is
* because this method is in fact a delegate invoked by {@link EntityManager}
* as part of the entity creation process and may be part of a transaction,
* a bulk creation or some more complicated operation. Both optimization
* and usage patterns on the API dictate that the specified connection
* instance be used. Implementations may assume that the given connection
* instance is never null
.
*
* The default implementation of this method should be sufficient for any
* fully compliant ANSI SQL database with a properly implemented JDBC
* driver. Note that this method should not not actually execute
* the SQL it generates, but pass it on to the {@link #executeInsertReturningKey(EntityManager, java.sql.Connection, Class, String, String, DBParam...)}
* method, allowing for functional delegation and better extensibility.
* However, this method may execute any additional statements required to
* prepare for the INSERTion (as in the case of MS SQL Server which requires
* some config parameters to be set on the database itself prior to INSERT).
*
* @param manager The EntityManager
which was used to dispatch
* the INSERT in question.
* @param conn The connection to be used in the eventual execution of the
* generated SQL statement.
* @param entityType The Java class of the entity.
* @param pkType The Java type of the primary key value. Can be used to
* perform a linear search for a specified primary key value in the
* params
list. The return value of the method must be of
* the same type.
* @param pkField The database field which is the primary key for the
* table in question. Can be used to perform a linear search for a
* specified primary key value in the params
list.
* @param pkIdentity Flag indicating whether or not the primary key field
* is auto-incremented by the database (IDENTITY field).
* @param table The name of the table into which the row is to be INSERTed.
* @param params A varargs array of parameters to be passed to the
* INSERT statement. This may include a specified value for the
* primary key.
* @throws SQLException If the INSERT fails in the delegate method, or
* if any additional statements fail with an exception.
* @see #executeInsertReturningKey(EntityManager, java.sql.Connection, Class, String, String, DBParam...)
*/
@SuppressWarnings("unused")
public , K> K insertReturningKey(EntityManager manager, Connection conn,
Class entityType, Class pkType,
String pkField, boolean pkIdentity, String table, DBParam... params) throws SQLException
{
final StringBuilder sql = new StringBuilder("INSERT INTO " + withSchema(table) + " (");
for (DBParam param : params)
{
sql.append(processID(param.getField()));
sql.append(',');
}
if (params.length > 0)
{
sql.setLength(sql.length() - 1);
}
else
{
sql.append(processID(pkField));
}
sql.append(") VALUES (");
for (DBParam param : params)
{
sql.append("?,");
}
if (params.length > 0)
{
sql.setLength(sql.length() - 1);
}
else
{
sql.append("DEFAULT");
}
sql.append(")");
return executeInsertReturningKey(manager, conn, entityType, pkType, pkField, sql.toString(), params);
}
/**
* Delegate method to execute an INSERT statement returning any auto-generated
* primary key values. This method is primarily designed to be called as a delegate
* from the {@link #insertReturningKey(EntityManager, Connection, Class, String, boolean, String, DBParam...)}
* method. The idea behind this method is to allow custom implementations to
* override this method to potentially execute other statements (such as getting the
* next value in a sequence) rather than the default implementation which uses the
* JDBC constant, {@code RETURN_GENERATED_KEYS}. Any database which has a
* fully-implemented JDBC driver should have no problems with the default
* implementation of this method.
*
* Part of the design behind splitting {@code insertReturningKey} and
* {@code executeInsertReturningKey} is so that logic for generating the actual
* INSERT statement need not be duplicated throughout the code and in custom
* implementations providing trivial changes to the default algorithm. This method
* should avoid actually generating SQL if at all possible.
*
* This method should iterate through the passed {@code DBParam}(s) to
* ensure that no primary key value was explicitly specified. If one was, it
* should be used in lieu of one which is auto-generated by the database. Also,
* it is this value which should be returned if specified, rather than the value
* which would have been generated or {@code null}. As such, this method
* should always return exactly the value of the primary key field in the row which
* was just inserted, regardless of what that value may be.
*
* In cases where the database mechanism for getting the next primary key value
* is not thread safe, this method should be declared {@code synchronized},
* or some thread synchronization technique employed. Unfortunately, it is not
* always possible to ensure that no other INSERT could (potentially) "steal" the
* expected value out from under the algorithm. Such scenarios are to be avoided
* when possible, but the algorithm need not take extremely esoteric concurrency
* cases into account. (see the HSQLDB provider for an example of such a
* less-than-thorough asynchronous algorithm)
*
* IMPORTANT: The INSERT {@link Statement} must use the specified
* connection, rather than a new one retrieved from {@link #getConnection()} or
* equivalent. This is because the INSERT may be part of a bulk insertion, a
* transaction, or possibly another such operation. It is also important to note
* that this method should not close the connection. Doing so could cause the
* entity creation algorithm to fail at a higher level up the stack.
*
* @param manager The {@code EntityManager} which was used to dispatch
* the INSERT in question.
* @param conn The database connection to use in executing the INSERT statement.
* @param entityType The Java class of the entity.
* @param pkType The Java class type of the primary key field (for use both in
* searching the {@code params} as well as performing value conversion
* of auto-generated DB values into proper Java instances).
* @param pkField The database field which is the primary key for the
* table in question. Can be used to perform a linear search for a
* specified primary key value in the {@code params} list.
* @param params A varargs array of parameters to be passed to the
* INSERT statement. This may include a specified value for the
* primary key.
* @throws SQLException If the INSERT fails in the delegate method, or
* if any additional statements fail with an exception.
* @see #insertReturningKey(EntityManager, Connection, Class, String, boolean, String, DBParam...)
*/
protected , K> K executeInsertReturningKey(EntityManager manager, Connection conn,
Class entityType, Class pkType,
String pkField, String sql, DBParam... params) throws SQLException
{
K back = null;
final PreparedStatement stmt = preparedStatement(conn, sql, Statement.RETURN_GENERATED_KEYS);
for (int i = 0; i < params.length; i++)
{
Object value = params[i].getValue();
if (value instanceof RawEntity>)
{
value = Common.getPrimaryKeyValue((RawEntity>) value);
}
if (params[i].getField().equalsIgnoreCase(pkField))
{
back = (K) value;
}
if (value == null)
{
putNull(stmt, i + 1);
}
else
{
TypeInfo