org.sakaiproject.genericdao.springjdbc.JdbcGenericDao Maven / Gradle / Ivy
Show all versions of generic-dao Show documentation
/**
* $Id$
* $URL$
* JdbcGenericDao.java - genericdao - Apr 18, 2008 10:07:08 AM - azeckoski
**************************************************************************
* Copyright (c) 2008 Aaron Zeckoski
* Licensed under the Apache License, Version 2
*
* A copy of the Apache License, Version 2 has been included in this
* distribution and is available at: http://www.apache.org/licenses/LICENSE-2.0.txt
*
* Aaron Zeckoski ([email protected]) ([email protected]) ([email protected])
*/
package org.sakaiproject.genericdao.springjdbc;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Serializable;
import java.lang.annotation.Annotation;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Vector;
import java.util.concurrent.ConcurrentHashMap;
import javax.sql.DataSource;
import org.azeckoski.reflectutils.ArrayUtils;
import org.azeckoski.reflectutils.ClassFields;
import org.azeckoski.reflectutils.ClassFields.FieldsFilter;
import org.azeckoski.reflectutils.ReflectUtils;
import org.azeckoski.reflectutils.exceptions.FieldnameNotFoundException;
import org.sakaiproject.genericdao.api.GenericDao;
import org.sakaiproject.genericdao.api.annotations.PersistentColumnMappingPolicy;
import org.sakaiproject.genericdao.api.annotations.PersistentColumnName;
import org.sakaiproject.genericdao.api.annotations.PersistentId;
import org.sakaiproject.genericdao.api.annotations.PersistentTransient;
import org.sakaiproject.genericdao.api.annotations.enums.MappingPolicy;
import org.sakaiproject.genericdao.api.caching.CacheProvider;
import org.sakaiproject.genericdao.api.interceptors.DaoOperationInterceptor;
import org.sakaiproject.genericdao.api.interceptors.ReadInterceptor;
import org.sakaiproject.genericdao.api.interceptors.WriteInterceptor;
import org.sakaiproject.genericdao.api.mappers.DataMapper;
import org.sakaiproject.genericdao.api.mappers.EntityColumnMapper;
import org.sakaiproject.genericdao.api.mappers.NamesRecord;
import org.sakaiproject.genericdao.api.mappers.StatementMapper;
import org.sakaiproject.genericdao.api.search.Search;
import org.sakaiproject.genericdao.api.translators.DatabaseTranslator;
import org.sakaiproject.genericdao.base.caching.NonCachingCacheProvider;
import org.sakaiproject.genericdao.springjdbc.translators.BasicTranslator;
import org.sakaiproject.genericdao.springjdbc.translators.DB2Translator;
import org.sakaiproject.genericdao.springjdbc.translators.DerbyTranslator;
import org.sakaiproject.genericdao.springjdbc.translators.HSQLDBTranslator;
import org.sakaiproject.genericdao.springjdbc.translators.MSSQLTranslator;
import org.sakaiproject.genericdao.springjdbc.translators.MySQLTranslator;
import org.sakaiproject.genericdao.springjdbc.translators.OracleTranslator;
import org.sakaiproject.genericdao.springjdbc.translators.PostgresTranslator;
import org.sakaiproject.genericdao.springutil.SmartDataSourceWrapper;
import org.sakaiproject.genericdao.util.JDBCUtils;
import org.sakaiproject.genericdao.util.ThreadboundConnectionsDataSourceWrapper;
import org.springframework.dao.DataAccessException;
import org.springframework.dao.IncorrectResultSizeDataAccessException;
import org.springframework.jdbc.CannotGetJdbcConnectionException;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.support.JdbcDaoSupport;
import org.springframework.jdbc.datasource.DataSourceUtils;
import org.springframework.jdbc.datasource.SmartDataSource;
/**
* A Spring JDBC (http://www.springframework.org/) based implementation of GenericDao
* which can be extended to add more specialized DAO methods.
*
* Note: This implementation is so simple it is unlikely to be useful
*
* See the overview for installation/usage tips.
*
* @author Aaron Zeckoski ([email protected])
*/
public class JdbcGenericDao extends JdbcDaoSupport implements GenericDao {
private String databaseType = DataMapper.DBTYPE_HSQLDB;
/**
 * This should be set to the type of database being used
 * and determines which DDL to run when creating the tables,
 * defaults to {@link DataMapper#DBTYPE_HSQLDB} "HSQLDB",
 * will fixup case as needed
 *
 * @param databaseType a database type string, matched case-insensitively against
 * the known DBTYPE_* constants; unknown types are stored upper-cased as-is
 * @throws IllegalArgumentException if databaseType is null or empty
 */
public void setDatabaseType(String databaseType) {
    if (databaseType == null || databaseType.length() == 0) {
        throw new IllegalArgumentException("databaseType cannot be null or empty");
    }
    // all known database types, in canonical form
    String[] knownTypes = {
            DatabaseTranslator.DBTYPE_DB2,
            DatabaseTranslator.DBTYPE_DERBY,
            DatabaseTranslator.DBTYPE_HSQLDB,
            DatabaseTranslator.DBTYPE_MSSQL,
            DatabaseTranslator.DBTYPE_MYSQL,
            DatabaseTranslator.DBTYPE_ORACLE,
            DatabaseTranslator.DBTYPE_POSTGRES
    };
    String matched = null;
    for (String known : knownTypes) {
        if (known.equalsIgnoreCase(databaseType)) {
            // normalize to the canonical constant
            matched = known;
            break;
        }
    }
    // Locale.ROOT avoids locale-sensitive upper-casing (e.g. Turkish dotless-i)
    this.databaseType = (matched != null) ? matched : databaseType.toUpperCase(Locale.ROOT);
}
/**
 * @return the DBTYPE_* constant indicating which database this DAO is configured for
 */
protected String getDatabaseType() {
    return this.databaseType;
}
/**
 * @return the {@link JdbcTemplate} currently in use by this DAO;
 * exposed so subclasses can override or reuse it directly,
 * supports DAO-4 (http://jira.sakaiproject.org/jira/browse/DAO-4)
 */
public JdbcTemplate getSpringJdbcTemplate() {
    return super.getJdbcTemplate();
}
protected boolean showSQL = false;
/**
 * Turn SQL debugging on or off; when on, every SQL and DDL statement
 * executed by this DAO is printed out
 *
 * @param showSQL true to print all SQL and DDL statements
 */
public void setShowSQL(boolean showSQL) {
    this.showSQL = showSQL;
}
/**
 * @return true if SQL statement printing is currently enabled
 * (should normally be off in production)
 */
public boolean isShowSQL() {
    return this.showSQL;
}
private boolean autoDDL = true;
/**
 * @return true when the configured DDL will be executed automatically on startup
 */
public boolean isAutoDDL() {
    return this.autoDDL;
}
/**
 * Control automatic DDL execution
 *
 * @param autoDDL true to run the defined DDL on startup,
 * false to skip DDL execution entirely
 */
public void setAutoDDL(boolean autoDDL) {
    this.autoDDL = autoDDL;
}
private boolean autoCommitDDL = false;
/**
 * @return true when a commit is issued automatically after each DDL statement
 */
public boolean isAutoCommitDDL() {
    return this.autoCommitDDL;
}
/**
 * Control whether a commit is issued after each DDL statement as it is read in;
 * disable this when a transaction manager owns the commits, enable it when
 * you are managing transactions yourself
 *
 * @param autoCommitDDL true to auto-commit during DDL execution
 */
public void setAutoCommitDDL(boolean autoCommitDDL) {
    this.autoCommitDDL = autoCommitDDL;
}
private boolean autoCommitOperations = false;
/**
 * @return true when every generic DAO write operation commits automatically
 */
public boolean isAutoCommitOperations() {
    return this.autoCommitOperations;
}
/**
 * Control whether the DAO commits automatically after each generic DAO write operation
 *
 * @param autoCommitOperations true to auto-commit every generic dao write;
 * leave false when a transaction manager is in charge of commits
 */
public void setAutoCommitOperations(boolean autoCommitOperations) {
    this.autoCommitOperations = autoCommitOperations;
}
/**
 * Special-case DataSource setter which wraps a non-Spring {@link DataSource}
 * so that auto-commit and manual transactions work; without the wrapper the
 * connection would be closed after every jdbcTemplate call and nothing would
 * ever be committed (unless the connection itself auto-commits)
 * WARNING: when using this you are responsible for committing TXs and closing
 * connections via {@link #closeConnection()} and {@link #commitTransaction()}
 * or {@link #rollbackTransaction()}
 * Connections are NOT thread-bound with this variant
 * @see #setNonSpringDataSource(DataSource, boolean)
 *
 * @param dataSource any non-spring {@link DataSource}
 */
public void setNonSpringDataSource(DataSource dataSource) {
    // delegate with thread-binding disabled
    setNonSpringDataSource(dataSource, false);
}
/**
 * Special-case DataSource setter which wraps a non-Spring {@link DataSource}
 * so that auto-commit and manual transactions work; without the wrapper the
 * connection would be closed after every jdbcTemplate call and nothing would
 * ever be committed (unless the connection itself auto-commits)
 * WARNING: when using this you are responsible for committing TXs and closing
 * connections via {@link #closeConnection()} and {@link #commitTransaction()}
 * or {@link #rollbackTransaction()}
 *
 * @param dataSource any non-spring {@link DataSource}
 * @param threadBound if true the connection is bound to the current thread and
 * reused no matter how many times getConnection is called;
 * if false each call yields a fresh connection
 * @throws IllegalArgumentException if dataSource is null
 */
public void setNonSpringDataSource(DataSource dataSource, boolean threadBound) {
    if (dataSource == null) {
        throw new IllegalArgumentException("dataSource cannot be null");
    }
    if (dataSource instanceof SmartDataSource) {
        // already a Spring-managed DataSource, use it directly
        setDataSource(dataSource);
        return;
    }
    // plain DataSource: optionally bind connections to the thread, then wrap
    // so that connection close/commit behavior is under our control
    DataSource wrapped = threadBound
            ? new ThreadboundConnectionsDataSourceWrapper(dataSource)
            : dataSource;
    setDataSource( new SmartDataSourceWrapper(wrapped) );
}
/**
* preserve the order the classes are read in,
* presumably this is also the dependency order
*/
private Map, DataMapper> dataMappers;
/**
* This tells the DAO about your persistent classes and their associated tables,
* it is the major configuration path for the DAO
* @param dataMappers a list of all {@link DataMapper}s that this DAO uses,
* ideally this is no more than a few per DAO but it should include all tables which are directly linked
*/
public void setDataMappers(List dataMappers) {
if (this.dataMappers == null) {
this.dataMappers = new ConcurrentHashMap, DataMapper>();
this.classes = new Vector>();
}
for (DataMapper dataMapper : dataMappers) {
Class> type = dataMapper.getPersistentType();
this.dataMappers.put(type, dataMapper);
this.classes.add(type);
getNamesRecord(type); // prime the names record
}
}
/**
* Get a data mapper for a specific class,
* will always return a data mapper
*/
protected DataMapper getDataMapper(Class> type) {
DataMapper dm = dataMappers.get(type);
if (dm == null && classes.contains(type)) {
// make a Simple DM, this assumes the necessary tables already exist
dm = new SimpleDataMapper(type);
dataMappers.put(type, dm); // place it in the map
}
if (dm == null) {
throw new IllegalArgumentException("type is not a persistent class type: " + type);
}
return dm;
}
// names mapping cache
private Map, NamesRecord> namesRecordsCache = new ConcurrentHashMap, NamesRecord>();
/**
* Get a names mapping for a specific class type,
* uses caching, will always return a mapping
*/
public NamesRecord getNamesRecord(Class> type) {
NamesRecord nr = namesRecordsCache.get(type);
if (nr == null) {
// get a names record from the data mapper
DataMapper dm = getDataMapper(type);
nr = dm.getPropertyToColumnNamesMapping();
if (nr != null) {
nr.setIdentifierProperty( findIdProperty(type) );
if (nr.getForeignKeyPropertyNames().isEmpty()) {
// check for foreign keys and add them (only complete fields though)
Map> types = ReflectUtils.getInstance().getFieldTypes(type, FieldsFilter.COMPLETE);
for (Entry> entry : types.entrySet()) {
// special handling for foreign keys identified by persistent types inside this object
String property = entry.getKey();
Class> pType = entry.getValue();
if (getPersistentClasses().contains(pType)) {
// this is another persistent object so this must be a foreign key
String pId = findIdProperty(pType);
String fkProp = property + "." + pId;
String column = nr.getColumnForProperty(property);
if (column != null) {
nr.setForeignKeyMapping(fkProp, column);
}
}
}
}
namesRecordsCache.put(type, nr);
}
}
if (nr == null) {
boolean usePropertyNamesForColumns = false;
DataMapper dm = getDataMapper(type);
ReflectUtils reflectUtils = ReflectUtils.getInstance();
// try to get the mapping from the class using annotations
ClassFields> classFields = reflectUtils.analyzeClass(type);
if (classFields.getClassAnnotations().contains(PersistentColumnMappingPolicy.class)) {
for (Annotation classAnnote : classFields.getClassAnnotations()) {
if (PersistentColumnMappingPolicy.class.equals(classAnnote.annotationType())) {
MappingPolicy mp = ((PersistentColumnMappingPolicy)classAnnote).policy();
if (MappingPolicy.FIELD_NAMES.equals(mp)) {
usePropertyNamesForColumns = true;
} else if (MappingPolicy.UPPER_UNDERSCORES.equals(mp)) {
usePropertyNamesForColumns = false;
}
}
if (dm instanceof SimpleDataMapper) {
// override the setting
((SimpleDataMapper)dm).setUsePropertyNamesForColumns(usePropertyNamesForColumns);
}
}
} else {
// no annotation so get the setting from the data mapper
if (dm instanceof SimpleDataMapper) {
usePropertyNamesForColumns = ((SimpleDataMapper)dm).isUsePropertyNamesForColumns();
}
}
// create a names mapping using reflection
nr = new NamesRecord();
Map> types = reflectUtils.getFieldTypes(type, FieldsFilter.COMPLETE);
for (Entry> entry : types.entrySet()) {
String property = entry.getKey();
Class> pType = entry.getValue();
String column = property;
// check for transient annotation
try {
Annotation annotation = classFields.getFieldAnnotation(PersistentTransient.class, property);
if (annotation != null) {
// skip this one
continue;
}
} catch (FieldnameNotFoundException e) {
// nothing to do
}
if (! usePropertyNamesForColumns) {
column = BasicTranslator.makeDBNameFromCamelCase(property);
}
// check for annotation override to column name
try {
PersistentColumnName annotation = (PersistentColumnName) classFields.getFieldAnnotation(PersistentColumnName.class, property);
if (annotation != null) {
column = annotation.value();
}
} catch (FieldnameNotFoundException e) {
// nothing to do
}
nr.setNameMapping(property, column);
// special handling for foreign keys identified by persistent types inside this object
if (getPersistentClasses().contains(pType)) {
// this is another persistent object so this must be a foreign key
String pId = findIdProperty(pType);
String fkProp = property + "." + pId;
nr.setForeignKeyMapping(fkProp, column);
}
}
// add in the special id marker and make sure the id is set right
nr.setIdentifierProperty( findIdProperty(type) );
namesRecordsCache.put(type, nr);
if (dm instanceof SimpleDataMapper) {
// put this NamesRecord back into the DataMapper
((SimpleDataMapper)dm).setNamesRecord(nr);
}
}
return nr;
}
private DatabaseTranslator databaseTranslator = null;
/**
 * Force the current database translator to be this one;
 * null values are silently ignored (the existing translator is kept)
 */
public void setDatabaseTranslator(DatabaseTranslator databaseTranslator) {
    if (databaseTranslator == null) {
        return; // ignore null, keep whatever is currently set
    }
    this.databaseTranslator = databaseTranslator;
}
/**
 * @return the current translator, lazily defaulting to HSQLDB when unset
 * NOTE(review): this fallback ignores the configured database type —
 * {@link #initDatabaseTranslator()} is the init path that honors it; confirm
 * this lazy default is intentional for early callers
 */
protected DatabaseTranslator getDatabaseTranslator() {
    if (this.databaseTranslator == null) {
        this.databaseTranslator = new HSQLDBTranslator();
    }
    return this.databaseTranslator;
}
/**
 * Initialize the database translator:
 * picks the translator implementation matching {@link #getDatabaseType()},
 * unless one was already injected via {@link #setDatabaseTranslator(DatabaseTranslator)}
 * @throws UnsupportedOperationException if no translator exists for the database type
 */
protected void initDatabaseTranslator() {
    if (this.databaseTranslator != null) {
        return; // a translator was already set explicitly, keep it
    }
    String type = getDatabaseType();
    if (DatabaseTranslator.DBTYPE_DB2.equalsIgnoreCase(type)) {
        this.databaseTranslator = new DB2Translator();
    } else if (DatabaseTranslator.DBTYPE_DERBY.equalsIgnoreCase(type)) {
        this.databaseTranslator = new DerbyTranslator();
    } else if (DatabaseTranslator.DBTYPE_HSQLDB.equalsIgnoreCase(type)) {
        this.databaseTranslator = new HSQLDBTranslator();
    } else if (DatabaseTranslator.DBTYPE_MSSQL.equalsIgnoreCase(type)) {
        this.databaseTranslator = new MSSQLTranslator();
    } else if (DatabaseTranslator.DBTYPE_MYSQL.equalsIgnoreCase(type)) {
        this.databaseTranslator = new MySQLTranslator();
    } else if (DatabaseTranslator.DBTYPE_ORACLE.equalsIgnoreCase(type)) {
        this.databaseTranslator = new OracleTranslator();
    } else if (DatabaseTranslator.DBTYPE_POSTGRES.equalsIgnoreCase(type)) {
        this.databaseTranslator = new PostgresTranslator();
    } else {
        throw new UnsupportedOperationException("No translator for this database type: " + type);
    }
}
/**
* Initialize the persistent classes
*/
protected void initPersistentClasses() {
// init the list of classes and mappers related to them and execute DDL if needed
for (Class> type : getPersistentClasses()) {
DataMapper dm = getDataMapper(type);
if (autoDDL) {
InputStream ddlIS = null;
// try to get DDL from the mapper first
String ddl = dm.generateDDL(getDatabaseType());
if ( ddl == null || "".equals(ddl) ) {
if (dm instanceof SimpleDataMapper) {
// try loading from a file if set
SimpleDataMapper sdm = (SimpleDataMapper) dm;
String filepath = sdm.getDbTypeToFilename().get(getDatabaseType());
if (filepath != null) {
// cleanup filename
if (filepath.startsWith("/")) {
filepath = filepath.substring(1);
}
// try looking in the classloader of the thread first
ddlIS = getInputStreamFromClassloader(Thread.currentThread().getContextClassLoader(), filepath);
if (ddlIS == null) {
// next try the classloader for this DAO
ddlIS = getInputStreamFromClassloader(this.getClass().getClassLoader(), filepath);
}
if (ddlIS == null) {
// next try the classloader for persistent type
ddlIS = getInputStreamFromClassloader(type.getClassLoader(), filepath);
}
if (ddlIS == null) {
// we got a filename but did not find the file contents, we need to die
throw new IllegalArgumentException("Could not find find DDL file resource ("+filepath+") for DB ("+getDatabaseType()+") in any searched classloader, cannot execute DDL");
}
}
} else {
// nothing to do here: we have a blank ddl and this is not a simple mapper so just continue on -AZ
}
} else {
// turn DDL into an IS
ddlIS = new ByteArrayInputStream(ddl.getBytes());
}
if (ddlIS != null) {
// execute the ddl
executeDDLforType(ddlIS, type);
}
}
}
}
/**
 * Look up a resource stream in the given classloader
 * @param cl the classloader to search, may be null
 * @param filename the resource path (no leading slash)
 * @return the resource stream, or null when the classloader is null or the resource is absent
 */
private InputStream getInputStreamFromClassloader(ClassLoader cl, String filename) {
    return (cl == null) ? null : cl.getResourceAsStream(filename);
}
/**
 * Starts the DAO using the settings that have been pushed into it so far;
 * there is no need to trigger this when Spring manages the bean, since Spring
 * calls {@link #initDao()} automatically (which this method wraps)
 * @throws RuntimeException if the dao fails to start
 */
public void startup() {
    try {
        initDao();
    } catch (Exception e) {
        // wrap the checked lifecycle exception for non-Spring callers
        throw new RuntimeException("Failed to startup the dao: " + e.getMessage(), e);
    }
}
/**
 * Default constructor - does nothing and leaves the object in an incomplete state,
 * you need to at least set the following:
 * {@link #setDataSource(DataSource)}
 * {@link #setAutoDDL(boolean)}
 * {@link #setAutoCommitDDL(boolean)}
 * {@link #setDatabaseType(String)}
 * {@link #setDataMappers(List)}
 *
 * This does not actually start the DAO, run {@link #startup()} to start it
 * Note that this will be started automatically by Spring if this is created as a Spring bean,
 * no actions are necessary and setting an init method is not needed
 * (Spring invokes {@link #initDao()} via the JdbcDaoSupport lifecycle)
 */
public JdbcGenericDao() {
}
/**
 * Complete constructor: sets every required value for running the DAO by
 * delegating to {@link #setRequiredSettings(DataSource, boolean, String, boolean, boolean, DataMapper[])};
 * does not actually start the DAO, run {@link #startup()} to start it
 * (Spring starts it automatically when created as a Spring bean)
 *
 * @param dataSource the DataSource to use with this DAO
 * @param threadBoundDataSource if true then the DataSource will be bound to threads and
 * only unbound and closed when {@link #closeConnection()} is called,
 * otherwise a new DataSource is obtained each time,
 * this has no effect if the DataSource is a Spring DataSource
 * @param databaseType the databaseType that this DAO is connecting to (use constants in {@link DatabaseTranslator})
 * @param autoDDL if true then DDL is executed on DAO startup (can be run manually if desired)
 * @param autoCommitDDL if true then commit is executed after each DDL file is executed, if false then you need a TX manager to do this for you
 * @param dataMappers the data mappers which map this DAO to the tables
 */
public JdbcGenericDao(DataSource dataSource, boolean threadBoundDataSource,
        String databaseType, boolean autoDDL, boolean autoCommitDDL, DataMapper[] dataMappers) {
    setRequiredSettings(dataSource, threadBoundDataSource, databaseType, autoDDL, autoCommitDDL, dataMappers);
}
/**
 * Set all required settings for running the DAO,
 * can be used in the cases where using the complete constructor is inconvenient
 * This does not actually start the DAO, run {@link #startup()} to start it
 * Note that this will be started automatically by Spring if this is created as a Spring bean,
 * no actions are necessary and setting an init method is not needed
 *
 * @param dataSource the DataSource to use with this DAO (null is skipped)
 * @param threadBoundDataSource if true then the DataSource will be bound to threads and
 * only unbound and closed when {@link #closeConnection()} is called,
 * otherwise a new DataSource is obtained each time,
 * this has no effect if the DataSource is a Spring DataSource
 * @param databaseType the databaseType that this DAO is connecting to (use constants in {@link DatabaseTranslator})
 * @param autoDDL if true then DDL is executed on DAO startup (can be run manually if desired)
 * @param autoCommitDDL if true then commit is executed after each DDL file is executed, if false then you need a TX manager to do this for you
 * @param dataMappers the data mappers which map this DAO to the tables
 * @throws IllegalArgumentException if dataMappers is null or empty
 */
public void setRequiredSettings(DataSource dataSource, boolean threadBoundDataSource,
        String databaseType, boolean autoDDL, boolean autoCommitDDL, DataMapper[] dataMappers) {
    if (dataSource != null) {
        // correctly sets the datasource if non-spring or spring
        setNonSpringDataSource(dataSource, threadBoundDataSource);
    }
    setDatabaseType(databaseType);
    if (dataMappers == null || dataMappers.length == 0) {
        // message fixed: the check requires at least one mapper, not more than one
        throw new IllegalArgumentException("dataMappers must be set and contain at least one DataMapper");
    }
    List<DataMapper> mappers = new ArrayList<DataMapper>(dataMappers.length);
    for (DataMapper mapper : dataMappers) {
        mappers.add(mapper);
    }
    setDataMappers(mappers);
    setAutoDDL(autoDDL);
    setAutoCommitDDL(autoCommitDDL);
}
/**
 * Spring lifecycle hook (invoked by JdbcDaoSupport after properties are set,
 * or manually via {@link #startup()}); ordering matters: the translator must
 * exist before DDL runs, and tables must exist before caches are primed
 */
@Override
protected void initDao() throws Exception {
    super.initDao();
    // now we run our own inits
    // init the database translator based on the type
    initDatabaseTranslator();
    // init the list of classes and mappers related to them and execute DDL if needed
    initPersistentClasses();
    // init the caches
    initCaches();
}
/**
* Find the tablename from the classname
*/
protected String getTableNameFromClass(Class> type) {
String tableName = getDataMapper(type).getTableName();
if ("".equals(tableName) || tableName == null) {
// generate the table name based on defaults
tableName = BasicTranslator.makeTableNameFromClass(type);
}
return tableName;
}
/**
 * Make an entity of type T from a map of DB data
 * @param <T> the persistent entity type
 * @param type a persistent entity type
 * @param data a map of column names to data values
 * @return an entity of type T with the data from the map in the entity
 * @throws NullPointerException if type or data is null
 */
@SuppressWarnings("unchecked")
protected <T> T makeEntityFromMap(Class<T> type, Map<String, Object> data) {
    if (type == null || data == null) {
        throw new NullPointerException("type and data cannot be null");
    }
    T entity = null;
    DataMapper dm = getDataMapper(type);
    if (dm != null && dm instanceof EntityColumnMapper) {
        // let the custom mapper build the entity if it can
        entity = (T) ((EntityColumnMapper)dm).mapColumnsToObject(data);
    }
    if (entity == null) {
        // use reflection to construct and push the values into the object
        Map<String, Class<?>> types = ReflectUtils.getInstance().getFieldTypes(type, FieldsFilter.WRITEABLE);
        NamesRecord nr = getNamesRecord(type);
        entity = ReflectUtils.getInstance().constructClass(type);
        for (Entry<String, Object> entry : data.entrySet()) {
            String key = entry.getKey();
            String property = nr.getPropertyForColumn(key);
            if (property != null) {
                Object value = entry.getValue();
                // special handling for persistent types inside this type
                Class<?> pType = null;
                if (types.containsKey(property)) {
                    pType = types.get(property);
                    if (! getPersistentClasses().contains(pType)) {
                        pType = null; // only treat registered persistent types as foreign keys
                    }
                }
                if (pType != null) {
                    // this is another persistent object so this must be a foreign key:
                    // construct a stub of the related entity holding only its id
                    String pId = getIdProperty(pType);
                    // TODO use a property path (property + "." + pId) once reflect utils can build the path automatically
                    Object pValue = value;
                    if (value != null
                            && value.getClass().isAssignableFrom(pType)) {
                        pValue = ReflectUtils.getInstance().getFieldValue(value, pId);
                    }
                    value = ReflectUtils.getInstance().constructClass(pType);
                    ReflectUtils.getInstance().setFieldValue(value, pId, pValue);
                }
                ReflectUtils.getInstance().setFieldValue(entity, property, value);
            }
        }
    }
    return entity;
}
/**
 * Make a map of column names -> column values from an entity
 * @param entity a persistent entity object
 * @return a map of the data in a persistent entity which has the column names as keys
 * @throws NullPointerException if entity is null
 */
protected Map<String, Object> makeMapFromEntity(Object entity) {
    if (entity == null) {
        throw new NullPointerException("entity cannot be null");
    }
    Class<?> type = entity.getClass();
    Map<String, Object> data = null;
    DataMapper dm = getDataMapper(type);
    if (dm != null && dm instanceof EntityColumnMapper) {
        // let the custom mapper extract the data if it can
        data = ((EntityColumnMapper)dm).mapObjectToColumns(entity);
    }
    if (data == null || data.isEmpty()) {
        if (data == null) {
            data = new HashMap<String, Object>();
        }
        // get data from object using reflection
        Map<String, Class<?>> types = ReflectUtils.getInstance().getFieldTypes(type, FieldsFilter.ALL); // faster
        Map<String, Object> objectValues = ReflectUtils.getInstance().getObjectValues(entity, FieldsFilter.READABLE, false);
        NamesRecord nr = getNamesRecord(type);
        for (Entry<String, Object> entry : objectValues.entrySet()) {
            String property = entry.getKey();
            String dbName = nr.getColumnForProperty(property);
            if (dbName != null) {
                Object value = entry.getValue();
                // special handling for persistent types inside this type
                Class<?> pType = null;
                if (types.containsKey(property)) {
                    pType = types.get(property);
                    if (! getPersistentClasses().contains(pType)) {
                        pType = null; // only registered persistent types are foreign keys
                    }
                }
                if (pType != null) {
                    // this is another persistent object so this must be a foreign key:
                    // store the related entity's id value rather than the object
                    String pId = getIdProperty(pType);
                    value = ReflectUtils.getInstance().getFieldValue(value, pId);
                }
                data.put(dbName, value);
            }
        }
    }
    return data;
}
// *********** Helper methods
/**
 * Logs an informational message to standard out;
 * centralized so the logging destination can be changed in one place later
 *
 * @param message the message to output
 */
protected void logInfo(String message) {
    String line = "INFO: [GenericDao] " + message;
    System.out.println(line);
}
/**
 * Logs a warning message to standard out;
 * centralized so the logging destination can be changed in one place later
 *
 * @param message the message to output
 */
protected void logWarn(String message) {
    String line = "WARN: [GenericDao] " + message;
    System.out.println(line);
}
/**
* This will determine the id property correctly (but fairly inefficiently) so this should be cached,
* use {@link #getIdProperty(Class)} to get the id property for a class
* @param type a persistent type
* @return
*/
protected String findIdProperty(Class> type) {
String idProp = null;
DataMapper dm = getDataMapper(type);
if (dm != null) {
idProp = dm.getIdPropertyName();
}
if (classes.contains(type)) {
if (idProp == null) {
// look for the annotation
idProp = ReflectUtils.getInstance().getFieldNameWithAnnotation(type, PersistentId.class);
}
if (idProp == null) {
idProp = "id";
}
}
return idProp;
}
/**
* @param type a persistent type
* @return the id column name
*/
public String getIdColumn(Class> type) {
String idProp = getIdProperty(type);
String idColumn = getNamesRecord(type).getColumnForProperty(idProp);
if (idColumn == null && classes.contains(type)) {
idColumn = "ID";
}
return idColumn;
}
/**
 * @param object a persistent object
 * @return the value in the id property for the supplied object,
 * or null when no id property can be determined
 */
protected Object getIdValue(Object object) {
    Class<?> entityClass = object.getClass();
    Object id = null;
    String idProp = getIdProperty(entityClass);
    if (idProp != null) {
        id = ReflectUtils.getInstance().getFieldValue(object, idProp);
    }
    return id;
}
/**
* This helper method will convert the incoming data if it needs to be
* converted for the given column, otherwise it will do nothing to the value
* @param type the persistent class type
* @param column the name of the column this value is associated with
* @param value the value to convert
* @return the converted value or the original value if no conversion needed
*/
protected Object convertColumn(Class> type, String column, Object value) {
if (type != null && column != null) {
NamesRecord namesRecord = getNamesRecord(type);
value = JDBCUtils.convertColumn(namesRecord, column, value);
}
return value;
}
/**
* @return the template to use when generating insert statements,
* uses the {@link StatementMapper#TABLE_NAME}, {@link StatementMapper#COLUMNS} and {@link StatementMapper#VALUES} constants to indicate replacements
*/
public String getInsertTemplate(Class> type) {
DataMapper dm = getDataMapper(type);
String template = null;
if (dm instanceof StatementMapper) {
template = ((StatementMapper)dm).getInsertTemplate();
}
if (template == null || "".equals(template)) {
template = StatementMapper.BASIC_INSERT;
}
return template;
}
/**
* @return the template to use when generating selects,
* uses the {@link StatementMapper#SELECT}, {@link StatementMapper#TABLE_NAME}, and {@link StatementMapper#WHERE} constants to indicate replacements
*/
public String getSelectTemplate(Class> type) {
DataMapper dm = getDataMapper(type);
String template = null;
if (dm instanceof StatementMapper) {
template = ((StatementMapper)dm).getSelectTemplate();
}
if (template == null || "".equals(template)) {
template = StatementMapper.BASIC_SELECT;
}
return template;
}
/**
* @return the template to use when generating updates,
* uses the {@link StatementMapper#TABLE_NAME}, {@link StatementMapper#UPDATE} and {@link StatementMapper#WHERE} constants to indicate replacements
*/
public String getUpdateTemplate(Class> type) {
DataMapper dm = getDataMapper(type);
String template = null;
if (dm instanceof StatementMapper) {
template = ((StatementMapper)dm).getUpdateTemplate();
}
if (template == null || "".equals(template)) {
template = StatementMapper.BASIC_UPDATE;
}
return template;
}
/**
* @return the template to use when generating deletes,
* uses the {@link StatementMapper#TABLE_NAME} and {@link StatementMapper#WHERE} constants to indicate replacements
*/
public String getDeleteTemplate(Class> type) {
DataMapper dm = getDataMapper(type);
String template = null;
if (dm instanceof StatementMapper) {
template = ((StatementMapper)dm).getDeleteTemplate();
}
if (template == null || "".equals(template)) {
template = StatementMapper.BASIC_DELETE;
}
return template;
}
// ********* DDL methods
/**
* Clear the table associated with this DAO persistent class
* @param type any persistent type
*/
public void clearDataForType(Class> type) {
String sql = "TRUNCATE TABLE " + getTableNameFromClass(type);
try {
getSpringJdbcTemplate().execute(sql);
if (showSQL) {
logInfo("SQL="+sql);
}
} catch (DataAccessException e) {
// ok, try doing the clear using a delete from which is much slower
sql = "DELETE FROM " + getTableNameFromClass(type);
if (showSQL) {
logInfo("SQL="+sql);
}
getSpringJdbcTemplate().execute(sql);
} finally {
commitTransaction();
}
}
/**
* Drop the table associated with this persistent type
* @param type any persistent type
*/
public void dropTableForType(Class> type) {
String sql = "DROP TABLE " + getTableNameFromClass(type);
try {
if (showSQL) {
logInfo("SQL="+sql);
}
getSpringJdbcTemplate().execute("DROP TABLE " + getTableNameFromClass(type));
commitTransaction();
} catch (DataAccessException e) {
rollbackTransaction();
throw e;
}
}
/**
* Execute a DDL (database definition language) script, this will execute
* a set of DDL commands which are fed in via an InputStream
* The first non-comment ('--') line will be run, and if successful,
* all other non-comment lines will be run. SQL statements may be on
* multiple lines but must have ';' terminators.
*
* NOTE: The script should be located in the current ClassLoader,
* in other words, it must be a visible resource that is packaged with
* the code that is running this method
* Recommended (Example) usage:
* 1) Place the script into the src folder of your service impl project like so:
* impl/src/sql/oracle/myscript.sql
* 2) Load the sql files into the jar by setting up the maven pom like so:
 *    <resources>
 *       <resource>
 *          <directory>${basedir}/src/sql</directory>
 *          <includes>
 *             <include>** /*.sql</include>
 *          </includes>
 *       </resource>
 *    </resources>
 * Note: remove the extra space between "**" and "/*.sql"
* 3) Execute the ddl script in the init method of your DAO (when it first starts up):
* ClassLoader loader = this.getClass().getClassLoader();
* String ddlResource = getDatabaseType() + "/myscript.sql";
* InputStream stream = loader.getResourceAsStream(ddlResource);
* executeDDL(stream);
*
* @param sqlDDL the sql ddl commands to execute
* @throws IllegalArgumentException if ddl is invalid and cannot be executed
*/
public void executeDDL(InputStream sqlDDL) {
    if (sqlDDL != null) {
        // null type means the type-based {TABLENAME}/{ID} replacements are skipped
        executeDDLforType(sqlDDL, null);
    } else {
        throw new IllegalArgumentException("sqlDDL cannot be null");
    }
}
/**
 * Reads SQL/DDL statements from the stream and executes them in order:
 * '--' comment lines and blank lines are skipped, statements may span
 * multiple lines and must be terminated with ';'. The first statement acts
 * as a probe - if it fails the rest of the script is skipped without error
 * (so e.g. a CREATE TABLE script can be re-run safely); any later failure
 * aborts with IllegalArgumentException. The reader and stream are always closed.
 *
 * @param sqlDDL the stream of DDL commands to execute (closed when done)
 * @param type a persistent type used for DDL constant replacements
 *        ({TABLENAME}, {ID}, etc.), or null to skip the replacement step
 * @throws IllegalArgumentException if a statement after the first one fails
 * @throws RuntimeException if reading from the stream fails
 */
private void executeDDLforType(InputStream sqlDDL, Class> type) {
    // Now run the DDL commands if possible
    try {
        if (isAutoCommitDDL()) {
            commitTransaction(); // start the transaction
        }
        BufferedReader r = new BufferedReader(new InputStreamReader(sqlDDL));
        try {
            // read the first line, skipping any '--' comment lines
            boolean firstLine = true;
            StringBuffer buf = new StringBuffer();
            for (String line = r.readLine(); line != null; line = r.readLine()) {
                line = line.trim();
                if (line.startsWith("--")) continue; // comment line
                if (line.length() == 0) continue; // blank line
                // accumulate the line into the current statement buffer
                buf.append(' ');
                buf.append(line);
                // only execute once the statement is terminated with a ';'
                boolean process = line.endsWith(";");
                if (!process) continue;
                // remove trailing ';' before handing the statement to JDBC
                buf.setLength(buf.length() - 1);
                String ddl = buf.toString().trim();
                // FIXME do replacements even if we do not know the type
                if (type != null) {
                    // handle ddl replacements ({TABLENAME}, {ID}, ...)
                    ddl = handleTypeReplacements(type, ddl);
                }
                // run the first statement as the test - if it fails, we are done
                if (firstLine) {
                    firstLine = false;
                    try {
                        if (showSQL) {
                            logInfo("DDL="+ddl);
                        }
                        getSpringJdbcTemplate().execute(ddl);
                    } catch (DataAccessException e) {
                        // first statement failed (e.g. objects already exist): skip the whole script
                        logInfo("Could not execute first DDL line, skipping the rest: " + e.getMessage() + ":" + e.getCause());
                        return;
                    }
                } else {
                    // run other lines, until done - any one can fail (we will report it)
                    try {
                        if (showSQL) {
                            logInfo("DDL="+ddl);
                        }
                        getSpringJdbcTemplate().execute(ddl);
                    } catch (DataAccessException e) {
                        throw new IllegalArgumentException("Failed while executing ddl: " + e.getMessage(), e);
                    }
                }
                if (isAutoCommitDDL()) {
                    // commit after each successfully executed statement
                    commitTransaction();
                }
                // clear the buffer for the next statement
                buf.setLength(0);
            }
        } catch (IOException any) {
            throw new RuntimeException("Failure while processing DDL", any);
        } finally {
            try {
                r.close();
            } catch (IOException any) {
                // ignored - nothing useful can be done if the reader fails to close
            }
            // close the connection used for this DDL
            if (isAutoCommitDDL()) {
                closeConnection();
            }
        }
    } finally {
        try {
            sqlDDL.close();
        } catch (IOException any) {
            // ignored - stream close failure is not actionable
        }
    }
}
// *********** SQL methods
/**
 * Make SQL from a template
 *
 * @param sqlTemplate pass in an SQL template (one of the BASIC_* ones or make your own),
 * the replacement names will be replaced with the replacement values
 * @param tableName the name of the table for this SQL
 * @param replacements a sequence of replacement names (probably {@link #COLUMNS}, {@link #WHERE}, etc.) and values,
 * alternating like so: name,value,name,value (a trailing unpaired name is ignored)
 * @return the SQL with all replacements made
 */
protected String makeSQL(String sqlTemplate, String tableName, String... replacements) {
    String sql = sqlTemplate.replace(StatementMapper.TABLE_NAME, tableName);
    // walk the name,value pairs; the "i + 1 < length" guard skips a trailing
    // unpaired name (the previous check "length < i + 1" could never be true
    // inside the loop, so an odd-length array threw ArrayIndexOutOfBoundsException)
    for (int i = 0; i + 1 < replacements.length; i += 2) {
        sql = sql.replace(replacements[i], replacements[i + 1]);
    }
    // put in the select replacement last in case it was already replaced
    sql = sql.replace(StatementMapper.SELECT, "*");
    return sql;
}
/**
* Handle the replacements of DDL constants in the supplied DDL with real values
* based on the supplied type
* The following keys will be replaced automatically:
* {TABLENAME} - the value returned by {@link #getTableName()}
* {ID} - the column name of the unique identifier
* {TABLENAME:org.domain.MyClass} - the value returned by {@link #getTableName()} for the persistent type MyClass
* {ID:org.domain.MyClass} - the column name of the unique identifier for the persistent type MyClass
* {COLUMNNAME:propertyName} - the column name which maps to the propertyName
* {IDSEQNAME} - (Oracle) a sequence name will be generated and inserted
* based on the table name for use in generating IDs,
* if you want to specify your own sequence name then you will lose
* the ability to have the ID inserted into newly created objects
*
* @param type a persistent class type
* @param ddl database definition statements (can also be SQL)
* @return the DDL string with the {vars} replaced
*/
protected String handleTypeReplacements(Class> type, String ddl) {
//StringBuilder sb = new StringBuilder(ddl);
if (type == null || ddl == null) {
throw new IllegalArgumentException("Type and ddl cannot be null");
}
String tableName = getTableNameFromClass(type);
ddl = ddl.replace(DataMapper.DDL_TABLENAME, tableName);
ddl = ddl.replace(DataMapper.DDL_ID_COLUMN, getIdColumn(type));
if (DataMapper.DBTYPE_ORACLE.equals(getDatabaseType())) {
ddl = ddl.replace(DataMapper.DDL_ID_SEQNAME, OracleTranslator.getOracleSeqName(tableName) );
}
// now check for the other types and handle them if needed
if (ddl.contains(DataMapper.DDL_TABLENAME_TYPE_PREFIX)) {
// resolve the classname for a tablename
List> pClasses = getPersistentClasses();
for (Class> pType : pClasses) {
// attempt to replace all parent tablenames
String tName = getTableNameFromClass(pType);
ddl = ddl.replace(DataMapper.DDL_TABLENAME_TYPE_PREFIX + pType.getName() + "}", tName);
ddl = ddl.replace(DataMapper.DDL_TABLENAME_TYPE_PREFIX + pType.getSimpleName() + "}", tName);
}
}
if (ddl.contains(DataMapper.DDL_ID_TYPE_PREFIX)) {
// resolve the classname for a tablename
List> pClasses = getPersistentClasses();
for (Class> pType : pClasses) {
// attempt to replace all parent tablenames
String pId = getIdColumn(pType);
ddl = ddl.replace(DataMapper.DDL_ID_TYPE_PREFIX + pType.getName() + "}", pId);
ddl = ddl.replace(DataMapper.DDL_ID_TYPE_PREFIX + pType.getSimpleName() + "}", pId);
}
}
if (ddl.contains(DataMapper.DDL_COLUMN_PREFIX)) {
// resolve the classname for a tablename
NamesRecord nr = getNamesRecord(type);
for (String property : nr.getPropertyNames()) {
// attempt to replace all property names
String column = nr.getColumnForProperty(property);
ddl = ddl.replace(DataMapper.DDL_COLUMN_PREFIX + property + "}", column);
}
// TODO support for columns by type?
}
return ddl;
}
/**
 * Create comparison SQL but converts the value object to a string,
 * delegates to {@link JDBCUtils#makeComparisonSQL(String, int, Object)}
 *
 * @param column the name of a database column
 * @param comparisonConstant the comparison constant (e.g. EQUALS)
 * @param value the value to compare the property to
 * @return a string representing the SQL snippet (e.g. propA = ?)
 */
protected String makeComparisonSQL(String column, int comparisonConstant, Object value) {
    return JDBCUtils.makeComparisonSQL(column, comparisonConstant, value);
}
/**
* @param params a set of params to add this value to
* @param column the name of a database column
* @param comparisonConstant the comparison constant (e.g. EQUALS)
* @param value the value to compare the property to
* @return a string representing the SQL snippet (e.g. propA = ?)
*/
protected String makeComparisonSQL(List