/**********************************************************************
Copyright (c) 2007 Andy Jefferson and others. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Contributors:
...
**********************************************************************/
package org.datanucleus.store.rdbms.query;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.datanucleus.ExecutionContext;
import org.datanucleus.FetchPlan;
import org.datanucleus.exceptions.NucleusDataStoreException;
import org.datanucleus.exceptions.NucleusUserException;
import org.datanucleus.identity.IdentityUtils;
import org.datanucleus.metadata.AbstractClassMetaData;
import org.datanucleus.metadata.AbstractMemberMetaData;
import org.datanucleus.metadata.IdentityType;
import org.datanucleus.metadata.QueryResultMetaData;
import org.datanucleus.metadata.QueryResultMetaData.ConstructorTypeColumn;
import org.datanucleus.metadata.QueryResultMetaData.ConstructorTypeMapping;
import org.datanucleus.metadata.QueryResultMetaData.PersistentTypeMapping;
import org.datanucleus.state.DNStateManager;
import org.datanucleus.store.FieldValues;
import org.datanucleus.store.rdbms.mapping.java.JavaTypeMapping;
import org.datanucleus.store.rdbms.table.Column;
import org.datanucleus.store.rdbms.table.DatastoreClass;
import org.datanucleus.store.schema.table.SurrogateColumnType;
import org.datanucleus.store.types.converters.TypeConversionHelper;
import org.datanucleus.store.rdbms.RDBMSStoreManager;
import org.datanucleus.store.rdbms.fieldmanager.ResultSetGetter;
import org.datanucleus.util.ClassUtils;
import org.datanucleus.util.Localiser;
import org.datanucleus.util.NucleusLogger;
import org.datanucleus.util.StringUtils;
/**
 * ResultObjectFactory that operates using a QueryResultMetaData and returns objects based on that definition.
 * A QueryResultMetaData allows for a row of a ResultSet to be returned as a mix of :-
 * <ul>
 * <li>a number of persistent objects (each made up of several ResultSet columns)</li>
 * <li>a number of "simple" Objects (each from an individual ResultSet column)</li>
 * </ul>
 * Each call to getObject() will then return a set of objects as per the MetaData definition.
 *
 * <p><b>ResultSet to object mapping</b><br/>
 * Each row of the ResultSet has a set of columns, and each column is either output directly
 * back to the user as a "simple" object, or used as a field of a persistent object.
 * So you could have a situation like this :-
 * <pre>
 * ResultSet Column   Output Object
 * ================   =============
 * COL1               PC1.field1
 * COL2               PC1.field2
 * COL3               Simple Object
 * COL4               PC2.field3
 * COL5               PC2.field1
 * COL6               PC2.field2
 * COL7               Simple Object
 * COL8               PC1.field3
 * ...
 * </pre>
 * This example will return an Object[4], comprising Object[0] = instance of PC1, Object[1] = instance of PC2,
 * Object[2] = simple object (COL3), Object[3] = simple object (COL7).
 * When creating the instance of PC1 we take the ResultSet columns (COL1, COL2, COL8);
 * when creating the instance of PC2 we take the ResultSet columns (COL5, COL6, COL4).
 *
 * <p><b>Columns to persistable object mapping</b><br/>
 * Where a number of columns form a persistable object, such as (COL1, COL2, COL8) above, a ResultSetGetter
 * is used to populate the fields of the persistable object from the ResultSet.
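 *
 * <p>For the layout above, a single call to getObject() gives the caller (illustrative only; "rof" here is
 * simply an instance of this class and the variable names are arbitrary) :-
 * <pre>
 * Object[] row = (Object[]) rof.getObject();
 * PC1 pc1 = (PC1) row[0];    // built from COL1, COL2, COL8
 * PC2 pc2 = (PC2) row[1];    // built from COL5, COL6, COL4
 * Object val1 = row[2];      // value of COL3
 * Object val2 = row[3];      // value of COL7
 * </pre>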
*/
public class ResultMetaDataROF extends AbstractROF
{
/** MetaData defining the result from the Query. */
QueryResultMetaData queryResultMetaData = null;
/** Column names in the ResultSet. */
String[] columnNames = null;
/** ResultSetGetter objects used for any persistable objects in the result. Set when processing the first row. */
protected ResultSetGetter[] persistentTypeResultSetGetters = null;
/**
* Constructor.
* @param ec ExecutionContext
* @param rs ResultSet
* @param fp FetchPlan
* @param qrmd MetaData defining the results from the query.
*/
public ResultMetaDataROF(ExecutionContext ec, ResultSet rs, FetchPlan fp, QueryResultMetaData qrmd)
{
super(ec, rs, fp);
this.queryResultMetaData = qrmd;
try
{
//obtain column names
ResultSetMetaData rsmd = rs.getMetaData();
int columnCount = rsmd.getColumnCount();
columnNames = new String[columnCount];
            for (int i=0; i<columnCount; i++)
            {
                String colName = rsmd.getColumnName(i+1);
                String colLabel = rsmd.getColumnLabel(i+1);
                columnNames[i] = (StringUtils.isWhitespace(colLabel) ? colName : colLabel);
            }
        }
        catch (SQLException sqle)
        {
            throw new NucleusDataStoreException("Error obtaining the column names for the query results", sqle);
        }
    }

    /**
     * Accessor for the object(s) from the current row of the ResultSet.
     * @return The object(s) for this row of the ResultSet.
     */
    public Object getObject()
    {
        List<Object> returnObjects = new ArrayList<>();
// A). Process persistent types
PersistentTypeMapping[] persistentTypes = queryResultMetaData.getPersistentTypeMappings();
if (persistentTypes != null)
{
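            // One ResultSetGetter per persistent type, created when processing the first row and reused for subsequent rows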
if (persistentTypeResultSetGetters == null)
{
persistentTypeResultSetGetters = new ResultSetGetter[persistentTypes.length];
}
int startColumnIndex = 0;
            for (int i=0;i<persistentTypes.length;i++)
            {
                Set<String> columnsInThisType = new HashSet<>();
AbstractMemberMetaData[] mmds = new AbstractMemberMetaData[columnNames.length];
                Map<String, AbstractMemberMetaData> fieldColumns = new HashMap<>();
DatastoreClass dc = ((RDBMSStoreManager)ec.getStoreManager()).getDatastoreClass(persistentTypes[i].getClassName(), ec.getClassLoaderResolver());
AbstractClassMetaData acmd = ec.getMetaDataManager().getMetaDataForClass(persistentTypes[i].getClassName(), ec.getClassLoaderResolver());
Object id = null;
// Note that in this block we compare against the column name case-insensitive to attempt to catch
// JDBC drivers that change the case of the columns that were passed in to the SQL statement. This
// could potentially cause an issue if you're using a table which has case sensitive column names
// and two columns with similar names e.g "Col1" and "col1". Until that situation comes up we ignore it :-)
                for (int j=startColumnIndex;j<columnNames.length;j++)
                {
                    if (columnsInThisType.contains(columnNames[j]))
                    {
                        // Already matched a column of this name, so this must be the start of the next type
                        startColumnIndex = j;
                        break;
                    }

                    boolean found = false;
                    if (acmd.getIdentityType() == IdentityType.DATASTORE)
                    {
                        // Check whether this column is the surrogate datastore-identity column
                        JavaTypeMapping idMapping = dc.getSurrogateMapping(SurrogateColumnType.DATASTORE_ID, false);
                        Column idColumn = idMapping.getColumnMapping(0).getColumn();
                        if (idColumn.getIdentifier().getName().equalsIgnoreCase(columnNames[j]))
                        {
                            // Add 1 since JDBC ResultSet columns are numbered from 1
                            id = idMapping.getObject(ec, rs, new int[]{j+1});
                            found = true;
                        }
                    }

                    // Otherwise match the column against the mapped column of each managed member of this type
                    int numMembers = acmd.getNoOfManagedMembers() + acmd.getNoOfInheritedManagedMembers();
                    for (int k=0;k<numMembers && !found;k++)
                    {
                        AbstractMemberMetaData apmd = acmd.getMetaDataForManagedMemberAtAbsolutePosition(k);
                        String colName = persistentTypes[i].getColumnForField(apmd.getName());
                        if (colName != null && colName.equalsIgnoreCase(columnNames[j]))
                        {
                            fieldColumns.put(columnNames[j], apmd);
                            columnsInThisType.add(columnNames[j]);
                            mmds[j] = apmd;
                            found = true;
                        }
                    }
                    if (!found)
                    {
                        // This column doesn't belong to this persistent type, so the next type starts here
                        startColumnIndex = j;
                        break;
                    }
                }

                // Build the statement mapping information for the members of this type present in the result
                StatementClassMapping resultMappings = new StatementClassMapping();

                Set<AbstractMemberMetaData> resultMmds = new HashSet<>();
resultMmds.addAll(fieldColumns.values());
int[] resultFieldNumbers = new int[resultMmds.size()];
int j=0;
for (AbstractMemberMetaData apmd : resultMmds)
{
StatementMappingIndex stmtMapping = new StatementMappingIndex(dc.getMemberMapping(apmd));
resultFieldNumbers[j] = apmd.getAbsoluteFieldNumber();
                    List<Integer> indexes = new ArrayList<>();
                    for (int k=0;k<mmds.length;k++)
                    {
                        if (mmds[k] == apmd)
                        {
                            // Add 1 since JDBC ResultSet columns are numbered from 1
                            indexes.add(k+1);
                        }
                    }
                    int[] positions = new int[indexes.size()];
                    for (int k=0;k<positions.length;k++)
                    {
                        positions[k] = indexes.get(k);
                    }
                    stmtMapping.setColumnPositions(positions);
                    resultMappings.addMappingForMember(resultFieldNumbers[j], stmtMapping);
                    j++;
                }

                if (persistentTypeResultSetGetters[i] == null)
                {
                    persistentTypeResultSetGetters[i] = new ResultSetGetter(ec, rs, resultMappings, acmd);
                }
                final ResultSetGetter rsGetter = persistentTypeResultSetGetters[i];

                // Find (or instantiate) the persistable object for this row, populating its fields from the
                // mapped ResultSet columns using the ResultSetGetter
                Class persistentCls = ec.getClassLoaderResolver().classForName(persistentTypes[i].getClassName());
                if (acmd.getIdentityType() == IdentityType.APPLICATION)
                {
                    id = IdentityUtils.getApplicationIdentityForResultSetRow(ec, acmd, persistentCls, false, rsGetter);
                }

                Object persistentObj = null;
                if (id != null)
                {
                    persistentObj = ec.findObject(id, new FieldValues()
                    {
                        public void fetchFields(DNStateManager sm)
                        {
                            rsGetter.setStateManager(sm);
                            sm.replaceFields(resultFieldNumbers, rsGetter);
                        }
                        public void fetchNonLoadedFields(DNStateManager sm)
                        {
                            rsGetter.setStateManager(sm);
                            sm.replaceNonLoadedFields(resultFieldNumbers, rsGetter);
                        }
                        public FetchPlan getFetchPlanForLoading()
                        {
                            return null;
                        }
                    }, persistentCls, false, true);
                }
                else
                {
                    NucleusLogger.QUERY.warn("Unable to determine the identity for result object of type " + persistentTypes[i].getClassName());
                }
                returnObjects.add(persistentObj);
            }
        }

        // B). Process simple (scalar) columns
        String[] columns = queryResultMetaData.getScalarColumns();
        if (columns != null)
        {
            for (int i=0;i<columns.length;i++)
            {
                try
                {
                    returnObjects.add(rs.getObject(columns[i]));
                }
                catch (SQLException sqle)
                {
                    throw new NucleusUserException("Error accessing result set column " + columns[i], sqle);
                }
            }
        }

        // C). Process constructor type mappings
        ConstructorTypeMapping[] ctrTypeMappings = queryResultMetaData.getCtrTypeMappings();
        if (ctrTypeMappings != null)
        {
            for (int i=0;i<ctrTypeMappings.length;i++)
            {
                String ctrClassName = ctrTypeMappings[i].getClassName();
                Class<?> ctrCls = ec.getClassLoaderResolver().classForName(ctrClassName);
                List<ConstructorTypeColumn> ctrColumns = ctrTypeMappings[i].getColumnsForConstructor();
Class[] ctrArgTypes = null;
Object[] ctrArgVals = null;
if (ctrColumns != null && ctrColumns.size() > 0)
{
int j=0;
ctrArgTypes = new Class[ctrColumns.size()];
ctrArgVals = new Object[ctrColumns.size()];
                    Iterator<ConstructorTypeColumn> colIter = ctrColumns.iterator();
while (colIter.hasNext())
{
ConstructorTypeColumn ctrCol = colIter.next();
try
{
Object colVal = rs.getObject(ctrCol.getColumnName());
                            if (ctrCol.getJavaType() != null)
                            {
                                // Attempt to convert the column value to the requested type
                                ctrArgTypes[j] = ctrCol.getJavaType();
                                ctrArgVals[j] = TypeConversionHelper.convertTo(colVal, ctrArgTypes[j]);
                            }
                            else
                            {
                                // No type specified, so use the value (and type) returned by the JDBC driver
                                ctrArgTypes[j] = (colVal != null ? colVal.getClass() : Object.class);
                                ctrArgVals[j] = colVal;
                            }
}
catch (SQLException sqle)
{
                            // Log the problem and leave this constructor argument as null
                            NucleusLogger.QUERY.warn("Exception obtaining value of column " + ctrCol.getColumnName() + " for constructor mapping", sqle);
}
j++;
}
}
returnObjects.add(ClassUtils.newInstance(ctrCls, ctrArgTypes, ctrArgVals));
}
}
if (returnObjects.size() == 0)
{
// No objects so user must have supplied incorrect MetaData
return null;
}
else if (returnObjects.size() == 1)
{
// Return Object
return returnObjects.get(0);
}
else
{
// Return Object[]
return returnObjects.toArray(new Object[returnObjects.size()]);
}
}
}