/*
* Copyright (c) 1998, 2021 Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2019 IBM Corporation. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v. 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0,
* or the Eclipse Distribution License v. 1.0 which is available at
* http://www.eclipse.org/org/documents/edl-v10.php.
*
* SPDX-License-Identifier: EPL-2.0 OR BSD-3-Clause
*/
// Contributors:
// Oracle - initial API and implementation from Oracle TopLink
// 07/19/2011-2.2.1 Guy Pelletier
// - 338812: ManyToMany mapping in aggregate object violate integrity constraint on deletion
// 04/09/2012-2.4 Guy Pelletier
// - 374377: OrderBy with ElementCollection doesn't work
// 14/05/2012-2.4 Guy Pelletier
// - 376603: Provide for table per tenant support for multitenant applications
// 30/05/2012-2.4 Guy Pelletier
// - 354678: Temp classloader is still being used during metadata processing
// 08/01/2012-2.5 Chris Delahunt
// - 371950: Metadata caching
// 06/03/2013-2.5.1 Guy Pelletier
// - 402380: 3 jpa21/advanced tests failed on server with
// "java.lang.NoClassDefFoundError: org/eclipse/persistence/testing/models/jpa21/advanced/enums/Gender"
package org.eclipse.persistence.mappings;
import java.beans.PropertyChangeEvent;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Vector;
import org.eclipse.persistence.annotations.BatchFetchType;
import org.eclipse.persistence.descriptors.ClassDescriptor;
import org.eclipse.persistence.descriptors.TablePerMultitenantPolicy;
import org.eclipse.persistence.descriptors.changetracking.ChangeTracker;
import org.eclipse.persistence.descriptors.changetracking.CollectionChangeEvent;
import org.eclipse.persistence.exceptions.ConversionException;
import org.eclipse.persistence.exceptions.DatabaseException;
import org.eclipse.persistence.exceptions.DescriptorException;
import org.eclipse.persistence.exceptions.QueryException;
import org.eclipse.persistence.exceptions.ValidationException;
import org.eclipse.persistence.expressions.Expression;
import org.eclipse.persistence.expressions.ExpressionBuilder;
import org.eclipse.persistence.history.AsOfClause;
import org.eclipse.persistence.history.HistoryPolicy;
import org.eclipse.persistence.indirection.IndirectCollection;
import org.eclipse.persistence.indirection.IndirectList;
import org.eclipse.persistence.indirection.ValueHolder;
import org.eclipse.persistence.internal.databaseaccess.Platform;
import org.eclipse.persistence.internal.descriptors.DescriptorIterator;
import org.eclipse.persistence.internal.descriptors.ObjectBuilder;
import org.eclipse.persistence.internal.descriptors.changetracking.AttributeChangeListener;
import org.eclipse.persistence.internal.descriptors.changetracking.ObjectChangeListener;
import org.eclipse.persistence.internal.expressions.ForUpdateClause;
import org.eclipse.persistence.internal.expressions.ObjectExpression;
import org.eclipse.persistence.internal.expressions.SQLDeleteStatement;
import org.eclipse.persistence.internal.expressions.SQLInsertStatement;
import org.eclipse.persistence.internal.expressions.SQLSelectStatement;
import org.eclipse.persistence.internal.expressions.SQLUpdateStatement;
import org.eclipse.persistence.internal.expressions.TableExpression;
import org.eclipse.persistence.internal.helper.ConversionManager;
import org.eclipse.persistence.internal.helper.DatabaseField;
import org.eclipse.persistence.internal.helper.DatabaseTable;
import org.eclipse.persistence.internal.helper.Helper;
import org.eclipse.persistence.internal.helper.NonSynchronizedVector;
import org.eclipse.persistence.internal.identitymaps.CacheId;
import org.eclipse.persistence.internal.identitymaps.CacheKey;
import org.eclipse.persistence.internal.queries.ContainerPolicy;
import org.eclipse.persistence.internal.queries.JoinedAttributeManager;
import org.eclipse.persistence.internal.queries.OrderedListContainerPolicy;
import org.eclipse.persistence.internal.security.PrivilegedAccessHelper;
import org.eclipse.persistence.internal.security.PrivilegedClassForName;
import org.eclipse.persistence.internal.security.PrivilegedNewInstanceFromClass;
import org.eclipse.persistence.internal.sessions.AbstractRecord;
import org.eclipse.persistence.internal.sessions.AbstractSession;
import org.eclipse.persistence.internal.sessions.ChangeRecord;
import org.eclipse.persistence.internal.sessions.DirectCollectionChangeRecord;
import org.eclipse.persistence.internal.sessions.MergeManager;
import org.eclipse.persistence.internal.sessions.ObjectChangeSet;
import org.eclipse.persistence.internal.sessions.UnitOfWorkImpl;
import org.eclipse.persistence.internal.sessions.remote.RemoteSessionController;
import org.eclipse.persistence.mappings.converters.Converter;
import org.eclipse.persistence.mappings.converters.ObjectTypeConverter;
import org.eclipse.persistence.mappings.converters.SerializedObjectConverter;
import org.eclipse.persistence.mappings.converters.TypeConversionConverter;
import org.eclipse.persistence.queries.DataModifyQuery;
import org.eclipse.persistence.queries.DataReadQuery;
import org.eclipse.persistence.queries.DatabaseQuery;
import org.eclipse.persistence.queries.DeleteObjectQuery;
import org.eclipse.persistence.queries.DirectReadQuery;
import org.eclipse.persistence.queries.ModifyQuery;
import org.eclipse.persistence.queries.ObjectBuildingQuery;
import org.eclipse.persistence.queries.ObjectLevelReadQuery;
import org.eclipse.persistence.queries.QueryByExamplePolicy;
import org.eclipse.persistence.queries.ReadAllQuery;
import org.eclipse.persistence.queries.ReadQuery;
import org.eclipse.persistence.queries.ReportQuery;
import org.eclipse.persistence.queries.WriteObjectQuery;
import org.eclipse.persistence.sessions.CopyGroup;
import org.eclipse.persistence.sessions.DatabaseRecord;
import org.eclipse.persistence.sessions.remote.DistributedSession;
/**
 *
 * <p><b>Purpose</b>: This mapping is used to store a collection of simple types (String, Number, Date, etc.)
* into a single table. The table must store the value and a foreign key to the source object.
* A converter can be used if the desired object type and the data type do not match.
*
* @see Converter
* @see ObjectTypeConverter
* @see TypeConversionConverter
* @see SerializedObjectConverter
*
* @author Sati
* @since TOPLink/Java 1.0
*
* 09/18/2009-2.0 Michael O'Brien
* - 266912: JPA 2.0 Metamodel API (part of the JSR-317 EJB 3.1 Criteria API)
* add support for passing BasicMap value type to MapAttributeImpl via new attributeClassification field
*/
public class DirectCollectionMapping extends CollectionMapping implements RelationalMapping {
/** Used for data modification events. */
protected static final String Delete = "delete";
protected static final String Insert = "insert";
protected static final String DeleteAll = "deleteAll";
protected static final String DeleteAtIndex = "deleteAtIndex";
protected static final String UpdateAtIndex = "updateAtIndex";
/** Allows user defined conversion between the object value and the database value. */
protected Converter valueConverter;
protected String valueConverterClassName;
protected List orderByExpressions;
/** Stores the reference table*/
protected DatabaseTable referenceTable;
/** The direct field name is converted and stored */
protected DatabaseField directField;
protected Vector sourceKeyFields;
protected Vector referenceKeyFields;
/** Used for insertion for m-m and dc, not used in 1-m. */
protected DataModifyQuery insertQuery;
/** Used for deletion when ChangeSets are used */
protected ModifyQuery changeSetDeleteQuery;
protected transient ModifyQuery changeSetDeleteNullQuery; // Bug 306075
protected boolean hasCustomDeleteQuery;
protected boolean hasCustomInsertQuery;
protected HistoryPolicy historyPolicy;
/** Used (only in case listOrderField != null) to delete object with particular orderFieldValue */
protected ModifyQuery deleteAtIndexQuery;
/** Used (only in case listOrderField != null) to update orderFieldValue of object with particular orderFieldValue */
protected ModifyQuery updateAtIndexQuery;
protected boolean hasCustomDeleteAtIndexQuery;
protected boolean hasCustomUpdateAtIndexQuery;
/**
* @since Java Persistence API 2.0
* Referenced by MapAttributeImpl to pick up the BasicMap value parameter type
* To specify the conversion type
* */
protected transient Class attributeClassification;
protected String attributeClassificationName;
/**
* PUBLIC:
* Default constructor.
*/
public DirectCollectionMapping() {
this.insertQuery = new DataModifyQuery();
this.orderByExpressions = new ArrayList<>();
this.sourceKeyFields = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(1);
this.referenceKeyFields = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(1);
this.selectionQuery = new DirectReadQuery();
this.hasCustomInsertQuery = false;
this.isPrivateOwned = true;
this.isListOrderFieldSupported = true;
}
/**
* PUBLIC:
* Provide ascending order support for this direct collection mapping.
*/
public void addAscendingOrdering() {
this.hasOrderBy = true;
orderByExpressions.add(new ExpressionBuilder().getField(getDirectFieldName()).ascending());
}
/**
* PUBLIC:
* Provide descending order support for this direct collection mapping.
*/
public void addDescendingOrdering() {
this.hasOrderBy = true;
orderByExpressions.add(new ExpressionBuilder().getField(getDirectFieldName()).descending());
}
/**
* ADVANCED:
* Used this method to add custom ordering expressions when fetching
* the collection. This could be things like expressions using a functions
* like UPPER or NULLS LAST etc.
*/
public void addOrdering(Expression expression) {
this.orderByExpressions.add(expression);
}
@Override
public boolean isRelationalMapping() {
return true;
}
/**
* PUBLIC:
* Return the converter on the mapping.
* A converter can be used to convert between the direct collection's object value and database value.
*/
public Converter getValueConverter() {
return valueConverter;
}
/**
* PUBLIC:
* Set the converter on the mapping.
* A converter can be used to convert between the direct collection's object value and database value.
*/
public void setValueConverter(Converter valueConverter) {
this.valueConverter = valueConverter;
}
/**
* PUBLIC:
* Set the converter class name on the mapping. Initialized in
* convertClassNamesToClasses.
* A converter can be used to convert between the direct collection's object value and database value.
*/
public void setValueConverterClassName(String valueConverterClassName) {
this.valueConverterClassName = valueConverterClassName;
}
/**
* PUBLIC:
* Add the reference key field.
* This is used for composite reference keys.
* This is the foreign key field in the direct table referencing the primary key of the source object.
* Both the reference field and the source field that it references must be provided.
*/
public void addReferenceKeyField(DatabaseField referenceForeignKeyField, DatabaseField sourcePrimaryKeyField) {
getSourceKeyFields().addElement(sourcePrimaryKeyField);
getReferenceKeyFields().addElement(referenceForeignKeyField);
}
/**
* PUBLIC:
* Add the name of the reference key field.
* This is used for composite reference keys.
* This is the foreign key field in the direct table referencing the primary key of the source object.
* Both the reference field name and the name of the source field that it references must be provided.
*/
public void addReferenceKeyFieldName(String referenceForeignKeyFieldName, String sourcePrimaryKeyFieldName) {
addReferenceKeyField(new DatabaseField(referenceForeignKeyFieldName), new DatabaseField(sourcePrimaryKeyFieldName));
}
/**
* INTERNAL:
* Clone and prepare the selection query as a nested batch read query.
* This is used for nested batch reading.
*/
@Override
public ReadQuery prepareNestedBatchQuery(ObjectLevelReadQuery query) {
// For CR#2646-S.M. In case of inheritance the descriptor to use may not be that
// of the source query (the base class descriptor), but that of the subclass, if the
// attribute is only of the subclass. Thus in this case use the descriptor from the mapping.
// Also: for Bug 5478648 - Do not switch the descriptor if the query's descriptor is an aggregate
ClassDescriptor descriptorToUse = query.getDescriptor();
if ((descriptorToUse != this.descriptor) && (!descriptorToUse.getMappings().contains(this)) && (!this.descriptor.isDescriptorTypeAggregate())) {
descriptorToUse = this.descriptor;
}
DataReadQuery batchQuery = new DataReadQuery();
batchQuery.setName(getAttributeName());
// Join the query where clause with the mappings,
// this will cause a join that should bring in all of the target objects.
ExpressionBuilder builder;
Expression originalSelectionCriteria = null;
// 2612538 - the default size of Map (32) is appropriate
Map clonedExpressions = new IdentityHashMap<>();
builder = new ExpressionBuilder();
// For flashback.
if (query.hasAsOfClause()) {
builder.asOf(query.getAsOfClause());
}
Expression batchSelectionCriteria = null;
// Build the batch query, either using joining, or an exist sub-select.
BatchFetchType batchType = query.getBatchFetchPolicy().getType();
if (this.batchFetchType != null) {
batchType = this.batchFetchType;
}
if (batchType == BatchFetchType.EXISTS) {
// Using a EXISTS sub-select (WHERE EXIST ( AND AND )
ExpressionBuilder subBuilder = new ExpressionBuilder(descriptorToUse.getJavaClass());
subBuilder.setQueryClassAndDescriptor(descriptorToUse.getJavaClass(), descriptorToUse);
ReportQuery subQuery = new ReportQuery(descriptorToUse.getJavaClass(), subBuilder);
subQuery.setDescriptor(descriptorToUse);
subQuery.setShouldRetrieveFirstPrimaryKey(true);
Expression subCriteria = subBuilder.twist(getSelectionCriteria(), builder);
if (query.getSelectionCriteria() != null) {
// For bug 2612567, any query can have batch attributes, so the
// original selection criteria can be quite complex, with multiple
// builders (i.e. for parallel selects).
// Now uses cloneUsing(newBase) instead of rebuildOn(newBase).
subCriteria = query.getSelectionCriteria().cloneUsing(subBuilder).and(subCriteria);
}
subQuery.setSelectionCriteria(subCriteria);
batchSelectionCriteria = builder.exists(subQuery);
} else if (batchType == BatchFetchType.IN) {
// Using a IN with foreign key values (WHERE FK IN :QUERY_BATCH_PARAMETER)
batchSelectionCriteria = buildBatchCriteria(builder, query);
} else {
// For 2729729 must clone the original selection criteria first,
// otherwise the original query will be corrupted.
if (query.getSelectionCriteria() != null) {
originalSelectionCriteria = query.getSelectionCriteria().copiedVersionFrom(clonedExpressions);
builder = originalSelectionCriteria.getBuilder();
}
// Using a join, (WHERE AND )
if (this.selectionQuery.isReadAllQuery()) {
batchSelectionCriteria = builder.twist(this.selectionQuery.getSelectionCriteria(), builder);
} else {
batchSelectionCriteria = builder.twist(this.selectionQuery.getSQLStatement().getWhereClause(), builder);
}
// For 2729729, rebuildOn is not needed as the base is still the same.
if (originalSelectionCriteria != null) {
batchSelectionCriteria = batchSelectionCriteria.and(originalSelectionCriteria);
}
if (descriptorToUse.getQueryManager().getAdditionalJoinExpression() != null) {
batchSelectionCriteria = batchSelectionCriteria.and(query.getDescriptor().getQueryManager().getAdditionalJoinExpression().rebuildOn(builder));
}
if (this.historyPolicy != null) {
if (query.getSession().getAsOfClause() != null) {
builder.asOf(query.getSession().getAsOfClause());
} else if (builder.getAsOfClause() == null) {
builder.asOf(AsOfClause.NO_CLAUSE);
}
batchSelectionCriteria = batchSelectionCriteria.and(this.historyPolicy.additionalHistoryExpression(builder, builder));
}
}
SQLSelectStatement batchStatement = new SQLSelectStatement();
for (DatabaseField keyField : getReferenceKeyFields()) {
batchStatement.addField(builder.getTable(this.referenceTable).getField(keyField));
}
batchStatement.addField(builder.getTable(this.referenceTable).getField(this.directField));
batchStatement.setWhereClause(batchSelectionCriteria);
batchQuery.setSQLStatement(batchStatement);
this.containerPolicy.addAdditionalFieldsToQuery(batchQuery, getAdditionalFieldsBaseExpression(batchQuery));
batchStatement.normalize(query.getSession(), descriptorToUse, clonedExpressions);
return batchQuery;
}
/**
* INTERNAL:
* Clone and prepare the joined direct query.
* Since direct-collection does not build objects a nest query is not required.
*/
@Override
public ObjectLevelReadQuery prepareNestedJoins(JoinedAttributeManager joinManager, ObjectBuildingQuery baseQuery, AbstractSession session) {
return null;
}
/**
* INTERNAL:
* Return the value of the field from the row or a value holder on the query to obtain the object.
*/
@Override
protected Object valueFromRowInternalWithJoin(AbstractRecord row, JoinedAttributeManager joinManager, ObjectBuildingQuery sourceQuery, CacheKey parentCacheKey, AbstractSession executionSession, boolean isTargetProtected) throws DatabaseException {
ContainerPolicy policy = getContainerPolicy();
Object value = policy.containerInstance();
ObjectBuilder objectBuilder = this.descriptor.getObjectBuilder();
// Extract the primary key of the source object, to filter only the joined rows for that object.
Object sourceKey = objectBuilder.extractPrimaryKeyFromRow(row, executionSession);
// If the query was using joining, all of the result rows by primary key will have been computed.
List rows = joinManager.getDataResultsByPrimaryKey().get(sourceKey);
// If no 1-m rows were fetch joined, then get the value normally,
// this can occur with pagination where the last row may not be complete.
if (rows == null) {
return valueFromRowInternal(row, joinManager, sourceQuery, executionSession);
}
int size = rows.size();
if(size > 0) {
// A set of direct values must be maintained to avoid duplicates from multiple 1-m joins.
Set directValues = new HashSet();
ArrayList directValuesList = null;
ArrayList targetRows = null;
boolean shouldAddAll = policy.shouldAddAll();
if(shouldAddAll) {
directValuesList = new ArrayList(size);
targetRows = new ArrayList(size);
}
Converter valueConverter = getValueConverter();
// indicates if collection contains null
boolean containsNull = false;
// For each rows, extract the target row and build the target object and add to the collection.
for (int index = 0; index < size; index++) {
AbstractRecord sourceRow = rows.get(index);
AbstractRecord targetRow = sourceRow;
// The field for many objects may be in the row,
// so build the subpartion of the row through the computed values in the query,
// this also helps the field indexing match.
targetRow = trimRowForJoin(targetRow, joinManager, executionSession);
// Partial object queries must select the primary key of the source and related objects.
// If the target joined rows in null (outerjoin) means an empty collection.
Object directValue = targetRow.get(this.directField);
if (directValue == null) {
if (size == 1) {
// A null direct value means an empty collection returned as nulls from an outerjoin.
return getIndirectionPolicy().valueFromRow(value);
} else {
containsNull = true;
}
}
// Only build/add the target object once, skip duplicates from multiple 1-m joins.
if (!directValues.contains(directValue)) {
directValues.add(directValue);
// Allow for value conversion.
if (valueConverter != null) {
directValue = valueConverter.convertDataValueToObjectValue(directValue, executionSession);
}
if (shouldAddAll) {
directValuesList.add(directValue);
targetRows.add(targetRow);
} else {
policy.addInto(directValue, value, executionSession, targetRow, sourceQuery, parentCacheKey, isTargetProtected);
}
}
}
if (shouldAddAll) {
// if collection contains a single element which is null then return an empty collection
if (!(containsNull && targetRows.size() == 1)) {
policy.addAll(directValuesList, value, executionSession, targetRows, sourceQuery, parentCacheKey, isTargetProtected);
}
} else {
// if collection contains a single element which is null then return an empty collection
if (containsNull && policy.sizeFor(value) == 1) {
policy.clear(value);
}
}
}
return getIndirectionPolicy().valueFromRow(value);
}
/**
* INTERNAL:
* Copy of the attribute of the object.
* This is NOT used for unit of work but for templatizing an object.
*/
@Override
public void buildCopy(Object copy, Object original, CopyGroup group) {
Object attributeValue = getRealCollectionAttributeValueFromObject(original, group.getSession());
attributeValue = getContainerPolicy().cloneFor(attributeValue);
// if value holder is used, then the value holder shared with original substituted for a new ValueHolder.
getIndirectionPolicy().reset(copy);
setRealAttributeValueInObject(copy, attributeValue);
}
/**
* INTERNAL:
* Clone the element, if necessary.
* DirectCollections hold on to objects that do not have Descriptors
* (e.g. int, String). These objects do not need to be cloned, unless they use a converter - they
* are immutable.
*/
@Override
public Object buildElementClone(Object element, Object parent, CacheKey parentCacheKey, Integer refreshCascade, AbstractSession cloningSession, boolean isExisting, boolean isFromSharedCache) {
Object cloneValue = element;
if ((getValueConverter() != null) && getValueConverter().isMutable()) {
cloneValue = getValueConverter().convertDataValueToObjectValue(getValueConverter().convertObjectValueToDataValue(cloneValue, cloningSession), cloningSession);
}
return cloneValue;
}
/**
* INTERNAL:
* In case Query By Example is used, this method builds and returns an expression that
* corresponds to a single attribute and it's value.
*/
@Override
public Expression buildExpression(Object queryObject, QueryByExamplePolicy policy, Expression expressionBuilder, Map processedObjects, AbstractSession session) {
if (policy.shouldValidateExample()){
throw QueryException.unsupportedMappingQueryByExample(queryObject.getClass().getName(), this);
}
return null;
}
/**
* INTERNAL:
* Verifies listOrderField's table: it must be reference table.
* Precondition: listOrderField != null.
*/
@Override
protected void buildListOrderField() {
if(this.listOrderField.hasTableName()) {
if(!getReferenceTable().equals(this.listOrderField.getTable())) {
throw DescriptorException.listOrderFieldTableIsWrong(this.getDescriptor(), this, this.listOrderField.getTable(), getReferenceTable());
}
} else {
this.listOrderField.setTable(getReferenceTable());
}
this.listOrderField = getDescriptor().buildField(this.listOrderField, getReferenceTable());
}
/**
* INTERNAL:
* Cascade perform delete through mappings that require the cascade
*/
@Override
public void cascadePerformRemoveIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) {
//as this mapping type references primitive objects this method does not apply
}
/**
* INTERNAL:
* Cascade perform removal of orphaned private owned objects from the UnitOfWorkChangeSet
*/
@Override
public void cascadePerformRemovePrivateOwnedObjectFromChangeSetIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) {
// as this mapping type references primitive objects this method does not apply
}
/**
* INTERNAL:
* Cascade registerNew for Create through mappings that require the cascade
*/
@Override
public void cascadeRegisterNewIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) {
//as this mapping type references primitive objects this method does not apply
}
/**
* INTERNAL:
* Cascade discover and persist new objects during commit.
*/
@Override
public void cascadeDiscoverAndPersistUnregisteredNewObjects(Object object, Map newObjects, Map unregisteredExistingObjects, Map visitedObjects, UnitOfWorkImpl uow, Set cascadeErrors) {
// Direct mappings do not require any cascading.
}
/**
* INTERNAL:
* The mapping clones itself to create deep copy.
*/
@Override
public Object clone() {
DirectCollectionMapping clone = (DirectCollectionMapping)super.clone();
clone.setSourceKeyFields(cloneFields(getSourceKeyFields()));
clone.setReferenceKeyFields(cloneFields(getReferenceKeyFields()));
if(this.changeSetDeleteQuery != null) {
clone.changeSetDeleteQuery = (ModifyQuery)this.changeSetDeleteQuery.clone();
}
// Bug 306075
if(this.changeSetDeleteNullQuery != null) {
clone.changeSetDeleteNullQuery = (ModifyQuery)this.changeSetDeleteNullQuery.clone();
}
if(this.deleteAtIndexQuery != null) {
clone.deleteAtIndexQuery = (ModifyQuery)this.deleteAtIndexQuery.clone();
}
if(this.updateAtIndexQuery != null) {
clone.updateAtIndexQuery = (ModifyQuery)this.updateAtIndexQuery.clone();
}
return clone;
}
/**
* INTERNAL:
* This method is used to calculate the differences between two collections.
*/
@Override
public void compareCollectionsForChange(Object oldCollection, Object newCollection, ChangeRecord changeRecord, AbstractSession session) {
if(this.listOrderField != null) {
compareListsForChange((List)oldCollection, (List)newCollection, changeRecord, session);
return;
}
ContainerPolicy cp = getContainerPolicy();
int numberOfNewNulls = 0;
HashMap originalKeyValues = new HashMap(10);
HashMap cloneKeyValues = new HashMap(10);
if (oldCollection != null) {
Object backUpIter = cp.iteratorFor(oldCollection);
while (cp.hasNext(backUpIter)) {// Make a lookup of the objects
Object secondObject = cp.next(backUpIter, session);
// For CR#2258/CR#2378 handle null values inserted in a collection.
if (secondObject == null) {
numberOfNewNulls--;
} else {
Integer count = (Integer)originalKeyValues.get(secondObject);
if (count == null) {
originalKeyValues.put(secondObject, Integer.valueOf(1));
} else {
originalKeyValues.put(secondObject, Integer.valueOf(count.intValue() + 1));
}
}
}
}
// should a removal occur this is the original count of objects on the database.
// this value is used to determine how many objects to re-insert after the delete as a
// delete will delete all of the objects not just one.
HashMap databaseCount = (HashMap)originalKeyValues.clone();
int databaseNullCount = Math.abs(numberOfNewNulls);
if (newCollection != null) {
Object cloneIter = cp.iteratorFor(newCollection);
/* The following code is used to compare objects in a direct collection.
Because objects in a direct collection are primitives and may be the same object
the following code must count the number of instances in the collection not just the
existence of an object.
*/
while (cp.hasNext(cloneIter)) {//Compare them with the objects from the clone
Object firstObject = cp.next(cloneIter, session);
// For CR#2258/CR#2378 handle null values inserted in a collection.
if (firstObject == null) {
numberOfNewNulls++;
} else {
Integer count = (Integer)originalKeyValues.get(firstObject);
if (count == null) {//the object was not in the backup
Integer cloneCount = (Integer)cloneKeyValues.get(firstObject);
//Add it to the additions hashtable
if (cloneCount == null) {
cloneKeyValues.put(firstObject, Integer.valueOf(1));
} else {
cloneKeyValues.put(firstObject, Integer.valueOf(cloneCount.intValue() + 1));
}
} else if (count.intValue() == 1) {
//There is only one object so remove the whole reference
originalKeyValues.remove(firstObject);
} else {
originalKeyValues.put(firstObject, Integer.valueOf(count.intValue() - 1));
}
}
}
}
if (cloneKeyValues.isEmpty() && originalKeyValues.isEmpty() && (numberOfNewNulls == 0) && (!changeRecord.getOwner().isNew())) {
return;
}
((DirectCollectionChangeRecord)changeRecord).clearChanges();
((DirectCollectionChangeRecord)changeRecord).addAdditionChange(cloneKeyValues, databaseCount);
((DirectCollectionChangeRecord)changeRecord).addRemoveChange(originalKeyValues, databaseCount);
((DirectCollectionChangeRecord)changeRecord).setIsDeferred(false);
((DirectCollectionChangeRecord)changeRecord).setLatestCollection(null);
//For CR#2258, produce a changeRecord which reflects the addition and removal of null values.
if (numberOfNewNulls != 0) {
((DirectCollectionChangeRecord)changeRecord).getCommitAddMap().put(null, Integer.valueOf(databaseNullCount));
if (numberOfNewNulls > 0) {
((DirectCollectionChangeRecord)changeRecord).addAdditionChange(null, Integer.valueOf(numberOfNewNulls));
} else {
numberOfNewNulls *= -1;
((DirectCollectionChangeRecord)changeRecord).addRemoveChange(null, Integer.valueOf(numberOfNewNulls));
}
}
}
/**
* INTERNAL:
* This method is used to calculate the differences between two Lists.
*/
public void compareListsForChange(List oldList, List newList, ChangeRecord changeRecord, AbstractSession session) {
// Maps objects (null included) in newList and oldList to an array of two Sets:
// the first one contains indexes of the object in oldList, the second - in newList.
// Contains only the objects for which the set of indexes in newList and oldList are different;
// only changed indexes appear in the sets (therefore the old index set and new index set don't intersect).
// Examples:
// obj was first (index 0) in oldList; first and second (indexes 0 and 1)in newList: obj -> {{}, {1}};
// obj was not in oldList; first in newList: obj -> {null, {0}};
// obj was first in oldList; not in newList: obj -> {{0}, null};
// obj was first and second in oldList; first in newList: obj -> {{1}, {}};
// Note the difference between null and empty set:
// empty set means there's at least one index (the same in oldList and newList - otherwise it would've been in the set);
// null means there's no indexes.
// That helps during deletion - if we know there is no remaining duplicates for the object to be removed
// we can delete it without checking its index (which allows delete several duplicates in one sql).
// Map entry sets with no new and no old indexes removed.
int nOldSize = oldList == null ? 0 : oldList.size();
int nNewSize = newList == null ? 0 : newList.size();
HashMap