/************************************************************************
* © 2019-2024 SAP SE or an SAP affiliate company. All rights reserved. *
************************************************************************/
package com.sap.cds.impl;
import static com.sap.cds.ResultBuilder.deletedRows;
import static com.sap.cds.ResultBuilder.insertedRows;
import static com.sap.cds.impl.builder.model.CqnParam.params;
import static com.sap.cds.impl.docstore.DocStoreUtils.targetsDocStore;
import static com.sap.cds.impl.parser.token.CqnBoolLiteral.TRUE;
import static com.sap.cds.util.CdsModelUtils.concreteKeyNames;
import static com.sap.cds.util.CdsModelUtils.entity;
import static com.sap.cds.util.CdsModelUtils.usesRuntimeViews;
import static com.sap.cds.util.CdsModelUtils.CascadeType.DELETE;
import static com.sap.cds.util.CqnStatementUtils.addTenantFilter;
import static com.sap.cds.util.CqnStatementUtils.isSelectStar;
import static com.sap.cds.util.CqnStatementUtils.moveKeyValuesToWhere;
import static com.sap.cds.util.CqnStatementUtils.rowType;
import static com.sap.cds.util.DataUtils.isDeep;
import static com.sap.cds.util.PathExpressionResolver.resolvePath;
import static java.util.Arrays.stream;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static java.util.Objects.hash;
import static java.util.stream.Collectors.groupingBy;
import static java.util.stream.Collectors.toList;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.TreeMap;
import java.util.function.BiConsumer;
import java.util.function.Function;
import java.util.function.ToLongFunction;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Iterables;
import com.google.common.collect.Iterators;
import com.google.common.collect.Streams;
import com.sap.cds.CdsDataStore;
import com.sap.cds.CdsException;
import com.sap.cds.Result;
import com.sap.cds.ResultBuilder;
import com.sap.cds.Row;
import com.sap.cds.SessionContext;
import com.sap.cds.impl.EntityCascader.EntityKeys;
import com.sap.cds.impl.EntityCascader.EntityOperation;
import com.sap.cds.impl.EntityCascader.EntityOperation.Operation;
import com.sap.cds.impl.EntityCascader.EntityOperations;
import com.sap.cds.impl.ExceptionHandler.HanaHexException;
import com.sap.cds.impl.builder.model.Conjunction;
import com.sap.cds.jdbc.hana.HanaDbContext;
import com.sap.cds.ql.CQL;
import com.sap.cds.ql.Delete;
import com.sap.cds.ql.Insert;
import com.sap.cds.ql.Select;
import com.sap.cds.ql.StructuredType;
import com.sap.cds.ql.Update;
import com.sap.cds.ql.Upsert;
import com.sap.cds.ql.cqn.CqnAnalyzer;
import com.sap.cds.ql.cqn.CqnDelete;
import com.sap.cds.ql.cqn.CqnInsert;
import com.sap.cds.ql.cqn.CqnPredicate;
import com.sap.cds.ql.cqn.CqnSelect;
import com.sap.cds.ql.cqn.CqnUpdate;
import com.sap.cds.ql.cqn.CqnUpsert;
import com.sap.cds.ql.cqn.CqnXsert;
import com.sap.cds.ql.impl.CqnNormalizer;
import com.sap.cds.ql.impl.DeepInsertSplitter;
import com.sap.cds.ql.impl.DeepUpdateSplitter;
import com.sap.cds.ql.impl.DeleteBuilder;
import com.sap.cds.reflect.CdsEntity;
import com.sap.cds.reflect.CdsModel;
import com.sap.cds.reflect.CdsStructuredType;
import com.sap.cds.util.CdsModelUtils.CascadeType;
import com.sap.cds.util.CqnStatementUtils;
import com.sap.cds.util.CqnStatementUtils.Count;
import com.sap.cds.util.DataUtils;
import com.sap.cds.util.OccUtils;
import com.sap.cds.util.ProjectionProcessor;
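/**
 * Default {@link CdsDataStore} implementation. Incoming CQN statements are validated,
 * tenant-filtered, normalized and resolved against runtime views and projections before
 * they are prepared and executed through the connected client; deep (nested) data is
 * split into flat statements per target entity.
 *
 * <p>Illustrative usage (a sketch; the entity name and the way the data store is obtained
 * are assumptions, not part of this class):
 *
 * <pre>{@code
 * CdsDataStore dataStore = ...; // provided by the CDS runtime
 * CqnSelect query = Select.from("bookshop.Books").columns("ID", "title");
 * Result result = dataStore.execute(query);
 * result.forEach(row -> System.out.println(row.get("title")));
 * }</pre>
 */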
public class CdsDataStoreImpl implements CdsDataStore {
private static final Logger logger = LoggerFactory.getLogger(CdsDataStoreImpl.class);
private final CqnValidator cqnValidator;
private final ConnectedClient connectedClient;
private final CqnNormalizer cqnNormalizer;
private final CqnAnalyzer cqnAnalyzer;
private final DataUtils dataUtils;
private final Context context;
private final CdsModel model;
private final ProjectionProcessor projectionProcessor;
private final TimingLogger timed;
private final InlineCountProcessorFactory inlineCountProcessorFactory;
public CdsDataStoreImpl(Context context, ConnectedDataStoreConnector connector) {
this.context = context;
this.model = context.getCdsModel();
this.connectedClient = connector.create(context);
this.cqnValidator = CqnValidator.create(context, connectedClient.capabilities());
this.dataUtils = DataUtils.create(context::getSessionContext,
context.getDbContext().getCapabilities().timestampPrecision());
this.connectedClient.setSessionContext(context.getSessionContext());
this.cqnNormalizer = new CqnNormalizer(context);
this.cqnAnalyzer = CqnAnalyzer.create(model);
this.projectionProcessor = ProjectionProcessor.create(model, dataUtils);
this.timed = new TimingLogger(logger, context.getDataStoreConfiguration());
this.inlineCountProcessorFactory = new InlineCountProcessorFactory(this, context.getDataStoreConfiguration());
}
@Override
public Result execute(CqnSelect select, Object... paramValues) {
return execute(select, toIndexMap(paramValues));
}
@Override
public Result execute(CqnSelect select, Map<String, Object> paramValues) {
return timed.cqn(() -> resolveAndExecuteQuery(select, paramValues), select);
}
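// adds the tenant filter, resolves runtime views, validates the statement and, on HANA,
// retries with the HEX engine plan disabled if the HEX plan cannot be used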
private Result resolveAndExecuteQuery(CqnSelect select, Map<String, Object> paramValues) {
CqnSelect filteredSelect = addTenantFilter(select, model);
CdsEntity targetEntity = null;
CdsStructuredType rowType = null;
if (filteredSelect.from().isRef()) {
targetEntity = entity(model, select.from().asRef());
if (isSelectStar(filteredSelect.items()) && filteredSelect.excluding().isEmpty()) {
rowType = targetEntity;
}
filteredSelect = projectionProcessor.resolveRuntimeViews(filteredSelect);
}
cqnValidator.validate(select, targetEntity);
try {
return execute(filteredSelect, paramValues, targetEntity, rowType);
} catch (HanaHexException e) {
// HANA: hex enforced but cannot be selected
((Select<?>) filteredSelect).hint(HanaDbContext.NO_USE_HEX_PLAN, true).hint(HanaDbContext.USE_HEX_PLAN, false);
return execute(filteredSelect, paramValues, targetEntity, rowType);
}
}
private Result execute(CqnSelect select, Map<String, Object> paramValues, CdsEntity targetEntity,
CdsStructuredType rowType) {
CqnSelect normSelect = cqnNormalizer.normalize(select);
if (rowType == null) {
rowType = rowType(model, normSelect);
}
normSelect = cqnNormalizer.resolveForwardMappedAssocs(normSelect);
return executeResolvedQuery(normSelect, paramValues).entity(targetEntity).rowType(rowType).result();
}
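// executes an already resolved select; if the filter condition is statically FALSE the
// database round trip is skipped, and the inline count processor adds the count if requested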
public ResultBuilder executeResolvedQuery(CqnSelect select, Map<String, Object> paramValues) {
ResultBuilder result;
InlineCountProcessor inlineCountProcessor = inlineCountProcessorFactory.create(select);
select = inlineCountProcessor.prepare(select);
if (CqnStatementUtils.filterIsFalse(select)) {
logger.debug("Statement filter condition evaluates to FALSE. Execution skipped.");
result = ResultBuilder.selectedRows(emptyList());
} else {
PreparedCqnStatement pcqn = connectedClient.prepare(select);
result = connectedClient.executeQuery(pcqn, paramValues, this, select.getLock().isPresent());
}
return inlineCountProcessor.execute(result, paramValues);
}
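// executes the select once per partition of the value sets and concatenates the rows;
// ORDER BY is rejected when more than one batch would be required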
@Override
public Result execute(CqnSelect select, Iterable<Map<String, Object>> valueSets, int maxBatchSize) {
int valueSetSize = Iterables.size(valueSets);
if (valueSetSize == 1) {
return execute(select, valueSets.iterator().next());
}
if (!select.orderBy().isEmpty() && valueSetSize > maxBatchSize) {
throw new UnsupportedOperationException(
"Order by is not supported when query is executed in multiple batches");
}
List<Row> rows = new ArrayList<>(valueSetSize);
Iterator<List<Map<String, Object>>> partitions = Iterators.partition(valueSets.iterator(), maxBatchSize);
while (partitions.hasNext()) {
CqnSelect batchSelect = CqnStatementUtils.batchSelect(select, partitions.next());
List<Row> result = execute(batchSelect).list();
rows.addAll(result);
}
// TODO Support streaming
return ResultBuilder.selectedRows(rows).result();
}
@Override
public Result execute(CqnDelete delete, Object... paramValues) {
return execute(delete, toIndexMap(paramValues));
}
@Override
public Result execute(CqnDelete delete, Map<String, Object> namedValues) {
return execute(delete, singletonList(namedValues));
}
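// cascading delete: child entities are deleted via path expressions where possible,
// otherwise the key graph is computed with the EntityCascader as a fallback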
@Override
public Result execute(CqnDelete delete, Iterable<Map<String, Object>> valueSets) {
CqnDelete filteredDelete = addTenantFilter(delete, model);
filteredDelete = projectionProcessor.resolve(filteredDelete);
filteredDelete = cqnNormalizer.normalize(filteredDelete);
CdsEntity target = entity(model, filteredDelete.ref());
// try optimized delete of child entities
boolean deleted = Cascader.create(CascadeType.DELETE, target).from(filteredDelete.ref())
.where(filteredDelete.where()).cascade(path -> bulkDelete(Delete.from(path).hints(delete.hints()), valueSets, true));
if (deleted) { // delete root
return bulkDelete(filteredDelete, valueSets, false);
} else { // fallback for cyclic models and subqueries in where
target = model.getEntity(filteredDelete.ref().firstSegment());
// compute object graph (select key values) and bulk delete
Set<EntityKeys> entities = cascade(target, filteredDelete.where(), valueSets);
Result result = bulkDelete(filteredDelete, valueSets, false);
delete(entities.stream().map(k -> EntityOperation.delete(k, null, context.getSessionContext())), true, delete.hints());
return result;
}
}
private Set<EntityKeys> cascade(CdsEntity target, Optional<CqnPredicate> filter,
Iterable<? extends Map<String, Object>> valueSets) {
EntityCascader cascader = EntityCascader.from(this, target).where(filter);
Set<EntityKeys> keySets;
if (valueSets.iterator().hasNext()) {
keySets = Streams.stream(valueSets).flatMap(v -> cascader.with(v).cascade(DELETE).stream())
.collect(Collectors.toSet());
} else {
keySets = cascader.cascade(DELETE);
}
return keySets;
}
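// helpers for deep updates: EntityOperations are grouped by target entity (and update
// signature) and executed as batched insert/upsert/update/delete statements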
private void updateOrInsert(Stream<EntityOperation> operations, Map<String, Object> hints) {
List<EntityOperation> notInDatastore = update(operations, hints);
insert(notInDatastore.stream(), hints);
}
private void insert(Stream<EntityOperation> ops, Map<String, Object> hints) {
ops.collect(groupingBy(EntityOperation::targetEntity)).forEach((entity, data) -> {
Iterator<Row> rows = execute(Insert.into(entity).entries(data).hints(hints)).iterator();
for (EntityOperation op : data) {
op.inserted(rows.next());
}
});
}
private void groupAndExecute(Stream<EntityOperation> ops, BiConsumer<CdsEntity, List<EntityOperation>> action) {
ops.collect(groupingBy(o -> hash(o.targetEntity().getQualifiedName(), o.updateValues().keySet())))
.forEach((g, batch) -> {
CdsEntity entity = batch.get(0).targetEntity();
action.accept(entity, batch);
});
}
private void upsert(Stream<EntityOperation> ops, Map<String, Object> hints) {
groupAndExecute(ops, (entity, upserts) -> {
List<Map<String, Object>> entries = upserts.stream().map(EntityOperation::updateValues).collect(toList());
Result result = execute(Upsert.into(entity).entries(entries).hints(hints));
Iterator<Row> resultIter = result.iterator();
Iterator<EntityOperation> upsertIter = upserts.iterator();
for (int i = 0; i < result.batchCount(); i++) {
EntityOperation u = upsertIter.next();
u.inserted(resultIter.next());
}
});
}
private List<EntityOperation> update(Stream<EntityOperation> ops, Map<String, Object> hints) {
List<EntityOperation> notFound = new ArrayList<>();
groupAndExecute(ops, (entity, updates) -> {
List<Map<String, Object>> entries = updates.stream().map(EntityOperation::updateValues).collect(toList());
Result result = execute(Update.entity(entity).entries(entries).hints(hints));
if (result.rowCount() > 0) {
Iterator<Row> resultIter = result.iterator();
Iterator<EntityOperation> updateIter = updates.iterator();
for (int i = 0; i < result.batchCount(); i++) {
EntityOperation u = updateIter.next();
long rowCount = result.rowCount(i);
u.updated(resultIter.next(), rowCount);
if (rowCount == 0) {
notFound.add(u);
}
}
} else {
notFound.addAll(updates);
}
});
return notFound;
}
private void delete(Stream<EntityOperation> ops, boolean rollbackOnFail, Map<String, Object> hints) {
Collection<List<EntityOperation>> grouped = groupByEntityAndPath(ops);
for (List<EntityOperation> batch : grouped) {
if (!batch.isEmpty()) {
EntityOperation o = batch.iterator().next();
Set<String> keyNames = o.targetKeys().keySet();
StructuredType<?> ref = CQL.entity(o.targetEntity().getQualifiedName()).matching(params(keyNames));
if (o.path() != null) {
ref = ref.to(o.path());
}
CqnDelete delete = DeleteBuilder.from(ref).hints(hints);
bulkDelete(delete, batch, rollbackOnFail);
batch.forEach(EntityOperation::deleted);
}
}
}
private static Collection<List<EntityOperation>> groupByEntityAndPath(Stream<EntityOperation> ops) {
// Group by Entity name and path. Order by entity with longest path first
// TODO simplify & optimize grouping
ToLongFunction<String> pathLength = s -> s.chars().filter(c -> c == '.').count();
Comparator<String> longestPath = Comparator.comparingLong(pathLength).reversed()
.thenComparing(Function.identity());
TreeMap<String, List<EntityOperation>> grouped = ops.collect(
groupingBy(o -> o.targetEntity().getQualifiedName() + (o.path() != null ? ":." + o.path() : ""),
() -> new TreeMap<>(longestPath), toList()));
return grouped.values();
}
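// executes a single delete statement for all value sets; skips execution if the filter is
// statically FALSE and marks the transaction rollback-only on failure when requested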
private Result bulkDelete(CqnDelete delete, Iterable<? extends Map<String, Object>> valueSets,
boolean rollbackOnFail) {
CdsEntity target = entity(model, delete.ref());
cqnValidator.validate(delete, target);
delete = resolvePath(delete, target);
CqnDelete projectedDelete = projectionProcessor.resolve(delete);
return timed.cqn(() -> {
int[] deleteCount;
if (CqnStatementUtils.filterIsFalse(projectedDelete)) {
logger.debug("Statement filter condition evaluates to FALSE. Execution skipped.");
int n = (int) StreamSupport.stream(valueSets.spliterator(), false).count();
deleteCount = new int[n];
} else {
PreparedCqnStatement pcqn = connectedClient.prepare(projectedDelete);
List<Map<String, Object>> parameterValues = new ArrayList<>();
valueSets.forEach(parameterValues::add);
try {
deleteCount = connectedClient.executeUpdate(pcqn, parameterValues);
} catch (Exception e) {
if (rollbackOnFail) {
connectedClient.setRollbackOnly();
}
throw e;
}
}
return deletedRows(deleteCount).result();
}, projectedDelete, valueSets.spliterator()::estimateSize);
}
@Override
public Result execute(CqnInsert insert) {
insert = cqnNormalizer.normalize(insert);
return deepInsert(insert, false);
}
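// upsert: deep or non-uniform data is delegated to deepUpsert, flat data is executed
// as one batched upsert statement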
@Override
public Result execute(CqnUpsert upsert) {
if (upsert.entries().isEmpty()) {
return insertedRows(emptyList()).result();
}
upsert = cqnNormalizer.normalize(upsert);
CdsEntity root = model.getEntity(upsert.ref().firstSegment());
CdsEntity target = entity(model, upsert.ref());
dataUtils.enforceTenant(upsert, root, upsert.entries());
cqnValidator.validate(upsert, target);
if (isDeep(target, upsert.entries()) || !DataUtils.uniformData(target, upsert.entries())) {
return deepUpsert(target, upsert);
}
List<Map<String, Object>> entries = upsert.entries();
dataUtils.prepareForUpdate(target, entries);
final CqnUpsert resolvedUpsert = projectionProcessor.resolve(CQL.copy(upsert));
return timed.cqn(() -> {
CdsEntity resolvedEntity = entity(model, resolvedUpsert.ref());
dataUtils.removeVirtualElements(resolvedEntity, resolvedUpsert.entries());
if (isDeep(resolvedEntity, resolvedUpsert.entries())) {
return deepUpsert(target, resolvedUpsert);
}
PreparedCqnStatement pcqn = connectedClient.prepare(resolvedUpsert);
connectedClient.executeUpdate(pcqn, resolvedUpsert.entries());
return insertedRows(entries).result();
}, resolvedUpsert, entries::size);
}
private Result deepUpsert(CdsEntity entity, CqnUpsert upsert) {
Map<String, Object> targetKeys = cqnAnalyzer.analyze(upsert.ref()).targetKeyValues();
DeepUpdateSplitter updateSplitter = new DeepUpdateSplitter(this);
EntityOperations operations = updateSplitter.computeOperations(entity, upsert, targetKeys);
return runOperations(operations, upsert.hints()).result();
}
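// splits deep insert data into flat inserts per target entity; the transaction is marked
// rollback-only on failure as soon as more than one statement is involved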
private Result deepInsert(CqnXsert xsert, boolean rollbackOnFail) {
CdsEntity entity = model.getEntity(xsert.ref().firstSegment());
List<Map<String, Object>> entries = xsert.entries();
dataUtils.prepareForInsert(entity, entries);
dataUtils.enforceTenant(xsert, entity, entries);
List<CqnInsert> inserts = new DeepInsertSplitter(entity, context.getSessionContext(), xsert.hints()).split(entries);
boolean isRollbackOnly = rollbackOnFail || inserts.size() > 1;
inserts.forEach(insert -> {
cqnValidator.validate(insert, entity(model, insert.ref()));
final CqnInsert projectedInsert = projectionProcessor.resolve(insert);
CdsEntity target = model.getEntity(projectedInsert.ref().firstSegment());
dataUtils.removeVirtualElements(target, projectedInsert.entries());
if (isDeep(target, projectedInsert.entries())) {
deepInsert(projectedInsert, true);
return;
}
// process @cds.on.insert again as element might be excluded by projection
dataUtils.processOnInsert(target, projectedInsert.entries());
timed.cqn(() -> {
PreparedCqnStatement pcqn = connectedClient.prepare(projectedInsert);
try {
return connectedClient.executeUpdate(pcqn, projectedInsert.entries());
} catch (Exception e) {
if (isRollbackOnly) {
connectedClient.setRollbackOnly();
}
throw e;
}
}, projectedInsert, projectedInsert.entries()::size);
});
return insertedRows(entries).entity(entity).result();
}
@Override
public Result execute(CqnUpdate update, Object... paramValues) {
return execute(update, toIndexMap(paramValues));
}
@Override
public Result execute(CqnUpdate update, Map<String, Object> namedValues) {
return execute(update, namedValues.isEmpty() ? emptyList() : singletonList(namedValues));
}
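// update: resolves runtime views and projections, falls back to a deep update for nested
// or non-uniform data, and returns per-entry update counts as a batch result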
@Override
public Result execute(CqnUpdate update, Iterable<Map<String, Object>> valueSets) {
CdsEntity entity = entity(model, update.ref());
cqnValidator.validate(update, entity);
dataUtils.prepareForUpdate(entity, update.entries());
dataUtils.enforceTenant(update, entity, update.entries());
if (!hasUpdateData(entity, update)) {
Result count = selectCountAll(entity, update);
update.entries().forEach(Map::clear);
return count;
}
CqnUpdate resolvedUpdate = update;
CdsEntity resolvedEntity = entity;
if (usesRuntimeViews(model, update.ref())) {
resolvedUpdate = projectionProcessor.resolveProjection(update);
resolvedEntity = entity(model, resolvedUpdate.ref());
}
resolvedUpdate = cqnNormalizer.normalize(resolvedUpdate); // paths to where exists
if (isDeep(entity, resolvedUpdate.entries()) || !DataUtils.uniformData(entity, resolvedUpdate.entries())) {
Map<String, Object> targetKeys = cqnAnalyzer.analyze(resolvedUpdate).targetKeyValues();
return deepUpdate(resolvedEntity, resolvedUpdate, targetKeys).entity(entity).result();
}
CqnUpdate resolved = projectionProcessor.resolve(resolvedUpdate);
if (resolved != resolvedUpdate) {
resolvedUpdate = resolved;
resolvedEntity = entity(model, resolvedUpdate.ref());
dataUtils.prepareForUpdate(resolvedEntity, resolvedUpdate.entries());
if (isDeep(resolvedEntity, resolvedUpdate.entries())) {
// projection resolution can introduce nested data when resolving paths
Map<String, Object> targetKeys = cqnAnalyzer.analyze(update).targetKeyValues();
return deepUpdate(resolvedEntity, resolvedUpdate, targetKeys).entity(entity).result();
}
}
dataUtils.removeVirtualElements(resolvedEntity, resolvedUpdate.entries());
if (!hasUpdateData(resolvedEntity, resolvedUpdate)) {
Result count = selectCountAll(resolvedEntity, resolvedUpdate);
update.entries().forEach(Map::clear);
return count;
}
int[] updateCount = flatUpdate(resolvedUpdate, resolvedEntity, valueSets);
return updateResult(update, entity, resolvedUpdate, resolvedEntity, updateCount);
}
private Result updateResult(CqnUpdate update, CdsEntity entity, CqnUpdate resolvedUpdate, CdsEntity resolvedEntity,
int[] updateCount) {
List<Map<String, Object>> entries = update.entries();
if (entries.size() == 1) {
entries = filledList(updateCount.length, entries.get(0));
}
addKeyValuesToEntries(update, entries);
OccUtils.incrementOrSetEtagValuesVersioned(resolvedEntity, update, entries, resolvedUpdate.entries());
return batchUpdateResult(entries, updateCount).entity(entity).result();
}
private void addKeyValuesToEntries(CqnUpdate update, List<Map<String, Object>> entries) {
Map<String, Object> keyValues = cqnAnalyzer.analyze(update).targetKeyValues();
if (!keyValues.isEmpty()) {
entries.forEach(e -> e.putAll(keyValues));
}
}
private static List<Map<String, Object>> filledList(int length, Map<String, Object> entry) {
List<Map<String, Object>> entries = new ArrayList<>(length);
for (int i = 0; i < length; i++) {
entries.add(entry);
}
return entries;
}
private static ResultBuilder batchUpdateResult(List<? extends Map<String, Object>> entries, int[] updateCount) {
return batchUpdateResult(entries, stream(updateCount).asLongStream().toArray());
}
private static ResultBuilder batchUpdateResult(List<? extends Map<String, Object>> entries, long[] updateCount) {
ResultBuilder builder = ResultBuilder.batchUpdate();
builder.addUpdatedRows(updateCount, entries);
return builder;
}
private int[] flatUpdate(CqnUpdate update, CdsEntity entity, Iterable<Map<String, Object>> valueSets) {
List<Map<String, Object>> parameterValues = mergeParams(update.entries(), valueSets);
OccUtils.prepareVersionParams(update, entity, parameterValues, () -> context.getSessionContext().getNow());
if (!targetsDocStore(entity)) {
moveKeyValuesToWhere(entity, update, true);
}
return executeTimed(update, valueSets, parameterValues);
}
private int[] executeTimed(CqnUpdate update, Iterable<Map<String, Object>> valueSets,
List<Map<String, Object>> parameterValues) {
return timed.cqn(() -> {
if (CqnStatementUtils.filterIsFalse(update)) {
logger.debug("Statement filter condition evaluates to FALSE. Execution skipped.");
int n = (int) StreamSupport.stream(valueSets.spliterator(), false).count();
return new int[n];
} else {
PreparedCqnStatement pcqn = connectedClient.prepare(update);
return connectedClient.executeUpdate(pcqn, parameterValues);
}
}, update, update.entries()::size);
}
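// deep update: computes the entity operations via the DeepUpdateSplitter and runs them
// in a fixed order (DELETE, INSERT, UPDATE_OR_INSERT, UPSERT, UPDATE)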
private ResultBuilder deepUpdate(CdsEntity entity, CqnUpdate update, Map<String, Object> targetKeys) {
DeepUpdateSplitter updateSplitter = new DeepUpdateSplitter(this);
EntityOperations operations = updateSplitter.computeOperations(entity, update, targetKeys);
return runOperations(operations, update.hints());
}
private ResultBuilder runOperations(EntityOperations operations, Map<String, Object> hints) {
try {
delete(operations.filter(Operation.DELETE), false, hints);
insert(operations.filter(Operation.INSERT), hints);
updateOrInsert(operations.filter(Operation.UPDATE_OR_INSERT), hints);
upsert(operations.filter(Operation.UPSERT), hints);
update(operations.filter(Operation.UPDATE), hints);
} catch (Exception e) {
connectedClient.setRollbackOnly();
throw e;
}
if (operations.entries().size() == 1 && operations.updateCount().length > 1) {
return searchedUpdateResult(operations);
}
return batchUpdateResult(operations.entries(), operations.updateCount());
}
private ResultBuilder searchedUpdateResult(EntityOperations operations) {
Map<String, Object> data = operations.entries().get(0);
return ResultBuilder.updatedRows(Arrays.stream(operations.updateCount()).sum(), data);
}
private Result selectCountAll(CdsEntity entity, CqnUpdate update) {
Set<String> keys = concreteKeyNames(entity);
Select<?> countQuery = CqnStatementUtils.countAll(update);
long[] rowCount = new long[update.entries().size()];
int i = 0;
for (Map<String, Object> entry : update.entries()) {
CqnPredicate where = update.where().orElse(TRUE);
where = Conjunction.and(where, update.elements().filter(keys::contains)
.map(key -> CQL.get(key).eq(CQL.param(key))).collect(Conjunction.and()));
countQuery.where(where);
rowCount[i] = execute(countQuery, entry).single().as(Count.class).getCount();
i++;
}
return batchUpdateResult(filledList(rowCount.length, new HashMap<>()), rowCount).result();
}
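// merges update data with the parameter value sets: without parameters the data itself is
// used, a single data set is applied to every parameter set, otherwise the sizes must match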
private static List<Map<String, Object>> mergeParams(List<Map<String, Object>> updateData,
Iterable<Map<String, Object>> valueSets) {
List<Map<String, Object>> paramVals = new ArrayList<>();
if (!valueSets.iterator().hasNext()) {
// no parameter set
updateData.forEach(v -> paramVals.add(DataUtils.copyMap(v)));
return paramVals;
}
valueSets.forEach(v -> paramVals.add(DataUtils.copyMap(v)));
if (updateData.size() == 1) {
// (mass) update with one data set
Map<String, Object> data = updateData.get(0);
paramVals.forEach(p -> p.putAll(data));
return paramVals;
}
// batch update of multiple entities
if (updateData.size() == paramVals.size()) {
// with parameter set for each entry
Iterator<Map<String, Object>> keyIter = updateData.iterator();
paramVals.forEach(p -> p.putAll(keyIter.next()));
return paramVals;
}
throw new CdsException("Batch update failed: Parameter value list size (" + paramVals.size()
+ ") does not match batch size (" + updateData.size() + ")");
}
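// positional parameters are exposed as a map keyed by their zero-based index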
private static Map<String, Object> toIndexMap(Object... paramValues) {
Map<String, Object> parameters = new HashMap<>();
for (int i = 0; i < paramValues.length; i++) {
parameters.put(String.valueOf(i), paramValues[i]);
}
return parameters;
}
private static boolean hasUpdateData(CdsEntity target, CqnUpdate update) {
return !update.setters().isEmpty() || DataUtils.hasNonKeyValues(target, update.data());
}
@Override
public SessionContext getSessionContext() {
return context.getSessionContext();
}
@Override
public void setSessionContext(SessionContext session) {
this.context.setSessionContext(session);
this.connectedClient.setSessionContext(session);
}
@Override
public void deleteAll(Stream<CdsEntity> entities) {
connectedClient.deleteAll(entities);
}
}