/************************************************************************
* © 2019-2022 SAP SE or an SAP affiliate company. All rights reserved. *
************************************************************************/
package com.sap.cds.impl;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.TreeMap;
import java.util.function.BiConsumer;
import java.util.function.Function;
import java.util.function.ToLongFunction;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Iterables;
import com.google.common.collect.Iterators;
import com.google.common.collect.Streams;
import com.sap.cds.CdsDataStore;
import com.sap.cds.CdsException;
import com.sap.cds.DataStoreConfiguration;
import com.sap.cds.Result;
import com.sap.cds.ResultBuilder;
import com.sap.cds.Row;
import com.sap.cds.SessionContext;
import com.sap.cds.impl.EntityCascader.EntityKeys;
import com.sap.cds.impl.EntityCascader.EntityOperation;
import com.sap.cds.impl.EntityCascader.EntityOperation.Operation;
import com.sap.cds.impl.EntityCascader.EntityOperations;
import com.sap.cds.impl.builder.model.Conjunction;
import com.sap.cds.ql.CQL;
import com.sap.cds.ql.Delete;
import com.sap.cds.ql.Insert;
import com.sap.cds.ql.Select;
import com.sap.cds.ql.StructuredType;
import com.sap.cds.ql.Update;
import com.sap.cds.ql.Upsert;
import com.sap.cds.ql.cqn.CqnAnalyzer;
import com.sap.cds.ql.cqn.CqnDelete;
import com.sap.cds.ql.cqn.CqnInsert;
import com.sap.cds.ql.cqn.CqnPredicate;
import com.sap.cds.ql.cqn.CqnSelect;
import com.sap.cds.ql.cqn.CqnStatement;
import com.sap.cds.ql.cqn.CqnUpdate;
import com.sap.cds.ql.cqn.CqnUpsert;
import com.sap.cds.ql.cqn.CqnXsert;
import com.sap.cds.ql.impl.CqnNormalizer;
import com.sap.cds.ql.impl.DeepInsertSplitter;
import com.sap.cds.ql.impl.DeepUpdateSplitter;
import com.sap.cds.ql.impl.DeleteBuilder;
import com.sap.cds.reflect.CdsEntity;
import com.sap.cds.reflect.CdsModel;
import com.sap.cds.reflect.CdsStructuredType;
import com.sap.cds.util.CdsModelUtils.CascadeType;
import com.sap.cds.util.CqnStatementUtils;
import com.sap.cds.util.CqnStatementUtils.Count;
import com.sap.cds.util.DataUtils;
import com.sap.cds.util.ProjectionProcessor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static com.sap.cds.DataStoreConfiguration.UPSERT_STRATEGY;
import static com.sap.cds.DataStoreConfiguration.UPSERT_STRATEGY_REPLACE;
import static com.sap.cds.DataStoreConfiguration.UPSERT_STRATEGY_UPSERT;
import static com.sap.cds.ResultBuilder.deletedRows;
import static com.sap.cds.ResultBuilder.insertedRows;
import static com.sap.cds.impl.builder.model.CqnParam.params;
import static com.sap.cds.impl.docstore.DocStoreUtils.targetsDocStore;
import static com.sap.cds.impl.parser.token.CqnBoolLiteral.TRUE;
import static com.sap.cds.util.CdsModelUtils.CascadeType.DELETE;
import static com.sap.cds.util.CdsModelUtils.entity;
import static com.sap.cds.util.CdsModelUtils.keyNames;
import static com.sap.cds.util.CqnStatementUtils.inlineCountQuery;
import static com.sap.cds.util.CqnStatementUtils.isSelectStar;
import static com.sap.cds.util.CqnStatementUtils.moveKeyValuesToWhere;
import static com.sap.cds.util.CqnStatementUtils.rowType;
import static com.sap.cds.util.DataUtils.isDeep;
import static com.sap.cds.util.PathExpressionResolver.resolvePath;
import static java.lang.Math.min;
import static java.util.Arrays.stream;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static java.util.Objects.hash;
import static java.util.stream.Collectors.groupingBy;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toMap;
public class CdsDataStoreImpl implements CdsDataStore {
private final CqnValidator cqnValidator;
private final ConnectedClient connectedClient;
private final CqnNormalizer cqnNormalizer;
private final CqnAnalyzer cqnAnalyzer;
private final DataUtils dataUtils;
private final Context context;
private final CdsModel model;
private final ProjectionProcessor projectionProcessor;
private static final Logger logger = LoggerFactory.getLogger(CdsDataStoreImpl.class);
private static final TimingLogger timed = new TimingLogger(logger);
public CdsDataStoreImpl(Context context, ConnectedDataStoreConnector connector) {
this.context = context;
this.model = context.getCdsModel();
this.cqnValidator = CqnValidator.create(context);
this.connectedClient = connector.create(context);
this.dataUtils = DataUtils.create(context::getSessionContext,
context.getDbContext().getCapabilities().timestampPrecision());
this.connectedClient.setSessionContext(context.getSessionContext());
this.cqnNormalizer = new CqnNormalizer(context);
this.cqnAnalyzer = CqnAnalyzer.create(model);
this.projectionProcessor = ProjectionProcessor.create(model, cqnAnalyzer, dataUtils);
// TODO - set the initial session context values?
}
@Override
public Result execute(CqnSelect select, Object... paramValues) {
return execute(select, toIndexMap(paramValues));
}
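// Executes a CQN select with named parameter values: the statement is normalized and validated,
// forward-mapped associations are resolved, and the prepared query is executed. If an inline count
// is requested, a separate count query is only issued when paging may have truncated the result;
// otherwise the row count of the returned result is reused.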
@Override
public Result execute(CqnSelect select, Map<String, Object> cqnParameterValues) {
return timed.debug(() -> {
CdsEntity targetEntity = null;
CdsStructuredType rowType = null;
if (select.from().isRef()) {
targetEntity = entity(model, select.from().asRef());
if (isSelectStar(select.items()) && select.excluding().isEmpty()) {
rowType = targetEntity;
}
}
CqnSelect normSelect = cqnNormalizer.normalize(select);
cqnValidator.validate(normSelect, connectedClient.capabilities());
if (rowType == null) {
rowType = rowType(model, normSelect);
}
normSelect = cqnNormalizer.resolveForwardMappedAssocs(normSelect);
PreparedCqnStatement pcqn = connectedClient.prepare(normSelect);
ResultBuilder result = connectedClient
.executeQuery(pcqn, cqnParameterValues, this, normSelect.getLock().isPresent()).rowType(rowType);
if (normSelect.hasInlineCount()) {
long rowCount = result.result().rowCount();
if (normSelect.hasLimit() && requiresInlineCountQuery(normSelect.top(), normSelect.skip(), rowCount)) {
result.inlineCount(getInlineCount(normSelect, cqnParameterValues));
} else {
result.inlineCount(rowCount);
}
}
return result.entity(targetEntity).result();
}, "CQN >>{}<<", () -> new Object[] { safeToJson(select, context.getDataStoreConfiguration()) });
}
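// Executes the select once per partition of at most maxBatchSize value sets and concatenates the
// resulting rows. ORDER BY is rejected when more than one batch would be required.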
@Override
public Result execute(CqnSelect select, Iterable<Map<String, Object>> valueSets, int maxBatchSize) {
int valueSetSize = Iterables.size(valueSets);
if (valueSetSize == 1) {
return execute(select, valueSets.iterator().next());
}
if (!select.orderBy().isEmpty() && valueSetSize > maxBatchSize) {
throw new UnsupportedOperationException(
"Order by is not supported when query is executed in multiple batches");
}
List<Row> rows = new ArrayList<>(valueSetSize);
Iterator<List<Map<String, Object>>> partitions = Iterators.partition(valueSets.iterator(), maxBatchSize);
while (partitions.hasNext()) {
CqnSelect batchSelect = CqnStatementUtils.batchSelect(select, partitions.next());
List<Row> result = execute(batchSelect).list();
rows.addAll(result);
}
// TODO Support streaming
return ResultBuilder.selectedRows(rows).result();
}
private long getInlineCount(CqnSelect select, Map<String, Object> cqnParameterValues) {
CqnSelect inlineCountQuery = inlineCountQuery(select);
PreparedCqnStatement pcqn = connectedClient.prepare(inlineCountQuery);
Result result = connectedClient.executeQuery(pcqn, cqnParameterValues, this, false).result();
return result.single(Count.class).getCount();
}
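// Serializes a CQN statement for logging. Unless LOG_CQN_VALUES is enabled, the statement is
// anonymized first so that data values do not end up in the log.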
@VisibleForTesting
static String safeToJson(CqnStatement statement, DataStoreConfiguration config) {
boolean doLogValues = config.getProperty(DataStoreConfiguration.LOG_CQN_VALUES, false);
try {
if (!doLogValues) {
return CqnStatementUtils.anonymizeStatement(statement).toJson();
}
return statement.toJson();
} catch (RuntimeException ex) {
logger.error("cannot serialize CQN statement");
return "Unserializable CQN";
}
}
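// A dedicated inline-count query is only required if rows were skipped or the page limit may have
// cut off the result; otherwise the number of returned rows is already the total.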
@VisibleForTesting
static boolean requiresInlineCountQuery(long top, long skip, long rowCount) {
return skip > 0 || top <= rowCount;
}
@Override
public Result execute(CqnDelete delete, Object... paramValues) {
return execute(delete, toIndexMap(paramValues));
}
@Override
public Result execute(CqnDelete delete, Map<String, Object> namedValues) {
return execute(delete, singletonList(namedValues));
}
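// Deletes with cascade semantics: composition children are first removed via optimized bulk
// deletes along the resolved paths. For cyclic models or deletes with subqueries in the where
// clause, the affected object graph is selected by key and deleted explicitly.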
@Override
public Result execute(CqnDelete delete, Iterable<Map<String, Object>> valueSets) {
delete = projectionProcessor.resolve(delete);
delete = cqnNormalizer.normalize(delete);
CdsEntity target = entity(model, delete.ref());
// try optimized delete of child entities
boolean deleted = Cascader.create(CascadeType.DELETE, target).from(delete.ref()).where(delete.where())
.cascade(path -> bulkDelete(Delete.from(path), valueSets, true));
if (deleted) { // delete root
return bulkDelete(delete, valueSets, false);
} else { // fallback for cyclic models and subqueries in where
target = model.getEntity(delete.ref().firstSegment());
// compute object graph (select key values) and bulk delete
Set<EntityKeys> entities = cascade(target, delete.where(), valueSets);
Result result = bulkDelete(delete, valueSets, false);
delete(entities.stream().map(k -> EntityOperation.delete(k, null, context.getSessionContext())), true);
return result;
}
}
private Set<EntityKeys> cascade(CdsEntity target, Optional<CqnPredicate> filter,
Iterable<? extends Map<String, Object>> valueSets) {
EntityCascader cascader = EntityCascader.from(this, target).where(filter);
Set<EntityKeys> keySets;
if (valueSets.iterator().hasNext()) {
keySets = Streams.stream(valueSets).flatMap(v -> cascader.with(v).cascade(DELETE).stream())
.collect(Collectors.toSet());
} else {
keySets = cascader.cascade(DELETE);
}
return keySets;
}
private void updateOrInsert(Stream<EntityOperation> operations) {
List<EntityOperation> notInDatastore = update(operations);
insert(notInDatastore.stream());
}
private void insert(Stream<EntityOperation> ops) {
ops.collect(groupingBy(EntityOperation::targetEntity)).forEach((entity, data) -> {
Iterator<Row> rows = execute(Insert.into(entity).entries(data)).iterator();
for (EntityOperation op : data) {
op.inserted(rows.next());
}
});
}
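// Groups the operations by target entity and the set of updated elements so that each group can
// be executed as a single batched statement.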
private void groupAndExecute(Stream<EntityOperation> ops, BiConsumer<CdsEntity, List<EntityOperation>> action) {
ops.collect(groupingBy(o -> hash(o.targetEntity().getQualifiedName(), o.updateValues().keySet())))
.forEach((g, batch) -> {
CdsEntity entity = batch.get(0).targetEntity();
action.accept(entity, batch);
});
}
private void upsert(Stream<EntityOperation> ops) {
groupAndExecute(ops, (entity, upserts) -> {
List<Map<String, Object>> entries = upserts.stream().map(EntityOperation::updateValues).collect(toList());
Result result = execute(Upsert.into(entity).entries(entries));
Iterator<Row> resultIter = result.iterator();
Iterator<EntityOperation> upsertIter = upserts.iterator();
for (int i = 0; i < result.batchCount(); i++) {
EntityOperation u = upsertIter.next();
u.inserted(resultIter.next());
}
});
}
private List<EntityOperation> update(Stream<EntityOperation> ops) {
List<EntityOperation> notFound = new ArrayList<>();
groupAndExecute(ops, (entity, updates) -> {
List<Map<String, Object>> entries = updates.stream().map(EntityOperation::updateValues).collect(toList());
Result result = execute(Update.entity(entity).entries(entries));
if (result.rowCount() > 0) {
Iterator<Row> resultIter = result.iterator();
Iterator<EntityOperation> updateIter = updates.iterator();
for (int i = 0; i < result.batchCount(); i++) {
EntityOperation u = updateIter.next();
long rowCount = result.rowCount(i);
u.updated(resultIter.next(), rowCount);
if (rowCount == 0) {
notFound.add(u);
}
}
} else {
notFound.addAll(updates);
}
});
return notFound;
}
private void delete(Stream<EntityOperation> ops, boolean rollbackOnFail) {
Collection<List<EntityOperation>> grouped = groupByEntityAndPath(ops);
for (List<EntityOperation> batch : grouped) {
if (!batch.isEmpty()) {
EntityOperation o = batch.iterator().next();
Set<String> keyNames = o.targetKeys().keySet();
StructuredType<?> ref = CQL.entity(o.targetEntity().getQualifiedName()).matching(params(keyNames));
if (o.path() != null) {
ref = ref.to(o.path());
}
CqnDelete delete = DeleteBuilder.from(ref);
bulkDelete(delete, batch, rollbackOnFail);
batch.forEach(EntityOperation::deleted);
}
}
}
private static Collection<List<EntityOperation>> groupByEntityAndPath(Stream<EntityOperation> ops) {
// Group by Entity name and path. Order by entity with longest path first
// TODO simplify & optimize grouping
ToLongFunction<String> pathLength = s -> s.chars().filter(c -> c == '.').count();
Comparator<String> longestPath = Comparator.comparingLong(pathLength).reversed()
.thenComparing(Function.identity());
TreeMap<String, List<EntityOperation>> grouped = ops.collect(
groupingBy(o -> o.targetEntity().getQualifiedName() + (o.path() != null ? ":." + o.path() : ""),
() -> new TreeMap<>(longestPath), toList()));
return grouped.values();
}
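// Prepares the delete once and executes it with all value sets as a batch. On failure the
// transaction is marked rollback-only if requested by the caller.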
private Result bulkDelete(CqnDelete delete, Iterable<? extends Map<String, ?>> valueSets,
boolean rollbackOnFail) {
cqnValidator.validate(delete);
delete = resolvePath(model, delete);
CqnDelete projectedDelete = projectionProcessor.resolve(delete);
return timed.debug(() -> {
PreparedCqnStatement pcqn = connectedClient.prepare(projectedDelete);
List<Map<String, ?>> parameterValues = new ArrayList<>();
valueSets.forEach(parameterValues::add);
try {
int[] deleteCount = connectedClient.executeUpdate(pcqn, parameterValues);
return deletedRows(deleteCount).result();
} catch (Exception e) {
if (rollbackOnFail) {
connectedClient.setRollbackOnly();
}
throw e;
}
}, "CQN bulk delete >>{}<< with {} value sets.",
safeToJson(projectedDelete, context.getDataStoreConfiguration()),
valueSets.spliterator().estimateSize());
}
@Override
public Result execute(CqnInsert insert) {
insert = isDraftEnabled(insert);
insert = cqnNormalizer.normalize(insert);
return deepInsert(insert, false);
}
private CqnInsert isDraftEnabled(CqnInsert insert) {
CdsEntity entity = model.getEntity(insert.ref().firstSegment());
if (entity.findAnnotation("odata.draft.enabled").isPresent()) {
insert = projectionProcessor.resolve(insert);
}
return insert;
}
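// Executes an upsert according to the configured strategy: with the replace strategy the entries
// are deleted by key and re-inserted; otherwise deep or non-uniform data is handled by a deep
// upsert, and flat data is executed as a native batched upsert.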
@Override
public Result execute(CqnUpsert upsert) {
if (upsert.entries().isEmpty()) {
return insertedRows(emptyList()).result();
}
upsert = cqnNormalizer.normalize(upsert);
CdsEntity entity = entity(model, upsert.ref());
Object strategy = context.getDataStoreConfiguration().getProperty(UPSERT_STRATEGY, UPSERT_STRATEGY_UPSERT);
strategy = upsert.hints().getOrDefault(UPSERT_STRATEGY, strategy);
if (UPSERT_STRATEGY_REPLACE.equals(strategy)) {
return executeReplace(upsert, entity);
}
cqnValidator.validate(upsert);
if (isDeep(entity, upsert.entries()) || !DataUtils.uniformData(entity, upsert.entries())) {
return deepUpsert(entity, upsert);
}
List<Map<String, Object>> entries = upsert.entries();
dataUtils.prepareForUpdate(entity, entries);
final CqnUpsert resolvedUpsert = projectionProcessor.resolve(upsert);
return timed.debug(() -> {
CdsEntity resolvedEntity = entity(model, resolvedUpsert.ref());
if (isDeep(resolvedEntity, resolvedUpsert.entries())) {
return deepUpsert(entity, resolvedUpsert);
}
PreparedCqnStatement pcqn = connectedClient.prepare(resolvedUpsert);
connectedClient.executeUpdate(pcqn, resolvedUpsert.entries());
return insertedRows(entries).result();
}, "CQN upsert >>{}<< with {} entries.", safeToJson(resolvedUpsert, context.getDataStoreConfiguration()),
entries.size());
}
private Result deepUpsert(CdsEntity entity, CqnUpsert upsert) {
Map<String, Object> targetKeys = cqnAnalyzer.analyze(upsert.ref()).targetKeyValues();
DeepUpdateSplitter updateSplitter = new DeepUpdateSplitter(this);
EntityOperations operations = updateSplitter.computeOperations(entity, upsert, targetKeys);
return runOperations(operations).result();
}
private Result executeReplace(CqnUpsert upsert, CdsEntity entity) {
deleteByKeys(entity, upsert.entries());
return deepInsert(upsert, true);
}
private void deleteByKeys(CdsEntity entity, Iterable<Map<String, Object>> keyValues) {
CqnDelete delete = Delete.from(entity).matching(keyNames(entity) //
.stream().collect(toMap(k -> k, CQL::param)));
DataUtils.normalizedUuidKeys(entity, keyValues);
execute(delete, keyValues);
}
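// Splits deep insert data into flat inserts and executes them one by one. If any flat insert
// fails and more than one statement is involved (or rollbackOnFail is set), the transaction is
// marked rollback-only.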
private Result deepInsert(CqnXsert xsert, boolean rollbackOnFail) {
CdsEntity entity = model.getEntity(xsert.ref().firstSegment());
List<Map<String, Object>> entries = xsert.entries();
dataUtils.prepareForInsert(entity, entries);
List<CqnInsert> inserts = new DeepInsertSplitter(entity, context.getSessionContext()).split(entries);
boolean isRollbackOnly = rollbackOnFail || inserts.size() > 1;
inserts.forEach(insert -> {
cqnValidator.validate(insert);
final CqnInsert projectedInsert = projectionProcessor.resolve(insert);
CdsEntity target = model.getEntity(projectedInsert.ref().firstSegment());
DataUtils.resolvePaths(target, projectedInsert.entries());
if (isDeep(target, projectedInsert.entries())) {
deepInsert(projectedInsert, true);
return;
}
// process @cds.on.insert again as element might be excluded by projection
dataUtils.processOnInsert(target, projectedInsert.entries());
timed.debug(() -> {
PreparedCqnStatement pcqn = connectedClient.prepare(projectedInsert);
try {
return connectedClient.executeUpdate(pcqn, projectedInsert.entries());
} catch (Exception e) {
if (isRollbackOnly) {
connectedClient.setRollbackOnly();
}
throw e;
}
}, "CQN upsert >>{}<< with {} entries.", safeToJson(projectedInsert, context.getDataStoreConfiguration()),
projectedInsert.entries().size());
});
return insertedRows(entries).entity(entity).result();
}
@Override
public Result execute(CqnUpdate update, Object... paramValues) {
return execute(update, toIndexMap(paramValues));
}
@Override
public Result execute(CqnUpdate update, Map<String, Object> namedValues) {
return execute(update, namedValues.isEmpty() ? emptyList() : singletonList(namedValues));
}
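// Executes an update with optional parameter value sets: updates without non-key values only
// count the matching rows, deep or non-uniform data is delegated to a deep update, and flat data
// is executed as a (batched) searched update.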
@Override
public Result execute(CqnUpdate update, Iterable<Map<String, Object>> valueSets) {
cqnValidator.validate(update);
CdsEntity entity = cqnAnalyzer.analyze(update.ref()).targetEntity();
dataUtils.prepareForUpdate(entity, update.entries());
if (!DataUtils.hasNonKeyValues(entity, update.data())) {
Result count = selectCountAll(update, entity, update);
update.entries().forEach(Map::clear);
return count;
}
CqnUpdate normUpdate = cqnNormalizer.normalize(update);
if (isDeep(entity, normUpdate.entries()) || !DataUtils.uniformData(entity, normUpdate.entries())) {
Map<String, Object> targetKeys = cqnAnalyzer.analyze(update).targetKeyValues();
return deepUpdate(entity, normUpdate, targetKeys).entity(entity).result();
}
CqnUpdate resolvedUpdate = projectionProcessor.resolve(normUpdate);
CdsEntity resolvedEntity = entity;
if (resolvedUpdate != normUpdate) {
resolvedEntity = cqnAnalyzer.analyze(resolvedUpdate.ref()).targetEntity();
dataUtils.prepareForUpdate(resolvedEntity, resolvedUpdate.entries());
if (isDeep(resolvedEntity, resolvedUpdate.entries())) {
Map<String, Object> targetKeys = cqnAnalyzer.analyze(update).targetKeyValues();
return deepUpdate(resolvedEntity, resolvedUpdate, targetKeys).entity(entity).result();
}
}
int[] updateCount = flatUpdate(resolvedUpdate, resolvedEntity, valueSets);
List<Map<String, Object>> entries = update.entries();
if (entries.size() == 1) {
entries = filledList(updateCount.length, entries.get(0));
}
addKeyValuesToEntries(update, entries);
return batchUpdateResult(entries, updateCount).entity(entity).result();
}
private void addKeyValuesToEntries(CqnUpdate update, List<Map<String, Object>> entries) {
Map<String, Object> keyValues = cqnAnalyzer.analyze(update).targetKeyValues();
if (!keyValues.isEmpty()) {
entries.forEach(e -> e.putAll(keyValues));
}
}
private static List<Map<String, Object>> filledList(int length, Map<String, Object> entry) {
List<Map<String, Object>> entries = new ArrayList<>(length);
for (int i = 0; i < length; i++) {
entries.add(entry);
}
return entries;
}
private static ResultBuilder batchUpdateResult(List<? extends Map<String, ?>> entries, int[] updateCount) {
return batchUpdateResult(entries, stream(updateCount).asLongStream().toArray());
}
private static ResultBuilder batchUpdateResult(List<? extends Map<String, ?>> entries, long[] updateCount) {
int size = entries.size();
int length = updateCount.length;
ResultBuilder builder = ResultBuilder.batchUpdate();
for (int i = 0; i < length; i++) {
// clamp to the last entry if there are more update counts than entries
builder.addUpdatedRows(updateCount[i], entries.get(min(size - 1, i)));
}
return builder;
}
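// Merges the update data with the parameter value sets, moves key values from the data into the
// where clause (unless the entity targets the document store) and executes the prepared update as
// a batch.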
private int[] flatUpdate(CqnUpdate update, CdsEntity entity, Iterable<Map<String, Object>> valueSets) {
List<Map<String, Object>> parameterValues = mergeParams(update.entries(), valueSets);
if (!targetsDocStore(entity)) {
moveKeyValuesToWhere(entity, update, true);
}
return timed.debug(() -> {
PreparedCqnStatement pcqn = connectedClient.prepare(update);
return connectedClient.executeUpdate(pcqn, parameterValues);
}, "CQN Update >>{}<< with {} entries.", safeToJson(update, context.getDataStoreConfiguration()), update.entries().size());
}
private ResultBuilder deepUpdate(CdsEntity entity, CqnUpdate update, Map<String, Object> targetKeys) {
DeepUpdateSplitter updateSplitter = new DeepUpdateSplitter(this);
EntityOperations operations = updateSplitter.computeOperations(entity, update, targetKeys);
return runOperations(operations);
}
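// Executes the computed entity operations in a fixed order (deletes, inserts, update-or-inserts,
// upserts, updates). Any failure marks the transaction rollback-only.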
private ResultBuilder runOperations(EntityOperations operations) {
try {
delete(operations.filter(Operation.DELETE), false);
insert(operations.filter(Operation.INSERT));
updateOrInsert(operations.filter(Operation.UPDATE_OR_INSERT));
upsert(operations.filter(Operation.UPSERT));
update(operations.filter(Operation.UPDATE));
} catch (Exception e) {
connectedClient.setRollbackOnly();
throw e;
}
if (operations.entries().size() == 1 && operations.updateCount().length > 1) {
return searchedUpdateResult(operations);
}
return batchUpdateResult(operations.entries(), operations.updateCount());
}
private ResultBuilder searchedUpdateResult(EntityOperations operations) {
Map<String, Object> data = operations.entries().get(0);
return ResultBuilder.updatedRows(Arrays.stream(operations.updateCount()).sum(), data);
}
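// For updates that carry no non-key values, only counts the rows matching each entry (update
// where clause plus key values) instead of executing an update.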
private Result selectCountAll(CqnUpdate update, CdsEntity entity, CqnUpdate resolvedUpdate) {
Set<String> keys = keyNames(entity);
Select<?> countQuery = CqnStatementUtils.countAll(update);
long[] rowCount = new long[resolvedUpdate.entries().size()];
int i = 0;
for (Map<String, Object> entry : update.entries()) {
CqnPredicate where = update.where().orElse(TRUE);
where = Conjunction.and(where, update.elements().filter(keys::contains)
.map(key -> CQL.get(key).eq(CQL.param(key))).collect(Conjunction.and()));
countQuery.where(where);
rowCount[i] = execute(countQuery, entry).single().as(Count.class).getCount();
i++;
}
return batchUpdateResult(filledList(rowCount.length, new HashMap<>()), rowCount).result();
}
private static List<Map<String, Object>> mergeParams(List<Map<String, Object>> updateData,
Iterable<Map<String, Object>> valueSets) {
List<Map<String, Object>> paramVals = new ArrayList<>();
if (!valueSets.iterator().hasNext()) {
// no parameter set
updateData.forEach(v -> paramVals.add(DataUtils.copyMap(v)));
return paramVals;
}
valueSets.forEach(v -> paramVals.add(DataUtils.copyMap(v)));
if (updateData.size() == 1) {
// (mass) update with one data set
Map<String, Object> data = updateData.get(0);
paramVals.forEach(p -> p.putAll(data));
return paramVals;
}
// batch update of multiple entities
if (updateData.size() == paramVals.size()) {
// with parameter set for each entry
Iterator<Map<String, Object>> keyIter = updateData.iterator();
paramVals.forEach(p -> p.putAll(keyIter.next()));
return paramVals;
}
throw new CdsException("Batch update failed: Parameter value list size (" + paramVals.size()
+ ") does not match batch size (" + updateData.size() + ")");
}
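// Converts positional parameter values into a map keyed by their zero-based index.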
private static Map<String, Object> toIndexMap(Object... paramValues) {
Map<String, Object> parameters = new HashMap<>();
for (int i = 0; i < paramValues.length; i++) {
parameters.put(String.valueOf(i), paramValues[i]);
}
return parameters;
}
@Override
public SessionContext getSessionContext() {
return context.getSessionContext();
}
@Override
public void setSessionContext(SessionContext session) {
this.context.setSessionContext(session);
this.connectedClient.setSessionContext(session);
}
}