/*
* #%L
* Protempa Commons Backend Provider
* %%
* Copyright (C) 2012 - 2013 Emory University
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.protempa.backend.dsb.relationaldb;
import org.apache.commons.lang3.StringUtils;
import org.arp.javautil.arrays.Arrays;
import org.arp.javautil.collections.Collections;
import org.arp.javautil.sql.ConnectionSpec;
import org.protempa.DataSourceReadException;
import org.protempa.DataStreamingEventIterator;
import org.protempa.UniqueIdPair;
import org.protempa.backend.dsb.filter.Filter;
import org.protempa.backend.dsb.filter.PositionFilter;
import org.protempa.proposition.Proposition;
import org.protempa.proposition.value.Granularity;
import org.protempa.proposition.value.GranularityFactory;
import org.protempa.proposition.value.Unit;
import org.protempa.proposition.value.UnitFactory;
import java.sql.Connection;
import java.sql.SQLException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Abstract class for implementing database- and driver-specific SQL generators.
*
* @author Andrew Post
*/
public abstract class AbstractSQLGenerator implements SQLGenerator {
static final int FETCH_SIZE = 10000;
private static final int DEFAULT_QUERY_THREAD_COUNT = 4;
private static final String readPropositionsSQL = "select {0} from {1} {2}";
private ConnectionSpec connectionSpec;
private final Map<String, List<EntitySpec>> primitiveParameterSpecs;
private EntitySpec[] primitiveParameterEntitySpecs;
private final Map<String, List<EntitySpec>> eventSpecs;
private EntitySpec[] eventEntitySpecs;
private final Map<String, List<EntitySpec>> constantSpecs;
private EntitySpec[] constantEntitySpecs;
private StagingSpec[] stagedTableSpecs;
private GranularityFactory granularities;
private UnitFactory units;
private RelationalDbDataSourceBackend backend;
private int queryThreadCount;
protected AbstractSQLGenerator() {
this.primitiveParameterSpecs = new HashMap<>();
this.eventSpecs = new HashMap<>();
this.constantSpecs = new HashMap<>();
this.queryThreadCount = DEFAULT_QUERY_THREAD_COUNT;
}
@Override
public void initialize(
ConnectionSpec connectionSpec, RelationalDatabaseSpec relationalDatabaseSpec, RelationalDbDataSourceBackend backend) {
if (relationalDatabaseSpec != null) {
this.primitiveParameterEntitySpecs = relationalDatabaseSpec.getPrimitiveParameterSpecs();
populatePropositionMap(this.primitiveParameterSpecs,
this.primitiveParameterEntitySpecs);
this.eventEntitySpecs = relationalDatabaseSpec.getEventSpecs();
populatePropositionMap(this.eventSpecs, this.eventEntitySpecs);
this.constantEntitySpecs = relationalDatabaseSpec.getConstantSpecs();
populatePropositionMap(this.constantSpecs, this.constantEntitySpecs);
this.stagedTableSpecs = relationalDatabaseSpec.getStagedSpecs();
this.granularities = relationalDatabaseSpec.getGranularities();
this.units = relationalDatabaseSpec.getUnits();
this.connectionSpec = connectionSpec;
Integer queryThreadCountSetting = backend.getQueryThreadCount();
if (queryThreadCountSetting != null) {
this.queryThreadCount = queryThreadCountSetting;
}
} else {
throw new IllegalArgumentException(
"relationalDatabaseSpec cannot be null");
}
this.backend = backend;
}
public boolean getStreamingMode() {
return true;
}
@Override
public GranularityFactory getGranularities() {
return this.granularities;
}
@Override
public UnitFactory getUnits() {
return this.units;
}
@Override
public final boolean loadDriverIfNeeded() {
String className = getDriverClassNameToLoad();
if (className == null) {
return true;
}
try {
Class.forName(className);
return true;
} catch (ClassNotFoundException ex) {
Logger logger = SQLGenUtil.logger();
if (logger.isLoggable(Level.FINE)) {
logger.log(Level.FINE,
"{0} when trying to load {1}.",
new Object[]{ex.getClass().getName(), className});
}
return false;
}
}
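/*
 * Runs the queries for a single entity spec on its own database
 * connection, so that readPropositionsStreaming can process entity
 * specs in parallel on the executor's thread pool.
 */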
private class SQLExecutorCallable implements Callable<List<StreamingIteratorPair>> {
private final Map<EntitySpec, SQLGenResultProcessorFactory> allEntitySpecToResultProcessor;
private final Collection<EntitySpec> allEntitySpecs;
private final Filter filters;
private final Set<String> propIds;
private final Set<String> keyIds;
private final EntitySpec entitySpec;
public SQLExecutorCallable(EntitySpec entitySpec,
Map<EntitySpec, SQLGenResultProcessorFactory> allEntitySpecToResultProcessor,
Collection<EntitySpec> allEntitySpecs,
Filter filters,
Set<String> propIds,
Set<String> keyIds) {
this.entitySpec = entitySpec;
this.allEntitySpecToResultProcessor = allEntitySpecToResultProcessor;
this.allEntitySpecs = allEntitySpecs;
this.filters = filters;
this.propIds = propIds;
this.keyIds = keyIds;
}
@Override
public List<StreamingIteratorPair> call() throws Exception {
Connection conn;
try {
conn = connectionSpec.getOrCreate();
} catch (SQLException ex) {
throw new DataSourceReadException(ex);
}
return processEntitySpecStreaming(this.entitySpec,
allEntitySpecToResultProcessor,
allEntitySpecs, filters,
propIds,
keyIds, new StreamingSQLExecutor(
conn, backendNameForMessages(),
backend.getQueryTimeout()));
}
}
@Override
public DataStreamingEventIterator<Proposition> readPropositionsStreaming(
final Set<String> keyIds, final Set<String> propIds, final Filter filters)
throws DataSourceReadException {
final Map<EntitySpec, List<String>> entitySpecToPropIds
= entitySpecToPropIds(propIds);
final Map<EntitySpec, SQLGenResultProcessorFactory> allEntitySpecToResultProcessor = allEntitySpecToResultProcessor();
final Collection<EntitySpec> allEntitySpecs
= allEntitySpecToResultProcessor.keySet();
DataStager stager = null;
if (stagingApplies()) {
stager = doStage(allEntitySpecs, filters, propIds, keyIds, null);
}
final List<StreamingIteratorPair> itrs = new ArrayList<>();
ExecutorService executor = Executors.newFixedThreadPool(this.queryThreadCount);
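// Fan out one query task per entity spec; the futures are drained
// below in submission order, and each task's iterators are merged into
// a single read iterator at the end of this method.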
List<Future<List<StreamingIteratorPair>>> list = new ArrayList<>();
List<Connection> connections = new ArrayList<>();
for (EntitySpec entitySpec : entitySpecToPropIds.keySet()) {
list.add(executor.submit(new SQLExecutorCallable(entitySpec, allEntitySpecToResultProcessor, allEntitySpecs, filters, propIds, keyIds)));
}
for (Future<List<StreamingIteratorPair>> future : list) {
try {
itrs.addAll(future.get());
} catch (InterruptedException ex) {
SQLGenUtil.logger().log(Level.FINER, "SQL generation thread interrupted", ex);
} catch (ExecutionException ex) {
throw new DataSourceReadException(ex);
}
}
executor.shutdown();
List<DataStreamingEventIterator<Proposition>> events
= new ArrayList<>(itrs.size());
List<DataStreamingEventIterator<UniqueIdPair>> refs
= new ArrayList<>();
for (StreamingIteratorPair pair : itrs) {
events.add(pair.getProps());
refs.addAll(pair.getRefs());
}
RelationalDbDataReadIterator streamingResults
= new RelationalDbDataReadIterator(refs, events, connections,
stagingApplies() ? stager : null);
return streamingResults;
}
private class StreamingIteratorPair {
private final DataStreamingEventIterator<Proposition> props;
private final List<? extends DataStreamingEventIterator<UniqueIdPair>> refs;
private final Connection connection;
StreamingIteratorPair(DataStreamingEventIterator<Proposition> props,
List<? extends DataStreamingEventIterator<UniqueIdPair>> refs,
Connection connection) {
this.props = props;
this.refs = refs;
this.connection = connection;
}
public DataStreamingEventIterator<Proposition> getProps() {
return props;
}
public List<? extends DataStreamingEventIterator<UniqueIdPair>> getRefs() {
return refs;
}
public Connection getConnection() {
return connection;
}
}
private List<StreamingIteratorPair> processEntitySpecStreaming(EntitySpec entitySpec,
Map<EntitySpec, SQLGenResultProcessorFactory> allEntitySpecToResultProcessor,
Collection<EntitySpec> allEntitySpecs, Filter filters,
Set<String> propIds, Set<String> keyIds,
StreamingSQLExecutor executor)
throws DataSourceReadException {
List<StreamingIteratorPair> result = new ArrayList<>();
Logger logger = SQLGenUtil.logger();
logProcessingEntitySpec(logger, entitySpec);
SQLGenResultProcessorFactory<Proposition> factory
= getResultProcessorFactory(allEntitySpecToResultProcessor,
entitySpec);
List<EntitySpec> applicableEntitySpecs
= computeApplicableEntitySpecs(allEntitySpecs, entitySpec);
Set<Filter> applicableFilters = computeApplicableFilters(filters,
allEntitySpecs, entitySpec);
List<Set<Filter>> partitions = constructPartitions(entitySpec,
applicableFilters);
LinkedHashMap<String, ReferenceSpec> inboundRefSpecs
= collectInboundRefSpecs(applicableEntitySpecs, entitySpec, propIds);
Map<String, ReferenceSpec> bidirRefSpecs = collectBidirectionalReferences(applicableEntitySpecs, entitySpec, propIds);
String dataSourceBackendId = this.backend.getDataSourceBackendId();
StreamingMainResultProcessor<Proposition> resultProcessor
= factory.getStreamingInstance(dataSourceBackendId, entitySpec,
inboundRefSpecs, bidirRefSpecs, propIds);
for (Set<Filter> filterSet : partitions) {
generateAndExecuteSelectStreaming(entitySpec, null, propIds, filterSet,
applicableEntitySpecs, inboundRefSpecs, keyIds,
SQLOrderBy.ASCENDING,
resultProcessor, executor, true);
DataStreamingEventIterator<Proposition> results
= resultProcessor.getResults();
List<DataStreamingEventIterator<UniqueIdPair>> refResults
= java.util.Collections.singletonList(resultProcessor
.getInboundReferenceResults());
result.add(new StreamingIteratorPair(results, refResults, executor.getConnection()));
}
logDoneProcessing(logger, entitySpec);
return result;
}
private static List<EntitySpec> computeApplicableEntitySpecs(
Collection<EntitySpec> allEntitySpecs, EntitySpec entitySpec) {
List<EntitySpec> result = new LinkedList<>(allEntitySpecs);
removeNonApplicableEntitySpecs(entitySpec, result);
logApplicableEntitySpecs(result);
assert !result.isEmpty() :
"result should have at least one element";
return result;
}
private Set<Filter> computeApplicableFilters(Filter filters,
Collection<EntitySpec> allEntitySpecs, EntitySpec entitySpec) {
Set<Filter> filtersCopy = copyFilters(filters);
removeNonApplicableFilters(allEntitySpecs, filtersCopy, entitySpec);
removeStagedFilters(entitySpec, filtersCopy);
return filtersCopy;
}
/*
* Partitions position filters according to the data source backend's
* configuration. This has the effect of splitting up one query into
* multiple queries to improve performance. Currently this only works when
* upper and lower bounds are provided on the same side of the specified
* proposition's intervals. If there are multiple position filters
* specified, which one gets chosen to partition is non-deterministic.
*/
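/*
 * Illustrative example (hypothetical values): given a position filter
 * spanning January through March and an entity spec whose partitionBy
 * is a month-length Unit, this method returns three filter sets, each
 * with the position filter narrowed to one month; each set later
 * produces its own query.
 */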
private List<Set<Filter>> constructPartitions(EntitySpec entitySpec,
Set<Filter> filtersCopy) {
PositionFilter positionFilter = null;
for (Filter filter : filtersCopy) {
if (filter instanceof PositionFilter) {
positionFilter = (PositionFilter) filter;
break;
}
}
Unit partitionBy = entitySpec.getPartitionBy();
List<Set<Filter>> filterList = new ArrayList<>();
if (partitionBy == null
|| positionFilter == null
|| positionFilter.getStart() == null
|| positionFilter.getFinish() == null
|| !positionFilter.getStartSide().equals(
positionFilter.getFinishSide())) {
filterList.add(filtersCopy);
} else {
Long start = positionFilter.getStart();
Long actualFinish = positionFilter.getFinish();
Granularity startGran = positionFilter.getStartGranularity();
Granularity finishGran = positionFilter.getFinishGranularity();
Unit finishUnit = finishGran != null ? finishGran.getCorrespondingUnit() : null;
boolean doLoop = true;
while (doLoop) {
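// Each pass carves off one partition-length window starting at the
// current start position; the final window is clamped to the filter's
// actual finish, which ends the loop.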
Set<Filter> newFiltersCopy = new HashSet<>(filtersCopy);
newFiltersCopy.remove(positionFilter);
Long nextStart = partitionBy.addToPosition(start, 1);
Long finish = finishUnit != null ? finishUnit.addToPosition(
nextStart, -1) : -1;
if (finish.compareTo(actualFinish) >= 0) {
finish = actualFinish;
doLoop = false;
}
PositionFilter newPositionFilter = new PositionFilter(
positionFilter.getPropositionIds(), start, startGran,
finish, finishGran, positionFilter.getStartSide(),
positionFilter.getFinishSide());
newFiltersCopy.add(newPositionFilter);
filterList.add(newFiltersCopy);
start = nextStart;
}
}
return filterList;
}
private DataStager doStage(Collection<EntitySpec> allEntitySpecs,
Filter filters, Set<String> propIds, Set<String> keyIds,
SQLOrderBy order) throws DataSourceReadException {
DataStager stager = null;
try {
stager = getDataStager(this.stagedTableSpecs, null,
new LinkedList<>(allEntitySpecs),
copyFilters(filters), propIds, keyIds, order,
this.connectionSpec);
stager.stageTables();
} catch (SQLException ex) {
Logger logger = SQLGenUtil.logger();
logger.log(Level.SEVERE, "Failed to create staging area", ex);
throw new DataSourceReadException(ex);
}
return stager;
}
private static SQLGenResultProcessorFactory<Proposition> getResultProcessorFactory(
Map<EntitySpec, SQLGenResultProcessorFactory> allEntitySpecToResultProcessor,
EntitySpec entitySpec) {
// we know that the map contains only factory instances that are
// parameterized by implementations of Proposition
@SuppressWarnings("unchecked")
SQLGenResultProcessorFactory<Proposition> factory
= allEntitySpecToResultProcessor.get(entitySpec);
assert factory != null : "factory should never be null";
return factory;
}
/**
 * Removes filters that apply to staged tables, leaving a position filter
 * behind if one exists, the entity spec has partition by set, and the
 * duration of the partition is less than the duration of the filter's
 * time range.
 *
 * @param filtersCopy the set of filters to prune.
 */
private void removeStagedFilters(EntitySpec entitySpec,
Set<Filter> filtersCopy) {
boolean first = true;
boolean isPositionFilter;
Unit partitionBy = entitySpec.getPartitionBy();
for (Iterator<Filter> itr = filtersCopy.iterator(); itr.hasNext();) {
isPositionFilter = false;
Filter f = itr.next();
if (f instanceof PositionFilter) {
isPositionFilter = true;
}
String[] propIds = f.getPropositionIds();
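// For each proposition id named by the filter, check whether any
// entity spec mapped to that id is backed by a staged table; if so,
// the filter is dropped, except for a position filter on a partitioned
// entity spec whose partition is narrower than the filter's time range.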
FILTER_LOOP:
for (String propId : propIds) {
List<EntitySpec> entitySpecs = new ArrayList<>();
if (this.constantSpecs.containsKey(propId)) {
entitySpecs.addAll(this.constantSpecs.get(propId));
}
if (this.eventSpecs.containsKey(propId)) {
entitySpecs.addAll(this.eventSpecs.get(propId));
}
if (this.primitiveParameterSpecs.containsKey(propId)) {
entitySpecs.addAll(this.primitiveParameterSpecs.get(propId));
}
for (StagingSpec staged : this.stagedTableSpecs) {
for (EntitySpec es : entitySpecs) {
for (EntitySpec ses : staged.getEntitySpecs()) {
if (SQLGenUtil.somePropIdsMatch(es, ses)) {
if (first && isPositionFilter
&& partitionBy != null) {
PositionFilter pf = (PositionFilter) f;
Long start = pf.getStart();
if (start != null) {
long addToPosition
= partitionBy.addToPosition(start, 1);
Long finish = pf.getFinish();
if (finish != null) {
if (addToPosition < finish) {
break FILTER_LOOP;
}
}
}
}
itr.remove();
break FILTER_LOOP;
}
}
}
}
}
}
}
private boolean stagingApplies() {
return this.stagedTableSpecs != null
&& this.stagedTableSpecs.length > 0;
}
private void logDoneProcessing(Logger logger, EntitySpec entitySpec) {
if (logger.isLoggable(Level.FINE)) {
logger.log(Level.FINE,
"Results of query for {0} in data source backend {1} "
+ "have been processed",
new Object[]{entitySpec.getName(),
backendNameForMessages()});
}
}
private void logProcessingEntitySpec(Logger logger, EntitySpec entitySpec) {
if (logger.isLoggable(Level.FINE)) {
logger.log(
Level.FINE,
"Data source backend {0} is processing entity spec {1}",
new Object[]{backendNameForMessages(),
entitySpec.getName()});
}
}
private static void logApplicableEntitySpecs(List<EntitySpec> allEntitySpecsCopy) {
Logger logger = SQLGenUtil.logger();
if (logger.isLoggable(Level.FINER)) {
String[] allEntitySpecsCopyNames = new String[allEntitySpecsCopy.size()];
int i = 0;
for (EntitySpec aesc : allEntitySpecsCopy) {
allEntitySpecsCopyNames[i++] = aesc.getName();
}
logger.log(Level.FINER, "Applicable entity specs are {0}",
StringUtils.join(allEntitySpecsCopyNames, ", "));
}
}
private static LinkedHashMap<String, ReferenceSpec> collectInboundRefSpecs(Collection<EntitySpec> entitySpecs, EntitySpec rhsEntitySpec,
Set<String> propIds) {
LinkedHashMap<String, ReferenceSpec> result = new LinkedHashMap<>();
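// Gather references from other entity specs into this one (the "rhs"
// spec), skipping pairs where this spec also points back with a
// many-type reference; those pairs are collected separately as
// bidirectional references below.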
for (EntitySpec lhsReferenceSpec : entitySpecs) {
if (lhsReferenceSpec.hasReferenceTo(rhsEntitySpec)
&& Collections.containsAny(propIds, lhsReferenceSpec.getPropositionIds())) {
boolean isMany = false;
if (rhsEntitySpec.hasReferenceTo(lhsReferenceSpec)) {
for (ReferenceSpec rhsToLhsReferenceSpec : rhsEntitySpec.getReferenceSpecs()) {
if (rhsToLhsReferenceSpec.getEntityName().equals(lhsReferenceSpec.getName())
&& rhsToLhsReferenceSpec.getType() == ReferenceSpec.Type.MANY) {
isMany = true;
break;
}
}
}
if (!isMany) {
for (ReferenceSpec lhsToRhsReferenceSpec : lhsReferenceSpec.getReferenceSpecs()) {
if (lhsToRhsReferenceSpec.getEntityName().equals(rhsEntitySpec.getName())) {
result.put(lhsReferenceSpec.getName(), lhsToRhsReferenceSpec);
}
}
}
}
}
return result;
}
private static Map<String, ReferenceSpec> collectBidirectionalReferences(Collection<EntitySpec> entitySpecs, EntitySpec lhsEntitySpec,
Set<String> propIds) {
Map<String, ReferenceSpec> result = new HashMap<>();
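// Collect this entity spec's outbound references to entity specs that
// point back with a many-type reference, keyed by the referenced
// entity spec's name.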
for (ReferenceSpec lhsToRhsReferenceSpec : lhsEntitySpec.getReferenceSpecs()) {
for (EntitySpec rhsEntitySpec : entitySpecs) {
if (rhsEntitySpec.getName().equals(lhsToRhsReferenceSpec.getEntityName())
&& Collections.containsAny(propIds, rhsEntitySpec.getPropositionIds())) {
if (rhsEntitySpec.hasReferenceTo(lhsEntitySpec)) {
for (ReferenceSpec rhsToLhsReferenceSpec : rhsEntitySpec.getReferenceSpecs()) {
if (rhsToLhsReferenceSpec.getEntityName().equals(lhsEntitySpec
.getName()) && rhsToLhsReferenceSpec.getType()
== ReferenceSpec.Type.MANY) {
result.put(rhsEntitySpec.getName(), lhsToRhsReferenceSpec);
}
}
}
}
}
}
return result;
}
/*
* We need to store multiple types of result processor factories in the same
* map
*/
@SuppressWarnings("rawtypes")
private Map<EntitySpec, SQLGenResultProcessorFactory> allEntitySpecToResultProcessor() {
/*
 * The order of the entity specs matters when multiple entity specs
 * have the same name. Thus, we use a LinkedHashMap.
 */
Map<EntitySpec, SQLGenResultProcessorFactory> result = new LinkedHashMap<>();
PrimitiveParameterResultProcessorFactory ppFactory = new PrimitiveParameterResultProcessorFactory();
for (EntitySpec es : this.primitiveParameterEntitySpecs) {
result.put(es, ppFactory);
}
EventResultProcessorFactory eFactory = new EventResultProcessorFactory();
for (EntitySpec es : this.eventEntitySpecs) {
result.put(es, eFactory);
}
ConstantResultProcessorFactory cFactory = new ConstantResultProcessorFactory();
for (EntitySpec es : this.constantEntitySpecs) {
result.put(es, cFactory);
}
// staging queries generate no results
for (StagingSpec ss : this.stagedTableSpecs) {
for (EntitySpec es : ss.getEntitySpecs()) {
result.put(es, null);
}
}
return result;
}
private static Set<Filter> copyFilters(Filter filters) {
Set<Filter> filtersCopy = new HashSet<>();
if (filters != null) {
for (Iterator<Filter> itr = filters.andIterator(); itr.hasNext();) {
filtersCopy.add(itr.next());
}
}
return filtersCopy;
}
private void generateAndExecuteSelectStreaming(
EntitySpec entitySpec, ReferenceSpec referenceSpec,
Set<String> propIds, Set<Filter> filtersCopy,
List<EntitySpec> entitySpecsCopy, LinkedHashMap<String, ReferenceSpec> inboundRefSpecs, Set<String> keyIds,
SQLOrderBy order, StreamingResultProcessor<Proposition> resultProcessor,
StreamingSQLExecutor executor,
boolean wrapKeyId) throws DataSourceReadException {
Logger logger = SQLGenUtil.logger();
String backendNameForMessages = backendNameForMessages();
String entitySpecName = entitySpec.getName();
if (logger.isLoggable(Level.FINE)) {
logger.log(Level.FINE,
"Data source backend {0} is generating query for {1}",
new Object[]{backendNameForMessages, entitySpecName});
}
String query = getSelectStatement(entitySpec, referenceSpec,
entitySpecsCopy, inboundRefSpecs, filtersCopy, propIds,
keyIds, order,
resultProcessor, this.stagedTableSpecs, wrapKeyId).generateStatement();
if (logger.isLoggable(Level.FINE)) {
logger.log(
Level.FINE,
"Data source backend {0} generated the following query for {1}: {2}",
new Object[]{backendNameForMessages, entitySpecName,
query});
}
executor.executeSelect(entitySpecName, query, resultProcessor);
}
private static void removeNonApplicableEntitySpecs(EntitySpec entitySpec,
Collection<EntitySpec> entitySpecs) {
for (Iterator<EntitySpec> itr = entitySpecs.iterator(); itr.hasNext();) {
EntitySpec es = itr.next();
if (es != entitySpec && !es.hasReferenceTo(entitySpec)) {
itr.remove();
}
}
}
/**
* Remove filters that are not directly applicable to the given entity spec
* and are not applicable to other entity specs that refer to it.
*
* @param entitySpecs
* @param filtersCopy
* @param entitySpec
*/
private static void removeNonApplicableFilters(
Collection<EntitySpec> entitySpecs, Set<Filter> filtersCopy,
EntitySpec entitySpec) {
Set<EntitySpec> entitySpecsSet = new HashSet<>();
Set<String> filterPropIds = new HashSet<>();
String[] entitySpecPropIds = entitySpec.getPropositionIds();
for (Iterator<Filter> itr = filtersCopy.iterator(); itr.hasNext();) {
Filter f = itr.next();
Arrays.addAll(filterPropIds, f.getPropositionIds());
for (EntitySpec es : entitySpecs) {
if (Collections.containsAny(filterPropIds,
es.getPropositionIds())) {
entitySpecsSet.add(es);
}
}
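// A filter that applies directly to this entity spec means the
// current filter and all remaining filters are kept; scanning stops here.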
if (Collections.containsAny(filterPropIds, entitySpecPropIds)) {
return;
}
if (!atLeastOneInInboundReferences(entitySpecsSet, entitySpec)) {
itr.remove();
}
entitySpecsSet.clear();
filterPropIds.clear();
}
}
private static boolean atLeastOneInInboundReferences(
Set<EntitySpec> entitySpecsSet, EntitySpec entitySpec) {
for (EntitySpec es : entitySpecsSet) {
if (es.hasReferenceTo(entitySpec)) {
return true;
}
}
return false;
}
protected abstract SelectStatement getSelectStatement(
EntitySpec entitySpec, ReferenceSpec referenceSpec,
List<EntitySpec> entitySpecs, Map<String, ReferenceSpec> inboundRefSpecs, Set<Filter> filters,
Set<String> propIds, Set<String> keyIds, SQLOrderBy order,
SQLGenResultProcessor resultProcessor, StagingSpec[] stagedTables,
boolean wrapKeyId);
protected DataStager getDataStager(StagingSpec[] stagingSpecs,
ReferenceSpec referenceSpec, List<EntitySpec> entitySpecs,
Set<Filter> filters, Set<String> propIds, Set<String> keyIds,
SQLOrderBy order, ConnectionSpec connectionSpec) {
throw new UnsupportedOperationException("SQL generator "
+ getClass().getName() + " does not support data staging");
}
protected String assembleReadPropositionsQuery(StringBuilder selectClause,
StringBuilder fromClause, StringBuilder whereClause) {
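// Illustrative only (hypothetical fragments): selectClause "a.id, a.code",
// fromClause "obs a", and whereClause "where a.code = 'X'" yield
// "select a.id, a.code from obs a where a.code = 'X'".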
return MessageFormat.format(readPropositionsSQL, selectClause,
fromClause, whereClause);
}
/**
 * Returns whether an IN clause containing the proposition ids of interest
 * should be added to the WHERE clause.
 *
 * @param queryPropIds the proposition ids to query.
 * @param entitySpecPropIds the proposition ids corresponding to the current
 * entity spec.
 * @return true if the query contains fewer than 85% of the
 * proposition ids that are known to the data source and the IN clause
 * would contain no more than 2000 codes.
 */
static boolean needsPropIdInClause(Set<String> queryPropIds,
String[] entitySpecPropIds) {
Set<String> entitySpecPropIdsSet = Arrays.asSet(entitySpecPropIds);
// Filter out prop ids that are not in the entitySpecPropIds array.
List<String> filteredPropIds = new ArrayList<>(
entitySpecPropIds.length);
for (String propId : queryPropIds) {
if (entitySpecPropIdsSet.contains(propId)) {
filteredPropIds.add(propId);
}
}
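// Example with hypothetical numbers: if the entity spec maps 100
// proposition ids and the query requests 80 of them, 80 < 85 (85% of
// 100) and 80 <= 2000, so an IN clause is generated; a query
// requesting 90 of them would skip the IN clause.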
return (filteredPropIds.size() < entitySpecPropIds.length * 0.85f)
&& (filteredPropIds.size() <= 2000);
}
/**
 * Gets the class name of the driver to load for this SQL generator, or
 * null if the driver is a JDBC 4 driver and does not need to
 * be loaded explicitly. Returning a non-null value will do no harm if
 * the driver is a JDBC 4 driver.
 *
 * This implementation returns null. Override it to return a
 * driver's class name.
 *
 * @return a class name {@link String}.
 */
protected String getDriverClassNameToLoad() {
return null;
}
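/*
 * An override in a driver-specific subclass might look like the
 * following sketch (the Oracle driver class is just an example):
 *
 *     @Override
 *     protected String getDriverClassNameToLoad() {
 *         return "oracle.jdbc.OracleDriver";
 *     }
 */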
private String backendNameForMessages() {
String backendDisplayName = this.backend.getDisplayName();
if (backendDisplayName != null) {
return backendDisplayName + "(" + this.backend.getClass().getName()
+ ")";
} else {
return this.backend.getClass().getName();
}
}
private Map<EntitySpec, List<String>> entitySpecToPropIds(
Set<String> propIds) throws AssertionError {
Map<EntitySpec, List<String>> result = new HashMap<>();
for (String propId : propIds) {
boolean inDataSource = populateEntitySpecToPropIdMap(
new String[]{propId}, result);
Logger logger = SQLGenUtil.logger();
if (!inDataSource && logger.isLoggable(Level.FINER)) {
logger.log(
Level.FINER,
"Data source backend {0} does not know about proposition {1}",
new Object[]{backendNameForMessages(), propId});
}
}
return result;
}
private List<EntitySpec> entitySpecs(String propId) {
if (this.primitiveParameterSpecs.containsKey(propId)) {
return this.primitiveParameterSpecs.get(propId);
} else if (this.eventSpecs.containsKey(propId)) {
return this.eventSpecs.get(propId);
} else if (this.constantSpecs.containsKey(propId)) {
return this.constantSpecs.get(propId);
} else {
return null;
}
}
private boolean populateEntitySpecToPropIdMap(String[] propIds,
Map<EntitySpec, List<String>> entitySpecToPropIdMap)
throws AssertionError {
boolean result = false;
for (String propId : propIds) {
List<EntitySpec> entitySpecs = entitySpecs(propId);
if (entitySpecs != null) {
for (EntitySpec entitySpec : entitySpecs) {
Collections.putList(entitySpecToPropIdMap, entitySpec,
propId);
result = true;
}
}
}
return result;
}
private static void populatePropositionMap(
Map<String, List<EntitySpec>> map, EntitySpec[] entitySpecs) {
if (entitySpecs != null) {
for (EntitySpec entitySpec : entitySpecs) {
for (String code : entitySpec.getPropositionIds()) {
Collections.putList(map, code, entitySpec);
}
}
}
}
}