// io.trino.sql.planner.SplitSourceFactory
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.sql.planner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.inject.Inject;
import io.airlift.log.Logger;
import io.opentelemetry.api.trace.Span;
import io.trino.Session;
import io.trino.metadata.TableHandle;
import io.trino.server.DynamicFilterService;
import io.trino.spi.connector.ColumnHandle;
import io.trino.spi.connector.Constraint;
import io.trino.spi.connector.DynamicFilter;
import io.trino.spi.predicate.TupleDomain;
import io.trino.split.SampledSplitSource;
import io.trino.split.SplitManager;
import io.trino.split.SplitSource;
import io.trino.sql.DynamicFilters;
import io.trino.sql.PlannerContext;
import io.trino.sql.planner.plan.AggregationNode;
import io.trino.sql.planner.plan.AssignUniqueId;
import io.trino.sql.planner.plan.DistinctLimitNode;
import io.trino.sql.planner.plan.DynamicFilterSourceNode;
import io.trino.sql.planner.plan.EnforceSingleRowNode;
import io.trino.sql.planner.plan.ExchangeNode;
import io.trino.sql.planner.plan.ExplainAnalyzeNode;
import io.trino.sql.planner.plan.FilterNode;
import io.trino.sql.planner.plan.GroupIdNode;
import io.trino.sql.planner.plan.IndexJoinNode;
import io.trino.sql.planner.plan.JoinNode;
import io.trino.sql.planner.plan.LimitNode;
import io.trino.sql.planner.plan.MarkDistinctNode;
import io.trino.sql.planner.plan.MergeProcessorNode;
import io.trino.sql.planner.plan.MergeWriterNode;
import io.trino.sql.planner.plan.OutputNode;
import io.trino.sql.planner.plan.PatternRecognitionNode;
import io.trino.sql.planner.plan.PlanNode;
import io.trino.sql.planner.plan.PlanNodeId;
import io.trino.sql.planner.plan.PlanVisitor;
import io.trino.sql.planner.plan.ProjectNode;
import io.trino.sql.planner.plan.RefreshMaterializedViewNode;
import io.trino.sql.planner.plan.RemoteSourceNode;
import io.trino.sql.planner.plan.RowNumberNode;
import io.trino.sql.planner.plan.SampleNode;
import io.trino.sql.planner.plan.SemiJoinNode;
import io.trino.sql.planner.plan.SimpleTableExecuteNode;
import io.trino.sql.planner.plan.SortNode;
import io.trino.sql.planner.plan.SpatialJoinNode;
import io.trino.sql.planner.plan.StatisticsWriterNode;
import io.trino.sql.planner.plan.TableDeleteNode;
import io.trino.sql.planner.plan.TableExecuteNode;
import io.trino.sql.planner.plan.TableFinishNode;
import io.trino.sql.planner.plan.TableFunctionProcessorNode;
import io.trino.sql.planner.plan.TableScanNode;
import io.trino.sql.planner.plan.TableUpdateNode;
import io.trino.sql.planner.plan.TableWriterNode;
import io.trino.sql.planner.plan.TopNNode;
import io.trino.sql.planner.plan.TopNRankingNode;
import io.trino.sql.planner.plan.UnionNode;
import io.trino.sql.planner.plan.UnnestNode;
import io.trino.sql.planner.plan.ValuesNode;
import io.trino.sql.planner.plan.WindowNode;
import io.trino.sql.tree.Expression;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import static com.google.common.collect.Iterables.getOnlyElement;
import static io.trino.spi.connector.Constraint.alwaysTrue;
import static io.trino.spi.connector.DynamicFilter.EMPTY;
import static io.trino.sql.ExpressionUtils.filterConjuncts;
import static java.util.Objects.requireNonNull;
public class SplitSourceFactory
{
private static final Logger log = Logger.get(SplitSourceFactory.class);
private final SplitManager splitManager;
private final PlannerContext plannerContext;
private final DynamicFilterService dynamicFilterService;
private final TypeAnalyzer typeAnalyzer;
@Inject
public SplitSourceFactory(SplitManager splitManager, PlannerContext plannerContext, DynamicFilterService dynamicFilterService, TypeAnalyzer typeAnalyzer)
{
this.splitManager = requireNonNull(splitManager, "splitManager is null");
this.plannerContext = requireNonNull(plannerContext, "metadata is null");
this.dynamicFilterService = requireNonNull(dynamicFilterService, "dynamicFilterService is null");
this.typeAnalyzer = requireNonNull(typeAnalyzer, "typeAnalyzer is null");
}
public Map createSplitSources(Session session, Span stageSpan, PlanFragment fragment)
{
ImmutableList.Builder allSplitSources = ImmutableList.builder();
try {
// get splits for this fragment, this is lazy so split assignments aren't actually calculated here
return fragment.getRoot().accept(
new Visitor(session, stageSpan, TypeProvider.copyOf(fragment.getSymbols()), allSplitSources),
null);
}
catch (Throwable t) {
allSplitSources.build().forEach(SplitSourceFactory::closeSplitSource);
throw t;
}
}
private static void closeSplitSource(SplitSource source)
{
try {
source.close();
}
catch (Throwable t) {
log.warn(t, "Error closing split source");
}
}
private final class Visitor
extends PlanVisitor, Void>
{
private final Session session;
private final Span stageSpan;
private final TypeProvider typeProvider;
private final ImmutableList.Builder splitSources;
private Visitor(
Session session,
Span stageSpan,
TypeProvider typeProvider,
ImmutableList.Builder allSplitSources)
{
this.session = session;
this.stageSpan = stageSpan;
this.typeProvider = typeProvider;
this.splitSources = allSplitSources;
}
@Override
public Map visitExplainAnalyze(ExplainAnalyzeNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Map visitTableScan(TableScanNode node, Void context)
{
SplitSource splitSource = createSplitSource(node.getTable(), node.getAssignments(), Optional.empty());
splitSources.add(splitSource);
return ImmutableMap.of(node.getId(), splitSource);
}
private SplitSource createSplitSource(TableHandle table, Map assignments, Optional filterPredicate)
{
List dynamicFilters = filterPredicate
.map(DynamicFilters::extractDynamicFilters)
.map(DynamicFilters.ExtractResult::getDynamicConjuncts)
.orElse(ImmutableList.of());
DynamicFilter dynamicFilter = EMPTY;
if (!dynamicFilters.isEmpty()) {
log.debug("Dynamic filters: %s", dynamicFilters);
dynamicFilter = dynamicFilterService.createDynamicFilter(session.getQueryId(), dynamicFilters, assignments, typeProvider);
}
Constraint constraint = filterPredicate
.map(predicate -> filterConjuncts(plannerContext.getMetadata(), predicate, expression -> !DynamicFilters.isDynamicFilter(expression)))
.map(predicate -> new LayoutConstraintEvaluator(plannerContext, typeAnalyzer, session, typeProvider, assignments, predicate))
.map(evaluator -> new Constraint(TupleDomain.all(), evaluator::isCandidate, evaluator.getArguments())) // we are interested only in functional predicate here, so we set the summary to ALL.
.orElse(alwaysTrue());
// get dataSource for table
return splitManager.getSplits(
session,
stageSpan,
table,
dynamicFilter,
constraint);
}
@Override
public Map visitJoin(JoinNode node, Void context)
{
Map leftSplits = node.getLeft().accept(this, context);
Map rightSplits = node.getRight().accept(this, context);
return ImmutableMap.builder()
.putAll(leftSplits)
.putAll(rightSplits)
.buildOrThrow();
}
@Override
public Map visitSemiJoin(SemiJoinNode node, Void context)
{
Map sourceSplits = node.getSource().accept(this, context);
Map filteringSourceSplits = node.getFilteringSource().accept(this, context);
return ImmutableMap.builder()
.putAll(sourceSplits)
.putAll(filteringSourceSplits)
.buildOrThrow();
}
@Override
public Map visitSpatialJoin(SpatialJoinNode node, Void context)
{
Map leftSplits = node.getLeft().accept(this, context);
Map rightSplits = node.getRight().accept(this, context);
return ImmutableMap.builder()
.putAll(leftSplits)
.putAll(rightSplits)
.buildOrThrow();
}
@Override
public Map visitIndexJoin(IndexJoinNode node, Void context)
{
return node.getProbeSource().accept(this, context);
}
@Override
public Map visitDynamicFilterSource(DynamicFilterSourceNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Map visitRemoteSource(RemoteSourceNode node, Void context)
{
// remote source node does not have splits
return ImmutableMap.of();
}
@Override
public Map visitValues(ValuesNode node, Void context)
{
// values node does not have splits
return ImmutableMap.of();
}
@Override
public Map visitFilter(FilterNode node, Void context)
{
if (node.getSource() instanceof TableScanNode scan) {
SplitSource splitSource = createSplitSource(scan.getTable(), scan.getAssignments(), Optional.of(node.getPredicate()));
splitSources.add(splitSource);
return ImmutableMap.of(scan.getId(), splitSource);
}
return node.getSource().accept(this, context);
}
@Override
public Map visitSample(SampleNode node, Void context)
{
return switch (node.getSampleType()) {
case BERNOULLI -> node.getSource().accept(this, context);
case SYSTEM -> {
Map nodeSplits = node.getSource().accept(this, context);
// TODO: when this happens we should switch to either BERNOULLI or page sampling
if (nodeSplits.size() == 1) {
PlanNodeId planNodeId = getOnlyElement(nodeSplits.keySet());
SplitSource sampledSplitSource = new SampledSplitSource(nodeSplits.get(planNodeId), node.getSampleRatio());
yield ImmutableMap.of(planNodeId, sampledSplitSource);
}
// table sampling on a sub query without splits is meaningless
yield nodeSplits;
}
};
}
@Override
public Map visitAggregation(AggregationNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Map visitGroupId(GroupIdNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Map visitMarkDistinct(MarkDistinctNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Map visitWindow(WindowNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Map visitPatternRecognition(PatternRecognitionNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Map visitTableFunctionProcessor(TableFunctionProcessorNode node, Void context)
{
if (node.getSource().isEmpty()) {
// this is a source node, so produce splits
SplitSource splitSource = splitManager.getSplits(session, stageSpan, node.getHandle());
splitSources.add(splitSource);
return ImmutableMap.of(node.getId(), splitSource);
}
return node.getSource().orElseThrow().accept(this, context);
}
@Override
public Map visitRowNumber(RowNumberNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Map visitTopNRanking(TopNRankingNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Map visitProject(ProjectNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Map visitUnnest(UnnestNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Map visitTopN(TopNNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Map visitOutput(OutputNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Map visitEnforceSingleRow(EnforceSingleRowNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Map visitAssignUniqueId(AssignUniqueId node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Map visitLimit(LimitNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Map visitDistinctLimit(DistinctLimitNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Map visitSort(SortNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Map visitRefreshMaterializedView(RefreshMaterializedViewNode node, Void context)
{
// RefreshMaterializedViewNode does not have splits
return ImmutableMap.of();
}
@Override
public Map visitTableWriter(TableWriterNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Map visitTableFinish(TableFinishNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Map visitStatisticsWriterNode(StatisticsWriterNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Map visitMergeWriter(MergeWriterNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Map visitMergeProcessor(MergeProcessorNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Map visitTableDelete(TableDeleteNode node, Void context)
{
// node does not have splits
return ImmutableMap.of();
}
@Override
public Map visitTableUpdate(TableUpdateNode node, Void context)
{
// node does not have splits
return ImmutableMap.of();
}
@Override
public Map visitTableExecute(TableExecuteNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Map visitSimpleTableExecuteNode(SimpleTableExecuteNode node, Void context)
{
// node does not have splits
return ImmutableMap.of();
}
@Override
public Map visitUnion(UnionNode node, Void context)
{
return processSources(node.getSources(), context);
}
@Override
public Map visitExchange(ExchangeNode node, Void context)
{
return processSources(node.getSources(), context);
}
private Map processSources(List sources, Void context)
{
ImmutableMap.Builder result = ImmutableMap.builder();
for (PlanNode child : sources) {
result.putAll(child.accept(this, context));
}
return result.buildOrThrow();
}
@Override
protected Map visitPlan(PlanNode node, Void context)
{
throw new UnsupportedOperationException("not yet implemented: " + node.getClass().getName());
}
}
}