/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.sql.analyzer;
import com.google.common.base.Joiner;
import com.google.common.base.VerifyException;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Sets;
import com.google.common.collect.Streams;
import com.google.common.math.IntMath;
import io.airlift.slice.Slice;
import io.trino.Session;
import io.trino.SystemSessionProperties;
import io.trino.connector.system.GlobalSystemConnector;
import io.trino.execution.Column;
import io.trino.execution.warnings.WarningCollector;
import io.trino.metadata.AnalyzePropertyManager;
import io.trino.metadata.FunctionResolver;
import io.trino.metadata.MaterializedViewDefinition;
import io.trino.metadata.Metadata;
import io.trino.metadata.OperatorNotFoundException;
import io.trino.metadata.QualifiedObjectName;
import io.trino.metadata.RedirectionAwareTableHandle;
import io.trino.metadata.ResolvedFunction;
import io.trino.metadata.TableExecuteHandle;
import io.trino.metadata.TableFunctionMetadata;
import io.trino.metadata.TableFunctionRegistry;
import io.trino.metadata.TableHandle;
import io.trino.metadata.TableLayout;
import io.trino.metadata.TableMetadata;
import io.trino.metadata.TableProceduresPropertyManager;
import io.trino.metadata.TableProceduresRegistry;
import io.trino.metadata.TablePropertyManager;
import io.trino.metadata.TableSchema;
import io.trino.metadata.TableVersion;
import io.trino.metadata.ViewColumn;
import io.trino.metadata.ViewDefinition;
import io.trino.security.AccessControl;
import io.trino.security.AllowAllAccessControl;
import io.trino.security.InjectedConnectorAccessControl;
import io.trino.security.SecurityContext;
import io.trino.security.ViewAccessControl;
import io.trino.spi.TrinoException;
import io.trino.spi.TrinoWarning;
import io.trino.spi.connector.CatalogHandle;
import io.trino.spi.connector.CatalogSchemaName;
import io.trino.spi.connector.CatalogSchemaTableName;
import io.trino.spi.connector.ColumnHandle;
import io.trino.spi.connector.ColumnMetadata;
import io.trino.spi.connector.ColumnSchema;
import io.trino.spi.connector.ConnectorTableMetadata;
import io.trino.spi.connector.ConnectorTransactionHandle;
import io.trino.spi.connector.MaterializedViewFreshness;
import io.trino.spi.connector.PointerType;
import io.trino.spi.connector.SchemaTableName;
import io.trino.spi.connector.TableProcedureMetadata;
import io.trino.spi.function.CatalogSchemaFunctionName;
import io.trino.spi.function.FunctionKind;
import io.trino.spi.function.OperatorType;
import io.trino.spi.function.table.Argument;
import io.trino.spi.function.table.ArgumentSpecification;
import io.trino.spi.function.table.ConnectorTableFunction;
import io.trino.spi.function.table.Descriptor;
import io.trino.spi.function.table.DescriptorArgument;
import io.trino.spi.function.table.DescriptorArgumentSpecification;
import io.trino.spi.function.table.ReturnTypeSpecification;
import io.trino.spi.function.table.ReturnTypeSpecification.DescribedTable;
import io.trino.spi.function.table.ScalarArgument;
import io.trino.spi.function.table.ScalarArgumentSpecification;
import io.trino.spi.function.table.TableArgument;
import io.trino.spi.function.table.TableArgumentSpecification;
import io.trino.spi.function.table.TableFunctionAnalysis;
import io.trino.spi.security.AccessDeniedException;
import io.trino.spi.security.GroupProvider;
import io.trino.spi.security.Identity;
import io.trino.spi.security.ViewExpression;
import io.trino.spi.type.ArrayType;
import io.trino.spi.type.CharType;
import io.trino.spi.type.DateType;
import io.trino.spi.type.LongTimestampWithTimeZone;
import io.trino.spi.type.MapType;
import io.trino.spi.type.RowType;
import io.trino.spi.type.TimestampType;
import io.trino.spi.type.TimestampWithTimeZoneType;
import io.trino.spi.type.Type;
import io.trino.spi.type.TypeNotFoundException;
import io.trino.spi.type.VarcharType;
import io.trino.sql.InterpretedFunctionInvoker;
import io.trino.sql.PlannerContext;
import io.trino.sql.analyzer.Analysis.GroupingSetAnalysis;
import io.trino.sql.analyzer.Analysis.JsonTableAnalysis;
import io.trino.sql.analyzer.Analysis.MergeAnalysis;
import io.trino.sql.analyzer.Analysis.ResolvedWindow;
import io.trino.sql.analyzer.Analysis.SelectExpression;
import io.trino.sql.analyzer.Analysis.SourceColumn;
import io.trino.sql.analyzer.Analysis.TableArgumentAnalysis;
import io.trino.sql.analyzer.Analysis.TableFunctionInvocationAnalysis;
import io.trino.sql.analyzer.Analysis.UnnestAnalysis;
import io.trino.sql.analyzer.ExpressionAnalyzer.ParametersTypeAndAnalysis;
import io.trino.sql.analyzer.ExpressionAnalyzer.TypeAndAnalysis;
import io.trino.sql.analyzer.JsonPathAnalyzer.JsonPathAnalysis;
import io.trino.sql.analyzer.Scope.AsteriskedIdentifierChainBasis;
import io.trino.sql.parser.ParsingException;
import io.trino.sql.parser.SqlParser;
import io.trino.sql.planner.PartitioningHandle;
import io.trino.sql.planner.ScopeAware;
import io.trino.sql.tree.AddColumn;
import io.trino.sql.tree.AliasedRelation;
import io.trino.sql.tree.AllColumns;
import io.trino.sql.tree.AllRows;
import io.trino.sql.tree.Analyze;
import io.trino.sql.tree.AstVisitor;
import io.trino.sql.tree.Call;
import io.trino.sql.tree.CallArgument;
import io.trino.sql.tree.ColumnDefinition;
import io.trino.sql.tree.Comment;
import io.trino.sql.tree.Commit;
import io.trino.sql.tree.CreateCatalog;
import io.trino.sql.tree.CreateMaterializedView;
import io.trino.sql.tree.CreateSchema;
import io.trino.sql.tree.CreateTable;
import io.trino.sql.tree.CreateTableAsSelect;
import io.trino.sql.tree.CreateView;
import io.trino.sql.tree.Deallocate;
import io.trino.sql.tree.Delete;
import io.trino.sql.tree.Deny;
import io.trino.sql.tree.DereferenceExpression;
import io.trino.sql.tree.DropCatalog;
import io.trino.sql.tree.DropColumn;
import io.trino.sql.tree.DropMaterializedView;
import io.trino.sql.tree.DropNotNullConstraint;
import io.trino.sql.tree.DropSchema;
import io.trino.sql.tree.DropTable;
import io.trino.sql.tree.DropView;
import io.trino.sql.tree.EmptyTableTreatment;
import io.trino.sql.tree.Except;
import io.trino.sql.tree.Execute;
import io.trino.sql.tree.ExecuteImmediate;
import io.trino.sql.tree.Explain;
import io.trino.sql.tree.ExplainAnalyze;
import io.trino.sql.tree.Expression;
import io.trino.sql.tree.ExpressionRewriter;
import io.trino.sql.tree.ExpressionTreeRewriter;
import io.trino.sql.tree.FetchFirst;
import io.trino.sql.tree.FieldReference;
import io.trino.sql.tree.FrameBound;
import io.trino.sql.tree.FunctionCall;
import io.trino.sql.tree.FunctionSpecification;
import io.trino.sql.tree.Grant;
import io.trino.sql.tree.GroupBy;
import io.trino.sql.tree.GroupingElement;
import io.trino.sql.tree.GroupingOperation;
import io.trino.sql.tree.GroupingSets;
import io.trino.sql.tree.Identifier;
import io.trino.sql.tree.Insert;
import io.trino.sql.tree.Intersect;
import io.trino.sql.tree.Join;
import io.trino.sql.tree.JoinCriteria;
import io.trino.sql.tree.JoinOn;
import io.trino.sql.tree.JoinUsing;
import io.trino.sql.tree.JsonPathInvocation;
import io.trino.sql.tree.JsonPathParameter;
import io.trino.sql.tree.JsonTable;
import io.trino.sql.tree.JsonTableColumnDefinition;
import io.trino.sql.tree.JsonTableSpecificPlan;
import io.trino.sql.tree.Lateral;
import io.trino.sql.tree.Limit;
import io.trino.sql.tree.LongLiteral;
import io.trino.sql.tree.MeasureDefinition;
import io.trino.sql.tree.Merge;
import io.trino.sql.tree.MergeCase;
import io.trino.sql.tree.MergeDelete;
import io.trino.sql.tree.MergeInsert;
import io.trino.sql.tree.MergeUpdate;
import io.trino.sql.tree.NaturalJoin;
import io.trino.sql.tree.NestedColumns;
import io.trino.sql.tree.Node;
import io.trino.sql.tree.NodeLocation;
import io.trino.sql.tree.NodeRef;
import io.trino.sql.tree.Offset;
import io.trino.sql.tree.OrderBy;
import io.trino.sql.tree.OrdinalityColumn;
import io.trino.sql.tree.Parameter;
import io.trino.sql.tree.PatternRecognitionRelation;
import io.trino.sql.tree.PlanLeaf;
import io.trino.sql.tree.PlanParentChild;
import io.trino.sql.tree.PlanSiblings;
import io.trino.sql.tree.Prepare;
import io.trino.sql.tree.Property;
import io.trino.sql.tree.QualifiedName;
import io.trino.sql.tree.Query;
import io.trino.sql.tree.QueryColumn;
import io.trino.sql.tree.QueryPeriod;
import io.trino.sql.tree.QuerySpecification;
import io.trino.sql.tree.RefreshMaterializedView;
import io.trino.sql.tree.Relation;
import io.trino.sql.tree.RenameColumn;
import io.trino.sql.tree.RenameMaterializedView;
import io.trino.sql.tree.RenameSchema;
import io.trino.sql.tree.RenameTable;
import io.trino.sql.tree.RenameView;
import io.trino.sql.tree.ResetSession;
import io.trino.sql.tree.ResetSessionAuthorization;
import io.trino.sql.tree.Revoke;
import io.trino.sql.tree.Rollback;
import io.trino.sql.tree.Row;
import io.trino.sql.tree.RowPattern;
import io.trino.sql.tree.SampledRelation;
import io.trino.sql.tree.SecurityCharacteristic;
import io.trino.sql.tree.Select;
import io.trino.sql.tree.SelectItem;
import io.trino.sql.tree.SetColumnType;
import io.trino.sql.tree.SetOperation;
import io.trino.sql.tree.SetProperties;
import io.trino.sql.tree.SetSchemaAuthorization;
import io.trino.sql.tree.SetSession;
import io.trino.sql.tree.SetSessionAuthorization;
import io.trino.sql.tree.SetTableAuthorization;
import io.trino.sql.tree.SetTimeZone;
import io.trino.sql.tree.SetViewAuthorization;
import io.trino.sql.tree.SimpleGroupBy;
import io.trino.sql.tree.SingleColumn;
import io.trino.sql.tree.SkipTo;
import io.trino.sql.tree.SortItem;
import io.trino.sql.tree.StartTransaction;
import io.trino.sql.tree.Statement;
import io.trino.sql.tree.StringLiteral;
import io.trino.sql.tree.SubqueryExpression;
import io.trino.sql.tree.SubscriptExpression;
import io.trino.sql.tree.SubsetDefinition;
import io.trino.sql.tree.Table;
import io.trino.sql.tree.TableExecute;
import io.trino.sql.tree.TableFunctionArgument;
import io.trino.sql.tree.TableFunctionDescriptorArgument;
import io.trino.sql.tree.TableFunctionInvocation;
import io.trino.sql.tree.TableFunctionTableArgument;
import io.trino.sql.tree.TableSubquery;
import io.trino.sql.tree.TruncateTable;
import io.trino.sql.tree.Union;
import io.trino.sql.tree.Unnest;
import io.trino.sql.tree.Update;
import io.trino.sql.tree.UpdateAssignment;
import io.trino.sql.tree.Use;
import io.trino.sql.tree.ValueColumn;
import io.trino.sql.tree.Values;
import io.trino.sql.tree.VariableDefinition;
import io.trino.sql.tree.Window;
import io.trino.sql.tree.WindowDefinition;
import io.trino.sql.tree.WindowFrame;
import io.trino.sql.tree.WindowOperation;
import io.trino.sql.tree.WindowReference;
import io.trino.sql.tree.WindowSpecification;
import io.trino.sql.tree.With;
import io.trino.sql.tree.WithQuery;
import io.trino.transaction.TransactionManager;
import io.trino.type.TypeCoercion;
import java.math.RoundingMode;
import java.time.Duration;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalLong;
import java.util.Set;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Verify.verify;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableMap.toImmutableMap;
import static com.google.common.collect.ImmutableSet.toImmutableSet;
import static com.google.common.collect.Iterables.getLast;
import static com.google.common.collect.Iterables.getOnlyElement;
import static io.trino.SystemSessionProperties.getMaxGroupingSets;
import static io.trino.metadata.FunctionResolver.toPath;
import static io.trino.metadata.GlobalFunctionCatalog.isBuiltinFunctionName;
import static io.trino.metadata.MetadataUtil.createQualifiedObjectName;
import static io.trino.metadata.MetadataUtil.getRequiredCatalogHandle;
import static io.trino.spi.StandardErrorCode.AMBIGUOUS_NAME;
import static io.trino.spi.StandardErrorCode.AMBIGUOUS_RETURN_TYPE;
import static io.trino.spi.StandardErrorCode.COLUMN_NOT_FOUND;
import static io.trino.spi.StandardErrorCode.COLUMN_TYPE_UNKNOWN;
import static io.trino.spi.StandardErrorCode.DUPLICATE_COLUMN_NAME;
import static io.trino.spi.StandardErrorCode.DUPLICATE_COLUMN_OR_PATH_NAME;
import static io.trino.spi.StandardErrorCode.DUPLICATE_NAMED_QUERY;
import static io.trino.spi.StandardErrorCode.DUPLICATE_PROPERTY;
import static io.trino.spi.StandardErrorCode.DUPLICATE_RANGE_VARIABLE;
import static io.trino.spi.StandardErrorCode.DUPLICATE_WINDOW_NAME;
import static io.trino.spi.StandardErrorCode.EXPRESSION_NOT_CONSTANT;
import static io.trino.spi.StandardErrorCode.EXPRESSION_NOT_IN_DISTINCT;
import static io.trino.spi.StandardErrorCode.FUNCTION_IMPLEMENTATION_ERROR;
import static io.trino.spi.StandardErrorCode.FUNCTION_NOT_FOUND;
import static io.trino.spi.StandardErrorCode.FUNCTION_NOT_WINDOW;
import static io.trino.spi.StandardErrorCode.INVALID_ARGUMENTS;
import static io.trino.spi.StandardErrorCode.INVALID_CATALOG_PROPERTY;
import static io.trino.spi.StandardErrorCode.INVALID_CHECK_CONSTRAINT;
import static io.trino.spi.StandardErrorCode.INVALID_COLUMN_REFERENCE;
import static io.trino.spi.StandardErrorCode.INVALID_COPARTITIONING;
import static io.trino.spi.StandardErrorCode.INVALID_FUNCTION_ARGUMENT;
import static io.trino.spi.StandardErrorCode.INVALID_LIMIT_CLAUSE;
import static io.trino.spi.StandardErrorCode.INVALID_ORDER_BY;
import static io.trino.spi.StandardErrorCode.INVALID_PARTITION_BY;
import static io.trino.spi.StandardErrorCode.INVALID_PLAN;
import static io.trino.spi.StandardErrorCode.INVALID_RECURSIVE_REFERENCE;
import static io.trino.spi.StandardErrorCode.INVALID_ROW_FILTER;
import static io.trino.spi.StandardErrorCode.INVALID_TABLE_FUNCTION_INVOCATION;
import static io.trino.spi.StandardErrorCode.INVALID_VIEW;
import static io.trino.spi.StandardErrorCode.INVALID_WINDOW_FRAME;
import static io.trino.spi.StandardErrorCode.INVALID_WINDOW_REFERENCE;
import static io.trino.spi.StandardErrorCode.MISMATCHED_COLUMN_ALIASES;
import static io.trino.spi.StandardErrorCode.MISSING_ARGUMENT;
import static io.trino.spi.StandardErrorCode.MISSING_COLUMN_ALIASES;
import static io.trino.spi.StandardErrorCode.MISSING_COLUMN_NAME;
import static io.trino.spi.StandardErrorCode.MISSING_GROUP_BY;
import static io.trino.spi.StandardErrorCode.MISSING_ORDER_BY;
import static io.trino.spi.StandardErrorCode.MISSING_PATH_NAME;
import static io.trino.spi.StandardErrorCode.MISSING_RETURN_TYPE;
import static io.trino.spi.StandardErrorCode.NESTED_RECURSIVE;
import static io.trino.spi.StandardErrorCode.NESTED_ROW_PATTERN_RECOGNITION;
import static io.trino.spi.StandardErrorCode.NESTED_WINDOW;
import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
import static io.trino.spi.StandardErrorCode.NULL_TREATMENT_NOT_ALLOWED;
import static io.trino.spi.StandardErrorCode.NUMERIC_VALUE_OUT_OF_RANGE;
import static io.trino.spi.StandardErrorCode.SCHEMA_NOT_FOUND;
import static io.trino.spi.StandardErrorCode.SYNTAX_ERROR;
import static io.trino.spi.StandardErrorCode.TABLE_ALREADY_EXISTS;
import static io.trino.spi.StandardErrorCode.TABLE_HAS_NO_COLUMNS;
import static io.trino.spi.StandardErrorCode.TABLE_NOT_FOUND;
import static io.trino.spi.StandardErrorCode.TOO_MANY_GROUPING_SETS;
import static io.trino.spi.StandardErrorCode.TYPE_MISMATCH;
import static io.trino.spi.StandardErrorCode.UNSUPPORTED_SUBQUERY;
import static io.trino.spi.StandardErrorCode.VIEW_IS_RECURSIVE;
import static io.trino.spi.StandardErrorCode.VIEW_IS_STALE;
import static io.trino.spi.connector.MaterializedViewFreshness.Freshness.FRESH;
import static io.trino.spi.connector.StandardWarningCode.REDUNDANT_ORDER_BY;
import static io.trino.spi.function.FunctionKind.AGGREGATE;
import static io.trino.spi.function.FunctionKind.WINDOW;
import static io.trino.spi.function.table.DescriptorArgument.NULL_DESCRIPTOR;
import static io.trino.spi.function.table.ReturnTypeSpecification.GenericTable.GENERIC_TABLE;
import static io.trino.spi.function.table.ReturnTypeSpecification.OnlyPassThrough.ONLY_PASS_THROUGH;
import static io.trino.spi.security.AccessDeniedException.denyExecuteFunction;
import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.BooleanType.BOOLEAN;
import static io.trino.spi.type.DoubleType.DOUBLE;
import static io.trino.spi.type.IntegerType.INTEGER;
import static io.trino.spi.type.TimestampWithTimeZoneType.createTimestampWithTimeZoneType;
import static io.trino.spi.type.Timestamps.PICOSECONDS_PER_NANOSECOND;
import static io.trino.spi.type.TinyintType.TINYINT;
import static io.trino.spi.type.VarcharType.VARCHAR;
import static io.trino.spi.type.VarcharType.createUnboundedVarcharType;
import static io.trino.sql.NodeUtils.getSortItemsFromOrderBy;
import static io.trino.sql.analyzer.AggregationAnalyzer.verifyOrderByAggregations;
import static io.trino.sql.analyzer.AggregationAnalyzer.verifySourceAggregations;
import static io.trino.sql.analyzer.Analyzer.verifyNoAggregateWindowOrGroupingFunctions;
import static io.trino.sql.analyzer.CanonicalizationAware.canonicalizationAwareKey;
import static io.trino.sql.analyzer.ConstantEvaluator.evaluateConstant;
import static io.trino.sql.analyzer.DeterminismEvaluator.containsCurrentTimeFunctions;
import static io.trino.sql.analyzer.DeterminismEvaluator.isDeterministic;
import static io.trino.sql.analyzer.ExpressionAnalyzer.analyzeJsonQueryExpression;
import static io.trino.sql.analyzer.ExpressionAnalyzer.analyzeJsonValueExpression;
import static io.trino.sql.analyzer.ExpressionAnalyzer.createConstantAnalyzer;
import static io.trino.sql.analyzer.ExpressionTreeUtils.asQualifiedName;
import static io.trino.sql.analyzer.ExpressionTreeUtils.extractAggregateFunctions;
import static io.trino.sql.analyzer.ExpressionTreeUtils.extractExpressions;
import static io.trino.sql.analyzer.ExpressionTreeUtils.extractLocation;
import static io.trino.sql.analyzer.ExpressionTreeUtils.extractWindowExpressions;
import static io.trino.sql.analyzer.ExpressionTreeUtils.extractWindowFunctions;
import static io.trino.sql.analyzer.ExpressionTreeUtils.extractWindowMeasures;
import static io.trino.sql.analyzer.Scope.BasisType.TABLE;
import static io.trino.sql.analyzer.ScopeReferenceExtractor.getReferencesToScope;
import static io.trino.sql.analyzer.SemanticExceptions.semanticException;
import static io.trino.sql.analyzer.TypeSignatureProvider.fromTypes;
import static io.trino.sql.analyzer.TypeSignatureTranslator.toTypeSignature;
import static io.trino.sql.tree.BooleanLiteral.TRUE_LITERAL;
import static io.trino.sql.tree.DereferenceExpression.getQualifiedName;
import static io.trino.sql.tree.Join.Type.FULL;
import static io.trino.sql.tree.Join.Type.INNER;
import static io.trino.sql.tree.Join.Type.LEFT;
import static io.trino.sql.tree.Join.Type.RIGHT;
import static io.trino.sql.tree.PatternRecognitionRelation.RowsPerMatch.ONE;
import static io.trino.sql.tree.SaveMode.IGNORE;
import static io.trino.sql.tree.SaveMode.REPLACE;
import static io.trino.sql.util.AstUtils.preOrder;
import static io.trino.type.UnknownType.UNKNOWN;
import static io.trino.util.MoreLists.mappedCopy;
import static java.lang.Math.toIntExact;
import static java.lang.String.format;
import static java.util.Collections.emptyList;
import static java.util.Locale.ENGLISH;
import static java.util.Objects.requireNonNull;
class StatementAnalyzer
{
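// Built-in window functions that the SQL specification classifies as "value" functions.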
private static final Set<String> WINDOW_VALUE_FUNCTIONS = ImmutableSet.of("lead", "lag", "first_value", "last_value", "nth_value");
private final StatementAnalyzerFactory statementAnalyzerFactory;
private final Analysis analysis;
private final Metadata metadata;
private final PlannerContext plannerContext;
private final TypeCoercion typeCoercion;
private final Session session;
private final SqlParser sqlParser;
private final SessionTimeProvider sessionTimeProvider;
private final GroupProvider groupProvider;
private final AccessControl accessControl;
private final TransactionManager transactionManager;
private final TableProceduresRegistry tableProceduresRegistry;
private final TableFunctionRegistry tableFunctionRegistry;
private final TablePropertyManager tablePropertyManager;
private final AnalyzePropertyManager analyzePropertyManager;
private final TableProceduresPropertyManager tableProceduresPropertyManager;
private final FunctionResolver functionResolver;
private final WarningCollector warningCollector;
private final CorrelationSupport correlationSupport;
StatementAnalyzer(
StatementAnalyzerFactory statementAnalyzerFactory,
Analysis analysis,
PlannerContext plannerContext,
SqlParser sqlParser,
SessionTimeProvider sessionTimeProvider,
GroupProvider groupProvider,
AccessControl accessControl,
TransactionManager transactionManager,
Session session,
TableProceduresRegistry tableProceduresRegistry,
TableFunctionRegistry tableFunctionRegistry,
TablePropertyManager tablePropertyManager,
AnalyzePropertyManager analyzePropertyManager,
TableProceduresPropertyManager tableProceduresPropertyManager,
WarningCollector warningCollector,
CorrelationSupport correlationSupport)
{
this.statementAnalyzerFactory = requireNonNull(statementAnalyzerFactory, "statementAnalyzerFactory is null");
this.analysis = requireNonNull(analysis, "analysis is null");
this.plannerContext = requireNonNull(plannerContext, "plannerContext is null");
this.metadata = plannerContext.getMetadata();
this.typeCoercion = new TypeCoercion(plannerContext.getTypeManager()::getType);
this.sqlParser = requireNonNull(sqlParser, "sqlParser is null");
this.sessionTimeProvider = requireNonNull(sessionTimeProvider, "sessionTimeProvider is null");
this.groupProvider = requireNonNull(groupProvider, "groupProvider is null");
this.accessControl = requireNonNull(accessControl, "accessControl is null");
this.transactionManager = requireNonNull(transactionManager, "transactionManager is null");
this.session = requireNonNull(session, "session is null");
this.tableProceduresRegistry = requireNonNull(tableProceduresRegistry, "tableProceduresRegistry is null");
this.tableFunctionRegistry = requireNonNull(tableFunctionRegistry, "tableFunctionRegistry is null");
this.tablePropertyManager = requireNonNull(tablePropertyManager, "tablePropertyManager is null");
this.analyzePropertyManager = requireNonNull(analyzePropertyManager, "analyzePropertyManager is null");
this.tableProceduresPropertyManager = tableProceduresPropertyManager;
this.warningCollector = requireNonNull(warningCollector, "warningCollector is null");
this.correlationSupport = requireNonNull(correlationSupport, "correlationSupport is null");
this.functionResolver = plannerContext.getFunctionResolver(warningCollector);
}
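/**
 * Analyzes a top-level statement. There is no enclosing query, so no outer
 * query scope is supplied.
 */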
public Scope analyze(Node node)
{
return analyze(node, Optional.empty(), true);
}
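/**
 * Analyzes a node nested inside an outer query (for example, a correlated
 * subquery), propagating the given outer query scope.
 */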
public Scope analyze(Node node, Scope outerQueryScope)
{
return analyze(node, Optional.of(outerQueryScope), false);
}
private Scope analyze(Node node, Optional<Scope> outerQueryScope, boolean isTopLevel)
{
return new Visitor(outerQueryScope, warningCollector, Optional.empty(), isTopLevel)
.process(node, Optional.empty());
}
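// Entry point for DELETE/UPDATE/MERGE analysis: the target relation is
// analyzed while recording which kind of update statement is being processed.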
private Scope analyzeForUpdate(Relation relation, Optional<Scope> outerQueryScope, UpdateKind updateKind)
{
return new Visitor(outerQueryScope, warningCollector, Optional.of(updateKind), true)
.process(relation, Optional.empty());
}
private enum UpdateKind
{
DELETE,
UPDATE,
MERGE,
}
/**
 * Visitor context represents the local query scope (if one exists). The
 * invariant is that the hierarchy of local query scopes always has the outer
 * query scope (if provided) as an ancestor.
 */
private final class Visitor
extends AstVisitor<Scope, Optional<Scope>>
{
private final boolean isTopLevel;
private final Optional<Scope> outerQueryScope;
private final WarningCollector warningCollector;
private final Optional<UpdateKind> updateKind;
private Visitor(Optional<Scope> outerQueryScope, WarningCollector warningCollector, Optional<UpdateKind> updateKind, boolean isTopLevel)
{
this.outerQueryScope = requireNonNull(outerQueryScope, "outerQueryScope is null");
this.warningCollector = requireNonNull(warningCollector, "warningCollector is null");
this.updateKind = requireNonNull(updateKind, "updateKind is null");
this.isTopLevel = isTopLevel;
}
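// Wraps AstVisitor.process to enforce the invariants documented on this
// class: the returned scope must share the visitor's outer query scope, and
// it must have the local context scope (when present) as an ancestor.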
@Override
public Scope process(Node node, Optional<Scope> scope)
{
Scope returnScope = super.process(node, scope);
checkState(returnScope.getOuterQueryParent().equals(outerQueryScope), "result scope should have outer query scope equal to the parameter outer query scope");
scope.ifPresent(value -> checkState(hasScopeAsLocalParent(returnScope, value), "return scope should have context scope as one of its ancestors"));
return returnScope;
}
private Scope process(Node node, Scope scope)
{
return process(node, Optional.of(scope));
}
@Override
protected Scope visitNode(Node node, Optional<Scope> context)
{
throw new IllegalStateException("Unsupported node type: " + node.getClass().getName());
}
@Override
protected Scope visitUse(Use node, Optional<Scope> scope)
{
throw semanticException(NOT_SUPPORTED, node, "USE statement is not supported");
}
@Override
protected Scope visitInsert(Insert insert, Optional<Scope> scope)
{
QualifiedObjectName targetTable = createQualifiedObjectName(session, insert, insert.getTarget());
if (metadata.isMaterializedView(session, targetTable)) {
throw semanticException(NOT_SUPPORTED, insert, "Inserting into materialized views is not supported");
}
if (metadata.isView(session, targetTable)) {
throw semanticException(NOT_SUPPORTED, insert, "Inserting into views is not supported");
}
analysis.setUpdateType("INSERT");
// analyze the query that creates the data
Scope queryScope = analyze(insert.getQuery(), Optional.empty(), false);
// verify the insert destination columns match the query
RedirectionAwareTableHandle redirection = metadata.getRedirectionAwareTableHandle(session, targetTable);
Optional<TableHandle> targetTableHandle = redirection.tableHandle();
targetTable = redirection.redirectedTableName().orElse(targetTable);
if (targetTableHandle.isEmpty()) {
throw semanticException(TABLE_NOT_FOUND, insert, "Table '%s' does not exist", targetTable);
}
accessControl.checkCanInsertIntoTable(session.toSecurityContext(), targetTable);
TableSchema tableSchema = metadata.getTableSchema(session, targetTableHandle.get());
List<ColumnSchema> columns = tableSchema.columns().stream()
.filter(column -> !column.isHidden())
.collect(toImmutableList());
List<String> checkConstraints = tableSchema.tableSchema().getCheckConstraints();
if (!accessControl.getColumnMasks(session.toSecurityContext(), targetTable, columns).isEmpty()) {
throw semanticException(NOT_SUPPORTED, insert, "Insert into table with column masks is not supported");
}
Map<String, ColumnHandle> columnHandles = metadata.getColumnHandles(session, targetTableHandle.get());
List<Field> tableFields = analyzeTableOutputFields(insert.getTable(), targetTable, tableSchema, columnHandles);
Scope accessControlScope = Scope.builder()
.withRelationType(RelationId.anonymous(), new RelationType(tableFields))
.build();
analyzeFiltersAndMasks(insert.getTable(), targetTable, new RelationType(tableFields), accessControlScope);
analyzeCheckConstraints(insert.getTable(), targetTable, accessControlScope, checkConstraints);
analysis.registerTable(insert.getTable(), targetTableHandle, targetTable, session.getIdentity().getUser(), accessControlScope, Optional.empty());
List<String> tableColumns = columns.stream()
.map(ColumnSchema::getName)
.collect(toImmutableList());
// analyze target table layout, table columns should contain all partition columns
Optional<TableLayout> newTableLayout = metadata.getInsertLayout(session, targetTableHandle.get());
newTableLayout.ifPresent(layout -> {
if (!ImmutableSet.copyOf(tableColumns).containsAll(layout.getPartitionColumns())) {
throw new TrinoException(NOT_SUPPORTED, "INSERT must write all distribution columns: " + layout.getPartitionColumns());
}
});
List<String> insertColumns;
if (insert.getColumns().isPresent()) {
insertColumns = insert.getColumns().get().stream()
.map(Identifier::getValue)
.map(column -> column.toLowerCase(ENGLISH))
.collect(toImmutableList());
Set<String> columnNames = new HashSet<>();
for (String insertColumn : insertColumns) {
if (!tableColumns.contains(insertColumn)) {
throw semanticException(COLUMN_NOT_FOUND, insert, "Insert column name does not exist in target table: %s", insertColumn);
}
if (!columnNames.add(insertColumn)) {
throw semanticException(DUPLICATE_COLUMN_NAME, insert, "Insert column name is specified more than once: %s", insertColumn);
}
}
}
else {
insertColumns = tableColumns;
}
analysis.setInsert(new Analysis.Insert(
insert.getTable(),
targetTableHandle.get(),
insertColumns.stream().map(columnHandles::get).collect(toImmutableList()),
newTableLayout));
List<Type> tableTypes = insertColumns.stream()
.map(insertColumn -> tableSchema.column(insertColumn).getType())
.collect(toImmutableList());
List<Type> queryTypes = queryScope.getRelationType().getVisibleFields().stream()
.map(Field::getType)
.collect(toImmutableList());
if (!typesMatchForInsert(tableTypes, queryTypes)) {
throw semanticException(TYPE_MISMATCH,
insert,
"Insert query has mismatched column types: Table: [%s], Query: [%s]",
Joiner.on(", ").join(tableTypes),
Joiner.on(", ").join(queryTypes));
}
Stream<Column> columnStream = Streams.zip(
insertColumns.stream(),
tableTypes.stream()
.map(Type::toString),
Column::new);
analysis.setUpdateTarget(
targetTableHandle.get().catalogHandle().getVersion(),
targetTable,
Optional.empty(),
Optional.of(Streams.zip(
columnStream,
queryScope.getRelationType().getVisibleFields().stream(),
(column, field) -> new OutputColumn(column, analysis.getSourceColumns(field)))
.collect(toImmutableList())));
return createAndAssignScope(insert, scope, Field.newUnqualified("rows", BIGINT));
}
@Override
protected Scope visitRefreshMaterializedView(RefreshMaterializedView refreshMaterializedView, Optional<Scope> scope)
{
QualifiedObjectName name = createQualifiedObjectName(session, refreshMaterializedView, refreshMaterializedView.getName());
MaterializedViewDefinition view = metadata.getMaterializedView(session, name)
.orElseThrow(() -> semanticException(TABLE_NOT_FOUND, refreshMaterializedView, "Materialized view '%s' does not exist", name));
accessControl.checkCanRefreshMaterializedView(session.toSecurityContext(), name);
analysis.setUpdateType("REFRESH MATERIALIZED VIEW");
CatalogHandle catalogHandle = getRequiredCatalogHandle(metadata, session, refreshMaterializedView, name.catalogName());
if (metadata.delegateMaterializedViewRefreshToConnector(session, name)) {
analysis.setDelegatedRefreshMaterializedView(name);
analysis.setUpdateTarget(
catalogHandle.getVersion(),
name,
Optional.empty(),
Optional.empty());
return createAndAssignScope(refreshMaterializedView, scope);
}
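// Otherwise the refresh is carried out by Trino itself: the view query's
// results are inserted into the materialized view's storage table.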
QualifiedName storageName = getMaterializedViewStorageTableName(view)
.orElseThrow(() -> semanticException(TABLE_NOT_FOUND, refreshMaterializedView, "Storage Table for materialized view '%s' does not exist", name));
QualifiedObjectName targetTable = createQualifiedObjectName(session, refreshMaterializedView, storageName);
checkStorageTableNotRedirected(targetTable);
// analyze the query that creates the data
Query query = parseView(view.getOriginalSql(), name, refreshMaterializedView);
Scope queryScope = process(query, scope);
// verify the insert destination columns match the query
TableHandle targetTableHandle = metadata.getTableHandle(session, targetTable)
.orElseThrow(() -> semanticException(TABLE_NOT_FOUND, refreshMaterializedView, "Table '%s' does not exist", targetTable));
analysis.setSkipMaterializedViewRefresh(metadata.getMaterializedViewFreshness(session, name).getFreshness() == FRESH);
TableMetadata tableMetadata = metadata.getTableMetadata(session, targetTableHandle);
List<String> insertColumns = tableMetadata.columns().stream()
.filter(column -> !column.isHidden())
.map(ColumnMetadata::getName)
.collect(toImmutableList());
Map<String, ColumnHandle> columnHandles = metadata.getColumnHandles(session, targetTableHandle);
analysis.setRefreshMaterializedView(new Analysis.RefreshMaterializedViewAnalysis(
refreshMaterializedView.getTable(),
targetTableHandle, query,
insertColumns.stream().map(columnHandles::get).collect(toImmutableList())));
List<Type> tableTypes = insertColumns.stream()
.map(insertColumn -> tableMetadata.column(insertColumn).getType())
.collect(toImmutableList());
Stream<Column> columns = Streams.zip(
insertColumns.stream(),
tableTypes.stream()
.map(Type::toString),
Column::new);
analysis.setUpdateTarget(
catalogHandle.getVersion(),
name,
Optional.empty(),
Optional.of(Streams.zip(
columns,
queryScope.getRelationType().getVisibleFields().stream(),
(column, field) -> new OutputColumn(column, analysis.getSourceColumns(field)))
.collect(toImmutableList())));
return createAndAssignScope(refreshMaterializedView, scope, Field.newUnqualified("rows", BIGINT));
}
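// Checks, column by column, that each query output type can be written to the
// corresponding table column. As the note below explains, structural types
// containing bounded character types require strict coercibility; all other
// types only need to be compatible.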
private boolean typesMatchForInsert(List<Type> tableTypes, List<Type> queryTypes)
{
if (tableTypes.size() != queryTypes.size()) {
return false;
}
/*
TODO enable coercions based on type compatibility for INSERT of structural types containing nested bounded character types.
It might require defining a new range of cast operators and changes in GlobalFunctionCatalog to ensure proper handling
of nested types.
Currently, INSERT for such structural types is only allowed in the case of strict type coercibility.
INSERT for other types is allowed in all cases described by the Standard. It is obtained
by emulating a "guarded cast" in LogicalPlanner, and without any changes to the actual operators.
*/
for (int i = 0; i < tableTypes.size(); i++) {
if (hasNestedBoundedCharacterType(tableTypes.get(i))) {
if (!typeCoercion.canCoerce(queryTypes.get(i), tableTypes.get(i))) {
return false;
}
}
else if (!typeCoercion.isCompatible(queryTypes.get(i), tableTypes.get(i))) {
return false;
}
}
return true;
}
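// Mutually recursive with hasBoundedCharacterType: detects char or bounded
// varchar nested anywhere inside an array, map, or row type.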
private boolean hasNestedBoundedCharacterType(Type type)
{
if (type instanceof ArrayType) {
return hasBoundedCharacterType(((ArrayType) type).getElementType());
}
if (type instanceof MapType) {
return hasBoundedCharacterType(((MapType) type).getKeyType()) || hasBoundedCharacterType(((MapType) type).getValueType());
}
if (type instanceof RowType) {
for (Type fieldType : type.getTypeParameters()) {
if (hasBoundedCharacterType(fieldType)) {
return true;
}
}
}
return false;
}
private boolean hasBoundedCharacterType(Type type)
{
return type instanceof CharType || (type instanceof VarcharType && !((VarcharType) type).isUnbounded()) || hasNestedBoundedCharacterType(type);
}
@Override
protected Scope visitDelete(Delete node, Optional<Scope> scope)
{
Table table = node.getTable();
QualifiedObjectName originalName = createQualifiedObjectName(session, table, table.getName());
if (metadata.isMaterializedView(session, originalName)) {
throw semanticException(NOT_SUPPORTED, node, "Deleting from materialized views is not supported");
}
if (metadata.isView(session, originalName)) {
throw semanticException(NOT_SUPPORTED, node, "Deleting from views is not supported");
}
RedirectionAwareTableHandle redirection = metadata.getRedirectionAwareTableHandle(session, originalName);
QualifiedObjectName tableName = redirection.redirectedTableName().orElse(originalName);
TableHandle handle = redirection.tableHandle()
.orElseThrow(() -> semanticException(TABLE_NOT_FOUND, table, "Table '%s' does not exist", tableName));
accessControl.checkCanDeleteFromTable(session.toSecurityContext(), tableName);
TableSchema tableSchema = metadata.getTableSchema(session, handle);
if (!accessControl.getColumnMasks(session.toSecurityContext(), tableName, tableSchema.tableSchema().getColumns()).isEmpty()) {
throw semanticException(NOT_SUPPORTED, node, "Delete from table with column mask");
}
// Analyzer checks for select permissions but DELETE has a separate permission, so disable access checks
// TODO: we shouldn't need to create a new analyzer. The access control should be carried in the context object
StatementAnalyzer analyzer = statementAnalyzerFactory
.withSpecializedAccessControl(new AllowAllAccessControl())
.createStatementAnalyzer(analysis, session, warningCollector, CorrelationSupport.ALLOWED);
Scope tableScope = analyzer.analyzeForUpdate(table, scope, UpdateKind.DELETE);
node.getWhere().ifPresent(where -> analyzeWhere(node, tableScope, where));
analysis.setUpdateType("DELETE");
analysis.setUpdateTarget(handle.catalogHandle().getVersion(), tableName, Optional.of(table), Optional.empty());
Scope accessControlScope = Scope.builder()
.withRelationType(RelationId.anonymous(), analysis.getScope(table).getRelationType())
.build();
analyzeFiltersAndMasks(table, tableName, analysis.getScope(table).getRelationType(), accessControlScope);
analyzeCheckConstraints(table, tableName, accessControlScope, tableSchema.tableSchema().getCheckConstraints());
analysis.registerTable(table, Optional.of(handle), tableName, session.getIdentity().getUser(), accessControlScope, Optional.empty());
createMergeAnalysis(table, handle, tableSchema, tableScope, tableScope, ImmutableList.of());
return createAndAssignScope(node, scope, Field.newUnqualified("rows", BIGINT));
}
@Override
protected Scope visitAnalyze(Analyze node, Optional<Scope> scope)
{
QualifiedObjectName tableName = createQualifiedObjectName(session, node, node.getTableName());
if (metadata.isView(session, tableName)) {
throw semanticException(NOT_SUPPORTED, node, "Analyzing views is not supported");
}
TableHandle tableHandle = metadata.getTableHandle(session, tableName)
.orElseThrow(() -> semanticException(TABLE_NOT_FOUND, node, "Table '%s' does not exist", tableName));
analysis.setUpdateType("ANALYZE");
analysis.setUpdateTarget(tableHandle.catalogHandle().getVersion(), tableName, Optional.empty(), Optional.empty());
validateProperties(node.getProperties(), scope);
String catalogName = tableName.catalogName();
CatalogHandle catalogHandle = getRequiredCatalogHandle(metadata, session, node, catalogName);
Map<String, Object> analyzeProperties = analyzePropertyManager.getProperties(
catalogName,
catalogHandle,
node.getProperties(),
session,
plannerContext,
accessControl,
analysis.getParameters(),
true);
analysis.setAnalyzeMetadata(metadata.getStatisticsCollectionMetadata(session, tableHandle, analyzeProperties));
// user must have read and insert permission in order to analyze stats of a table
analysis.addTableColumnReferences(
accessControl,
session.getIdentity(),
ImmutableMultimap.<QualifiedObjectName, String>builder()
.putAll(tableName, metadata.getColumnHandles(session, tableHandle).keySet())
.build());
try {
accessControl.checkCanInsertIntoTable(session.toSecurityContext(), tableName);
}
catch (AccessDeniedException exception) {
throw new AccessDeniedException(format("Cannot ANALYZE (missing insert privilege) table %s", tableName), exception);
}
return createAndAssignScope(node, scope, Field.newUnqualified("rows", BIGINT));
}
@Override
protected Scope visitCreateTableAsSelect(CreateTableAsSelect node, Optional<Scope> scope)
{
// turn this into a query that has a new table writer node on top.
QualifiedObjectName targetTable = createQualifiedObjectName(session, node, node.getName());
Optional<TableHandle> targetTableHandle = metadata.getTableHandle(session, targetTable);
if (targetTableHandle.isPresent() && node.getSaveMode() != REPLACE) {
if (node.getSaveMode() == IGNORE) {
analysis.setCreate(new Analysis.Create(
Optional.of(targetTable),
Optional.empty(),
Optional.empty(),
node.isWithData(),
true,
false));
analysis.setUpdateType("CREATE TABLE");
analysis.setUpdateTarget(targetTableHandle.get().catalogHandle().getVersion(), targetTable, Optional.empty(), Optional.of(ImmutableList.of()));
return createAndAssignScope(node, scope, Field.newUnqualified("rows", BIGINT));
}
throw semanticException(TABLE_ALREADY_EXISTS, node, "Destination table '%s' already exists", targetTable);
}
validateProperties(node.getProperties(), scope);
String catalogName = targetTable.catalogName();
CatalogHandle catalogHandle = getRequiredCatalogHandle(metadata, session, node, catalogName);
Map<String, Object> properties = tablePropertyManager.getProperties(
catalogName,
catalogHandle,
node.getProperties(),
session,
plannerContext,
accessControl,
analysis.getParameters(),
true);
Set<String> specifiedPropertyKeys = node.getProperties().stream()
// property names are case-insensitive and normalized to lower case
.map(property -> property.getName().getValue().toLowerCase(ENGLISH))
.collect(toImmutableSet());
Map<String, Object> explicitlySetProperties = properties.keySet().stream()
.peek(key -> verify(key.equals(key.toLowerCase(ENGLISH)), "Property name '%s' not in lower-case", key))
.filter(specifiedPropertyKeys::contains)
.collect(toImmutableMap(Function.identity(), properties::get));
accessControl.checkCanCreateTable(session.toSecurityContext(), targetTable, explicitlySetProperties);
// analyze the query that creates the table
Scope queryScope = analyze(node.getQuery(), Optional.empty(), false);
ImmutableList.Builder<ColumnMetadata> columnsBuilder = ImmutableList.builder();
// analyze target table columns and column aliases
ImmutableList.Builder<OutputColumn> outputColumns = ImmutableList.builder();
if (node.getColumnAliases().isPresent()) {
validateColumnAliases(node.getColumnAliases().get(), queryScope.getRelationType().getVisibleFieldCount());
int aliasPosition = 0;
for (Field field : queryScope.getRelationType().getVisibleFields()) {
if (field.getType().equals(UNKNOWN)) {
throw semanticException(COLUMN_TYPE_UNKNOWN, node, "Column type is unknown at position %s", queryScope.getRelationType().indexOf(field) + 1);
}
String columnName = node.getColumnAliases().get().get(aliasPosition).getValue();
columnsBuilder.add(new ColumnMetadata(columnName, metadata.getSupportedType(session, catalogHandle, properties, field.getType()).orElse(field.getType())));
outputColumns.add(new OutputColumn(new Column(columnName, field.getType().toString()), analysis.getSourceColumns(field)));
aliasPosition++;
}
}
else {
validateColumns(node, queryScope.getRelationType());
columnsBuilder.addAll(queryScope.getRelationType().getVisibleFields().stream()
.map(field -> new ColumnMetadata(field.getName().orElseThrow(), metadata.getSupportedType(session, catalogHandle, properties, field.getType()).orElse(field.getType())))
.collect(toImmutableList()));
queryScope.getRelationType().getVisibleFields().stream()
.map(this::createOutputColumn)
.forEach(outputColumns::add);
}
// create target table metadata
List<ColumnMetadata> columns = columnsBuilder.build();
ConnectorTableMetadata tableMetadata = new ConnectorTableMetadata(targetTable.asSchemaTableName(), columns, properties, node.getComment());
// analyze target table layout
Optional<TableLayout> newTableLayout = metadata.getNewTableLayout(session, catalogName, tableMetadata);
Set<String> columnNames = columns.stream()
.map(ColumnMetadata::getName)
.collect(toImmutableSet());
if (newTableLayout.isPresent()) {
TableLayout layout = newTableLayout.get();
if (!columnNames.containsAll(layout.getPartitionColumns())) {
if (layout.getLayout().getPartitioning().isPresent()) {
throw new TrinoException(NOT_SUPPORTED, "INSERT must write all distribution columns: " + layout.getPartitionColumns());
}
// created table does not contain all columns required by preferred layout
newTableLayout = Optional.empty();
}
}
analysis.setCreate(new Analysis.Create(
Optional.of(targetTable),
Optional.of(tableMetadata),
newTableLayout,
node.isWithData(),
false,
node.getSaveMode() == REPLACE));
analysis.setUpdateType("CREATE TABLE");
analysis.setUpdateTarget(
catalogHandle.getVersion(),
targetTable,
Optional.empty(),
Optional.of(outputColumns.build()));
return createAndAssignScope(node, scope, Field.newUnqualified("rows", BIGINT));
}
@Override
protected Scope visitCreateView(CreateView node, Optional<Scope> scope)
{
QualifiedObjectName viewName = createQualifiedObjectName(session, node, node.getName());
node.getQuery().getFunctions().stream().findFirst().ifPresent(function -> {
throw semanticException(NOT_SUPPORTED, function, "Views cannot contain inline functions");
});
// analyze the query that creates the view
StatementAnalyzer analyzer = statementAnalyzerFactory.createStatementAnalyzer(analysis, session, warningCollector, CorrelationSupport.ALLOWED);
Scope queryScope = analyzer.analyze(node.getQuery());
accessControl.checkCanCreateView(session.toSecurityContext(), viewName);
validateColumns(node, queryScope.getRelationType());
CatalogHandle catalogHandle = getRequiredCatalogHandle(metadata, session, node, viewName.catalogName());
analysis.setUpdateType("CREATE VIEW");
analysis.setUpdateTarget(
catalogHandle.getVersion(),
viewName,
Optional.empty(),
Optional.of(queryScope.getRelationType().getVisibleFields().stream()
.map(this::createOutputColumn)
.collect(toImmutableList())));
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitSetSession(SetSession node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitResetSession(ResetSession node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitSetSessionAuthorization(SetSessionAuthorization node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitResetSessionAuthorization(ResetSessionAuthorization node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitAddColumn(AddColumn node, Optional<Scope> scope)
{
ColumnDefinition element = node.getColumn();
if (element.getName().getParts().size() > 1) {
if (!element.isNullable()) {
throw semanticException(NOT_SUPPORTED, node, "Adding fields with NOT NULL constraint is unsupported");
}
if (!element.getProperties().isEmpty()) {
throw semanticException(NOT_SUPPORTED, node, "Adding fields with column properties is unsupported");
}
if (element.getComment().isPresent()) {
throw semanticException(NOT_SUPPORTED, node, "Adding fields with COMMENT is unsupported");
}
}
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitSetColumnType(SetColumnType node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitDropNotNullConstraint(DropNotNullConstraint node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitCreateCatalog(CreateCatalog node, Optional<Scope> scope)
{
for (Property property : node.getProperties()) {
if (property.isSetToDefault()) {
throw semanticException(INVALID_CATALOG_PROPERTY, property, "Catalog properties do not support DEFAULT value");
}
}
validateProperties(node.getProperties(), scope);
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitDropCatalog(DropCatalog node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitCreateSchema(CreateSchema node, Optional<Scope> scope)
{
validateProperties(node.getProperties(), scope);
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitDropSchema(DropSchema node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitRenameSchema(RenameSchema node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitSetSchemaAuthorization(SetSchemaAuthorization node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitCreateTable(CreateTable node, Optional<Scope> scope)
{
validateProperties(node.getProperties(), scope);
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitProperty(Property node, Optional<Scope> scope)
{
if (node.isSetToDefault()) {
return createAndAssignScope(node, scope);
}
// Property value expressions must be constant
createConstantAnalyzer(plannerContext, accessControl, session, analysis.getParameters(), WarningCollector.NOOP, analysis.isDescribe())
.analyze(node.getNonDefaultValue(), createScope(scope));
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitCallArgument(CallArgument node, Optional<Scope> scope)
{
// CallArgument value expressions must be constant
createConstantAnalyzer(plannerContext, accessControl, session, analysis.getParameters(), WarningCollector.NOOP, analysis.isDescribe())
.analyze(node.getValue(), createScope(scope));
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitTruncateTable(TruncateTable node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitDropTable(DropTable node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitRenameTable(RenameTable node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitSetProperties(SetProperties node, Optional<Scope> context)
{
return createAndAssignScope(node, context);
}
@Override
protected Scope visitComment(Comment node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitRenameColumn(RenameColumn node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitDropColumn(DropColumn node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitSetTableAuthorization(SetTableAuthorization node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitTableExecute(TableExecute node, Optional<Scope> scope)
{
Table table = node.getTable();
QualifiedObjectName originalName = createQualifiedObjectName(session, table, table.getName());
String procedureName = node.getProcedureName().getCanonicalValue();
if (metadata.isMaterializedView(session, originalName)) {
throw semanticException(NOT_SUPPORTED, node, "ALTER TABLE EXECUTE is not supported for materialized views");
}
if (metadata.isView(session, originalName)) {
throw semanticException(NOT_SUPPORTED, node, "ALTER TABLE EXECUTE is not supported for views");
}
RedirectionAwareTableHandle redirection = metadata.getRedirectionAwareTableHandle(session, originalName);
QualifiedObjectName tableName = redirection.redirectedTableName().orElse(originalName);
TableHandle tableHandle = redirection.tableHandle()
.orElseThrow(() -> semanticException(TABLE_NOT_FOUND, table, "Table '%s' does not exist", tableName));
accessControl.checkCanExecuteTableProcedure(
session.toSecurityContext(),
tableName,
procedureName);
if (!accessControl.getRowFilters(session.toSecurityContext(), tableName).isEmpty()) {
throw semanticException(NOT_SUPPORTED, node, "ALTER TABLE EXECUTE is not supported for table with row filter");
}
TableMetadata tableMetadata = metadata.getTableMetadata(session, tableHandle);
if (!accessControl.getColumnMasks(session.toSecurityContext(), tableName, tableMetadata.columns().stream().map(ColumnMetadata::getColumnSchema).collect(toImmutableList())).isEmpty()) {
throw semanticException(NOT_SUPPORTED, node, "ALTER TABLE EXECUTE is not supported for table with column masks");
}
Scope tableScope = analyze(table);
String catalogName = tableName.catalogName();
CatalogHandle catalogHandle = getRequiredCatalogHandle(metadata, session, node, catalogName);
TableProcedureMetadata procedureMetadata = tableProceduresRegistry.resolve(catalogHandle, procedureName);
// analyze WHERE
if (!procedureMetadata.getExecutionMode().supportsFilter() && node.getWhere().isPresent()) {
throw semanticException(NOT_SUPPORTED, node, "WHERE not supported for procedure %s", procedureName);
}
node.getWhere().ifPresent(where -> analyzeWhere(node, tableScope, where));
// analyze arguments
List<Property> arguments = processTableExecuteArguments(node, procedureMetadata, scope);
Map<String, Object> tableProperties = tableProceduresPropertyManager.getProperties(
catalogName,
catalogHandle,
procedureName,
arguments,
session,
plannerContext,
accessControl,
analysis.getParameters());
TableExecuteHandle executeHandle =
metadata.getTableHandleForExecute(
session,
tableHandle,
procedureName,
tableProperties)
.orElseThrow(() -> semanticException(NOT_SUPPORTED, node, "Procedure '%s' cannot be executed on table '%s'", procedureName, tableName));
analysis.setTableExecuteReadsData(procedureMetadata.getExecutionMode().isReadsData());
analysis.setTableExecuteHandle(executeHandle);
analysis.setUpdateType("ALTER TABLE EXECUTE");
analysis.setUpdateTarget(executeHandle.catalogHandle().getVersion(), tableName, Optional.of(table), Optional.empty());
return createAndAssignScope(node, scope, Field.newUnqualified("rows", BIGINT));
}
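// Requires procedure arguments to be all named or all positional, then
// converts them to a list of Property, matching positional arguments to the
// procedure's declared properties in order.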
private List<Property> processTableExecuteArguments(TableExecute node, TableProcedureMetadata procedureMetadata, Optional<Scope> scope)
{
List<CallArgument> arguments = node.getArguments();
Predicate<CallArgument> hasName = argument -> argument.getName().isPresent();
boolean anyNamed = arguments.stream().anyMatch(hasName);
boolean allNamed = arguments.stream().allMatch(hasName);
if (anyNamed && !allNamed) {
throw semanticException(INVALID_ARGUMENTS, node, "Named and positional arguments cannot be mixed");
}
if (!anyNamed && arguments.size() > procedureMetadata.getProperties().size()) {
throw semanticException(INVALID_ARGUMENTS, node, "Too many positional arguments");
}
for (CallArgument argument : arguments) {
process(argument, scope);
}
List<Property> properties = new ArrayList<>();
if (anyNamed) {
// all properties named
Set<String> names = new HashSet<>();
for (CallArgument argument : arguments) {
Identifier name = argument.getName().orElseThrow();
if (!names.add(name.getCanonicalValue())) {
throw semanticException(DUPLICATE_PROPERTY, argument, "Duplicate named argument: %s", name);
}
properties.add(new Property(argument.getLocation(), name, argument.getValue()));
}
}
else {
// all properties unnamed
int pos = 0;
for (CallArgument argument : arguments) {
Identifier name = new Identifier(procedureMetadata.getProperties().get(pos).getName());
properties.add(new Property(argument.getLocation(), name, argument.getValue()));
pos++;
}
}
return properties;
}
@Override
protected Scope visitRenameView(RenameView node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitRenameMaterializedView(RenameMaterializedView node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitSetViewAuthorization(SetViewAuthorization node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitDropView(DropView node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitStartTransaction(StartTransaction node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitCommit(Commit node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitRollback(Rollback node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitPrepare(Prepare node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitDeallocate(Deallocate node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitExecute(Execute node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitExecuteImmediate(ExecuteImmediate node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitGrant(Grant node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitDeny(Deny node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitRevoke(Revoke node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitCall(Call node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitCreateMaterializedView(CreateMaterializedView node, Optional<Scope> scope)
{
QualifiedObjectName viewName = createQualifiedObjectName(session, node, node.getName());
if (node.isReplace() && node.isNotExists()) {
throw semanticException(NOT_SUPPORTED, node, "'CREATE OR REPLACE' and 'IF NOT EXISTS' clauses cannot be used together");
}
node.getGracePeriod().ifPresent(gracePeriod -> analyzeExpression(gracePeriod, Scope.create()));
// analyze the query that creates the view
StatementAnalyzer analyzer = statementAnalyzerFactory.createStatementAnalyzer(analysis, session, warningCollector, CorrelationSupport.ALLOWED);
Scope queryScope = analyzer.analyze(node.getQuery());
validateColumns(node, queryScope.getRelationType());
CatalogHandle catalogHandle = getRequiredCatalogHandle(metadata, session, node, viewName.catalogName());
analysis.setUpdateType("CREATE MATERIALIZED VIEW");
analysis.setUpdateTarget(
catalogHandle.getVersion(),
viewName,
Optional.empty(),
Optional.of(
queryScope.getRelationType().getVisibleFields().stream()
.map(this::createOutputColumn)
.collect(toImmutableList())));
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitDropMaterializedView(DropMaterializedView node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
@Override
protected Scope visitSetTimeZone(SetTimeZone node, Optional<Scope> scope)
{
return createAndAssignScope(node, scope);
}
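// Rejects duplicate property names up front, before analyzing the individual property values.
// For example (property names are connector-specific), WITH (format = 'ORC', format = 'PARQUET')
// fails with DUPLICATE_PROPERTY rather than silently taking one of the values.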
private void validateProperties(List<Property> properties, Optional<Scope> scope)
{
Set<String> propertyNames = new HashSet<>();
for (Property property : properties) {
if (!propertyNames.add(property.getName().getValue())) {
throw semanticException(DUPLICATE_PROPERTY, property, "Duplicate property: %s", property.getName().getValue());
}
}
for (Property property : properties) {
process(property, scope);
}
}
private void validateColumns(Statement node, RelationType descriptor)
{
// verify that all column names are specified and unique
// TODO: collect errors and return them all at once
Set<String> names = new HashSet<>();
for (Field field : descriptor.getVisibleFields()) {
String fieldName = field.getName()
.orElseThrow(() -> semanticException(MISSING_COLUMN_NAME, node, "Column name not specified at position %s", descriptor.indexOf(field) + 1));
if (!names.add(fieldName)) {
throw semanticException(DUPLICATE_COLUMN_NAME, node, "Column name '%s' specified more than once", fieldName);
}
if (field.getType().equals(UNKNOWN)) {
throw semanticException(COLUMN_TYPE_UNKNOWN, node, "Column type is unknown: %s", fieldName);
}
}
}
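// Column aliases are compared case-insensitively, so an alias list such as "AS t (a, A)" is
// rejected as a duplicate even though the two spellings differ in case.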
private void validateColumnAliases(List<Identifier> columnAliases, int sourceColumnSize)
{
validateColumnAliasesCount(columnAliases, sourceColumnSize);
Set<String> names = new HashSet<>();
for (Identifier identifier : columnAliases) {
if (names.contains(identifier.getValue().toLowerCase(ENGLISH))) {
throw semanticException(DUPLICATE_COLUMN_NAME, identifier, "Column name '%s' specified more than once", identifier.getValue());
}
names.add(identifier.getValue().toLowerCase(ENGLISH));
}
}
private void validateColumnAliasesCount(List<Identifier> columnAliases, int sourceColumnSize)
{
if (columnAliases.size() != sourceColumnSize) {
throw semanticException(
MISMATCHED_COLUMN_ALIASES,
columnAliases.get(0),
"Column alias list has %s entries but relation has %s columns",
columnAliases.size(),
sourceColumnSize);
}
}
@Override
protected Scope visitExplain(Explain node, Optional<Scope> scope)
{
process(node.getStatement(), scope);
return createAndAssignScope(node, scope, Field.newUnqualified("Query Plan", VARCHAR));
}
@Override
protected Scope visitExplainAnalyze(ExplainAnalyze node, Optional<Scope> scope)
{
process(node.getStatement(), scope);
return createAndAssignScope(node, scope, Field.newUnqualified("Query Plan", VARCHAR));
}
@Override
protected Scope visitQuery(Query node, Optional<Scope> scope)
{
for (FunctionSpecification function : node.getFunctions()) {
if (function.getName().getPrefix().isPresent()) {
throw semanticException(SYNTAX_ERROR, function, "Inline function names cannot be qualified: %s", function.getName());
}
function.getRoutineCharacteristics().stream()
.filter(SecurityCharacteristic.class::isInstance)
.findFirst()
.ifPresent(security -> {
throw semanticException(NOT_SUPPORTED, security, "Security mode not supported for inline functions");
});
plannerContext.getLanguageFunctionManager().addInlineFunction(session, function, accessControl);
}
Scope withScope = analyzeWith(node, scope);
Scope queryBodyScope = process(node.getQueryBody(), withScope);
List<Expression> orderByExpressions = emptyList();
if (node.getOrderBy().isPresent()) {
orderByExpressions = analyzeOrderBy(node, getSortItemsFromOrderBy(node.getOrderBy()), queryBodyScope);
if ((queryBodyScope.getOuterQueryParent().isPresent() || !isTopLevel) && node.getLimit().isEmpty() && node.getOffset().isEmpty()) {
// not the root scope and ORDER BY is ineffective
analysis.markRedundantOrderBy(node.getOrderBy().get());
warningCollector.add(new TrinoWarning(REDUNDANT_ORDER_BY, "ORDER BY in subquery may have no effect"));
}
}
analysis.setOrderByExpressions(node, orderByExpressions);
if (node.getOffset().isPresent()) {
analyzeOffset(node.getOffset().get(), queryBodyScope);
}
if (node.getLimit().isPresent()) {
boolean requiresOrderBy = analyzeLimit(node.getLimit().get(), queryBodyScope);
if (requiresOrderBy && node.getOrderBy().isEmpty()) {
throw semanticException(MISSING_ORDER_BY, node.getLimit().get(), "FETCH FIRST WITH TIES clause requires ORDER BY");
}
}
// Input fields == Output fields
analysis.setSelectExpressions(
node,
descriptorToFields(queryBodyScope).stream()
.map(expression -> new SelectExpression(expression, Optional.empty()))
.collect(toImmutableList()));
Scope queryScope = Scope.builder()
.withParent(withScope)
.withRelationType(RelationId.of(node), queryBodyScope.getRelationType())
.build();
analysis.setScope(node, queryScope);
return queryScope;
}
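// UNNEST expands each argument into columns: an array of rows contributes one column per row field,
// any other array contributes a single column, and a map contributes a key column and a value column.
// For example, UNNEST(ARRAY[1, 2], MAP(ARRAY['a'], ARRAY[10])) WITH ORDINALITY produces three data
// columns plus a trailing BIGINT ordinality column.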
@Override
protected Scope visitUnnest(Unnest node, Optional<Scope> scope)
{
ImmutableMap.Builder<NodeRef<Expression>, List<Field>> mappings = ImmutableMap.builder();
ImmutableList.Builder<Field> outputFields = ImmutableList.builder();
for (Expression expression : node.getExpressions()) {
verifyNoAggregateWindowOrGroupingFunctions(session, functionResolver, accessControl, expression, "UNNEST");
List<Field> expressionOutputs = new ArrayList<>();
ExpressionAnalysis expressionAnalysis = analyzeExpression(expression, createScope(scope));
analysis.recordSubqueries(node, expressionAnalysis);
Type expressionType = expressionAnalysis.getType(expression);
if (expressionType instanceof ArrayType) {
Type elementType = ((ArrayType) expressionType).getElementType();
if (elementType instanceof RowType) {
((RowType) elementType).getFields().stream()
.map(field -> Field.newUnqualified(field.getName(), field.getType()))
.forEach(expressionOutputs::add);
}
else {
expressionOutputs.add(Field.newUnqualified(Optional.empty(), elementType));
}
}
else if (expressionType instanceof MapType) {
expressionOutputs.add(Field.newUnqualified(Optional.empty(), ((MapType) expressionType).getKeyType()));
expressionOutputs.add(Field.newUnqualified(Optional.empty(), ((MapType) expressionType).getValueType()));
}
else {
throw new TrinoException(INVALID_FUNCTION_ARGUMENT, "Cannot unnest type: " + expressionType);
}
outputFields.addAll(expressionOutputs);
mappings.put(NodeRef.of(expression), expressionOutputs);
}
Optional<Field> ordinalityField = Optional.empty();
if (node.isWithOrdinality()) {
ordinalityField = Optional.of(Field.newUnqualified(Optional.empty(), BIGINT));
}
ordinalityField.ifPresent(outputFields::add);
analysis.setUnnest(node, new UnnestAnalysis(mappings.buildOrThrow(), ordinalityField));
return createAndAssignScope(node, scope, outputFields.build());
}
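// A LATERAL subquery may reference columns of preceding FROM items, so it is analyzed with
// correlation allowed, e.g. SELECT * FROM orders, LATERAL (SELECT totalprice * 2 AS doubled).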
@Override
protected Scope visitLateral(Lateral node, Optional<Scope> scope)
{
StatementAnalyzer analyzer = statementAnalyzerFactory.createStatementAnalyzer(analysis, session, warningCollector, CorrelationSupport.ALLOWED);
Scope queryScope = analyzer.analyze(node.getQuery(), scope.orElseThrow());
return createAndAssignScope(node, scope, queryScope.getRelationType());
}
@Override
protected Scope visitTableFunctionInvocation(TableFunctionInvocation node, Optional<Scope> scope)
{
TableFunctionMetadata tableFunctionMetadata = resolveTableFunction(node)
.orElseThrow(() -> semanticException(FUNCTION_NOT_FOUND, node, "Table function '%s' not registered", node.getName()));
ConnectorTableFunction function = tableFunctionMetadata.function();
CatalogHandle catalogHandle = tableFunctionMetadata.catalogHandle();
Node errorLocation = node;
if (!node.getArguments().isEmpty()) {
errorLocation = node.getArguments().getFirst();
}
ArgumentsAnalysis argumentsAnalysis = analyzeArguments(function.getArguments(), node.getArguments(), scope, errorLocation);
ConnectorTransactionHandle transactionHandle = transactionManager.getConnectorTransaction(session.getRequiredTransactionId(), catalogHandle);
TableFunctionAnalysis functionAnalysis = function.analyze(
session.toConnectorSession(catalogHandle),
transactionHandle,
argumentsAnalysis.getPassedArguments(),
new InjectedConnectorAccessControl(accessControl, session.toSecurityContext(), catalogHandle.getCatalogName().toString()));
List<List<String>> copartitioningLists = analyzeCopartitioning(node.getCopartitioning(), argumentsAnalysis.getTableArgumentAnalyses());
// determine the result relation type per SQL standard ISO/IEC 9075-2, 4.33 SQL-invoked routines, p. 123, 413, 414
ReturnTypeSpecification returnTypeSpecification = function.getReturnTypeSpecification();
if (returnTypeSpecification == GENERIC_TABLE || !argumentsAnalysis.getTableArgumentAnalyses().isEmpty()) {
analysis.addPolymorphicTableFunction(node);
}
Optional<Descriptor> analyzedProperColumnsDescriptor = functionAnalysis.getReturnedType();
Descriptor properColumnsDescriptor;
if (returnTypeSpecification == ONLY_PASS_THROUGH) {
if (analysis.isAliased(node)) {
// According to SQL standard ISO/IEC 9075-2, 7.6 <table reference>, p. 409,
// table alias is prohibited for a table function with ONLY PASS THROUGH returned type.
throw semanticException(INVALID_TABLE_FUNCTION_INVOCATION, node, "Alias specified for table function with ONLY PASS THROUGH return type");
}
if (analyzedProperColumnsDescriptor.isPresent()) {
// If a table function has ONLY PASS THROUGH returned type, it does not produce any proper columns,
// so the function's analyze() method should not return the proper columns descriptor.
throw semanticException(AMBIGUOUS_RETURN_TYPE, node, "Returned relation type for table function %s is ambiguous", node.getName());
}
if (function.getArguments().stream()
.filter(TableArgumentSpecification.class::isInstance)
.map(TableArgumentSpecification.class::cast)
.noneMatch(TableArgumentSpecification::isPassThroughColumns)) {
// According to SQL standard ISO/IEC 9075-2, 10.4 <routine invocation>, p. 764,
// if there is no generic table parameter that specifies PASS THROUGH, then number of proper columns shall be positive.
// For GENERIC_TABLE and DescribedTable returned types, this is enforced by the Descriptor constructor, which requires positive number of fields.
// Here we enforce it for the remaining returned type specification: ONLY_PASS_THROUGH.
throw new TrinoException(FUNCTION_IMPLEMENTATION_ERROR, "A table function with ONLY_PASS_THROUGH return type must have a table argument with pass-through columns.");
}
properColumnsDescriptor = null;
}
else if (returnTypeSpecification == GENERIC_TABLE) {
// According to SQL standard ISO/IEC 9075-2, 7.6 <table reference>, p. 409,
// table alias is mandatory for a polymorphic table function invocation which produces proper columns.
// We don't enforce this requirement.
properColumnsDescriptor = analyzedProperColumnsDescriptor
.orElseThrow(() -> semanticException(MISSING_RETURN_TYPE, node, "Cannot determine returned relation type for table function %s", node.getName()));
}
else { // returned type is statically declared at function declaration
// According to SQL standard ISO/IEC 9075-2, 7.6 <table reference>, p. 409,
// table alias is mandatory for a polymorphic table function invocation which produces proper columns.
// We don't enforce this requirement.
if (analyzedProperColumnsDescriptor.isPresent()) {
// If a table function has statically declared returned type, it is returned in TableFunctionMetadata
// so the function's analyze() method should not return the proper columns descriptor.
throw semanticException(AMBIGUOUS_RETURN_TYPE, node, "Returned relation type for table function %s is ambiguous", node.getName());
}
properColumnsDescriptor = ((DescribedTable) returnTypeSpecification).getDescriptor();
}
// validate the required input columns
Map<String, List<Integer>> requiredColumns = functionAnalysis.getRequiredColumns();
Map<String, TableArgumentAnalysis> tableArgumentsByName = argumentsAnalysis.getTableArgumentAnalyses().stream()
.collect(toImmutableMap(TableArgumentAnalysis::getArgumentName, Function.identity()));
Set<String> allInputs = ImmutableSet.copyOf(tableArgumentsByName.keySet());
requiredColumns.forEach((name, columns) -> {
if (!allInputs.contains(name)) {
throw new TrinoException(FUNCTION_IMPLEMENTATION_ERROR, format("Table function %s specifies required columns from table argument %s which cannot be found", node.getName(), name));
}
if (columns.isEmpty()) {
throw new TrinoException(FUNCTION_IMPLEMENTATION_ERROR, format("Table function %s specifies empty list of required columns from table argument %s", node.getName(), name));
}
// the scope is recorded, because table arguments are already analyzed
Scope inputScope = analysis.getScope(tableArgumentsByName.get(name).getRelation());
columns.stream()
.filter(column -> column < 0 || column >= inputScope.getRelationType().getVisibleFieldCount())
.findFirst()
.ifPresent(column -> {
throw new TrinoException(FUNCTION_IMPLEMENTATION_ERROR, format("Invalid index: %s of required column from table argument %s", column, name));
});
// record the required columns for access control
columns.stream()
.map(inputScope.getRelationType()::getFieldByIndex)
.forEach(this::recordColumnAccess);
});
Set<String> requiredInputs = ImmutableSet.copyOf(requiredColumns.keySet());
allInputs.stream()
.filter(input -> !requiredInputs.contains(input))
.findFirst()
.ifPresent(input -> {
throw new TrinoException(FUNCTION_IMPLEMENTATION_ERROR, format("Table function %s does not specify required input columns from table argument %s", node.getName(), input));
});
// The result relation type of a table function consists of:
// 1. columns created by the table function, called the proper columns.
// 2. passed columns from input tables:
// - for tables with the "pass through columns" option, these are all columns of the table,
// - for tables without the "pass through columns" option, these are the partitioning columns of the table, if any.
ImmutableList.Builder<Field> fields = ImmutableList.builder();
// proper columns first
if (properColumnsDescriptor != null) {
properColumnsDescriptor.getFields().stream()
// per spec, field names are mandatory. We support anonymous fields.
.map(field -> Field.newUnqualified(field.getName(), field.getType().orElseThrow(() -> new IllegalStateException("missing returned type for proper field"))))
.forEach(fields::add);
}
// next, columns derived from table arguments, in order of argument declarations
List<String> tableArgumentNames = function.getArguments().stream()
.filter(argumentSpecification -> argumentSpecification instanceof TableArgumentSpecification)
.map(ArgumentSpecification::getName)
.collect(toImmutableList());
// table arguments in order of argument declarations
ImmutableList.Builder<TableArgumentAnalysis> orderedTableArguments = ImmutableList.builder();
for (String name : tableArgumentNames) {
TableArgumentAnalysis argument = tableArgumentsByName.get(name);
orderedTableArguments.add(argument);
Scope argumentScope = analysis.getScope(argument.getRelation());
if (argument.isPassThroughColumns()) {
argumentScope.getRelationType().getAllFields()
.forEach(fields::add);
}
else if (argument.getPartitionBy().isPresent()) {
argument.getPartitionBy().get().stream()
.map(expression -> validateAndGetInputField(expression, argumentScope))
.forEach(fields::add);
}
}
analysis.setTableFunctionAnalysis(node, new TableFunctionInvocationAnalysis(
catalogHandle,
function.getName(),
argumentsAnalysis.getPassedArguments(),
orderedTableArguments.build(),
functionAnalysis.getRequiredColumns(),
copartitioningLists,
properColumnsDescriptor == null ? 0 : properColumnsDescriptor.getFields().size(),
functionAnalysis.getHandle(),
transactionHandle));
return createAndAssignScope(node, scope, fields.build());
}
private Optional<TableFunctionMetadata> resolveTableFunction(TableFunctionInvocation node)
{
boolean unauthorized = false;
for (CatalogSchemaFunctionName name : toPath(session, node.getName(), accessControl)) {
CatalogHandle catalogHandle = getRequiredCatalogHandle(metadata, session, node, name.getCatalogName());
Optional<ConnectorTableFunction> resolved = tableFunctionRegistry.resolve(catalogHandle, name.getSchemaFunctionName());
if (resolved.isPresent()) {
if (isBuiltinFunctionName(name) || accessControl.canExecuteFunction(SecurityContext.of(session), new QualifiedObjectName(name.getCatalogName(), name.getSchemaName(), name.getFunctionName()))) {
return Optional.of(new TableFunctionMetadata(catalogHandle, resolved.get()));
}
unauthorized = true;
}
}
if (unauthorized) {
denyExecuteFunction(node.getName().toString());
}
return Optional.empty();
}
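// Matches the passed arguments against the declared argument specifications and fills in declared
// defaults for arguments that were not passed. For example, an invocation such as
// TABLE(sequence(start => 1, stop => 10)) passes two arguments by name; a declared "step" argument
// with a default value would be supplied by analyzeDefault below.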
private ArgumentsAnalysis analyzeArguments(List<ArgumentSpecification> argumentSpecifications, List<TableFunctionArgument> arguments, Optional<Scope> scope, Node errorLocation)
{
if (argumentSpecifications.size() < arguments.size()) {
throw semanticException(INVALID_ARGUMENTS, errorLocation, "Too many arguments. Expected at most %s arguments, got %s arguments", argumentSpecifications.size(), arguments.size());
}
if (argumentSpecifications.isEmpty()) {
return new ArgumentsAnalysis(ImmutableMap.of(), ImmutableList.of());
}
boolean argumentsPassedByName = !arguments.isEmpty() && arguments.stream().allMatch(argument -> argument.getName().isPresent());
boolean argumentsPassedByPosition = arguments.stream().allMatch(argument -> argument.getName().isEmpty());
if (!argumentsPassedByName && !argumentsPassedByPosition) {
throw semanticException(INVALID_ARGUMENTS, errorLocation, "All arguments must be passed by name or all must be passed positionally");
}
ImmutableMap.Builder<String, Argument> passedArguments = ImmutableMap.builder();
ImmutableList.Builder<TableArgumentAnalysis> tableArgumentAnalyses = ImmutableList.builder();
if (argumentsPassedByName) {
Map<String, ArgumentSpecification> argumentSpecificationsByName = new HashMap<>();
for (ArgumentSpecification argumentSpecification : argumentSpecifications) {
if (argumentSpecificationsByName.put(argumentSpecification.getName(), argumentSpecification) != null) {
// this should never happen, because the argument names are validated at function registration time
throw new IllegalStateException("Duplicate argument specification for name: " + argumentSpecification.getName());
}
}
Set<String> uniqueArgumentNames = new HashSet<>();
for (TableFunctionArgument argument : arguments) {
String argumentName = argument.getName().orElseThrow().getCanonicalValue();
if (!uniqueArgumentNames.add(argumentName)) {
throw semanticException(INVALID_FUNCTION_ARGUMENT, argument, "Duplicate argument name: %s", argumentName);
}
ArgumentSpecification argumentSpecification = argumentSpecificationsByName.remove(argumentName);
if (argumentSpecification == null) {
throw semanticException(INVALID_FUNCTION_ARGUMENT, argument, "Unexpected argument name: %s", argumentName);
}
ArgumentAnalysis argumentAnalysis = analyzeArgument(argumentSpecification, argument, scope);
passedArguments.put(argumentSpecification.getName(), argumentAnalysis.getArgument());
argumentAnalysis.getTableArgumentAnalysis().ifPresent(tableArgumentAnalyses::add);
}
// apply defaults for not specified arguments
for (Map.Entry<String, ArgumentSpecification> entry : argumentSpecificationsByName.entrySet()) {
ArgumentSpecification argumentSpecification = entry.getValue();
passedArguments.put(argumentSpecification.getName(), analyzeDefault(argumentSpecification, errorLocation));
}
}
else {
for (int i = 0; i < arguments.size(); i++) {
TableFunctionArgument argument = arguments.get(i);
ArgumentSpecification argumentSpecification = argumentSpecifications.get(i); // TODO args passed positionally - can one only pass some prefix of args?
ArgumentAnalysis argumentAnalysis = analyzeArgument(argumentSpecification, argument, scope);
passedArguments.put(argumentSpecification.getName(), argumentAnalysis.getArgument());
argumentAnalysis.getTableArgumentAnalysis().ifPresent(tableArgumentAnalyses::add);
}
// apply defaults for not specified arguments
for (int i = arguments.size(); i < argumentSpecifications.size(); i++) {
ArgumentSpecification argumentSpecification = argumentSpecifications.get(i);
passedArguments.put(argumentSpecification.getName(), analyzeDefault(argumentSpecification, errorLocation));
}
}
return new ArgumentsAnalysis(passedArguments.buildOrThrow(), tableArgumentAnalyses.build());
}
private ArgumentAnalysis analyzeArgument(ArgumentSpecification argumentSpecification, TableFunctionArgument argument, Optional<Scope> scope)
{
String actualType;
if (argument.getValue() instanceof TableFunctionTableArgument) {
actualType = "table";
}
else if (argument.getValue() instanceof TableFunctionDescriptorArgument) {
actualType = "descriptor";
}
else if (argument.getValue() instanceof Expression) {
actualType = "expression";
}
else {
throw semanticException(INVALID_FUNCTION_ARGUMENT, argument, "Unexpected table function argument type: %s", argument.getClass().getSimpleName());
}
if (argumentSpecification instanceof TableArgumentSpecification) {
if (!(argument.getValue() instanceof TableFunctionTableArgument)) {
if (argument.getValue() instanceof FunctionCall) {
// probably an attempt to pass a table function call, which is not supported, and was parsed as a function call
throw semanticException(NOT_SUPPORTED, argument, "Invalid table argument %s. Table functions are not allowed as table function arguments", argumentSpecification.getName());
}
throw semanticException(INVALID_FUNCTION_ARGUMENT, argument, "Invalid argument %s. Expected table, got %s", argumentSpecification.getName(), actualType);
}
return analyzeTableArgument(argument, (TableArgumentSpecification) argumentSpecification, scope);
}
if (argumentSpecification instanceof DescriptorArgumentSpecification) {
if (!(argument.getValue() instanceof TableFunctionDescriptorArgument)) {
if (argument.getValue() instanceof FunctionCall && ((FunctionCall) argument.getValue()).getName().hasSuffix(QualifiedName.of("descriptor"))) { // function name is always compared case-insensitive
// malformed descriptor which parsed as a function call
throw semanticException(INVALID_FUNCTION_ARGUMENT, argument, "Invalid descriptor argument %s. Descriptors should be formatted as 'DESCRIPTOR(name [type], ...)'", argumentSpecification.getName());
}
throw semanticException(INVALID_FUNCTION_ARGUMENT, argument, "Invalid argument %s. Expected descriptor, got %s", argumentSpecification.getName(), actualType);
}
return analyzeDescriptorArgument((TableFunctionDescriptorArgument) argument.getValue());
}
if (argumentSpecification instanceof ScalarArgumentSpecification) {
if (!(argument.getValue() instanceof Expression expression)) {
throw semanticException(INVALID_FUNCTION_ARGUMENT, argument, "Invalid argument %s. Expected expression, got %s", argumentSpecification.getName(), actualType);
}
// 'descriptor' as a function name is not allowed in this context
if (expression instanceof FunctionCall && ((FunctionCall) expression).getName().hasSuffix(QualifiedName.of("descriptor"))) { // function name is always compared case-insensitive
throw semanticException(INVALID_FUNCTION_ARGUMENT, argument, "'descriptor' function is not allowed as a table function argument");
}
return analyzeScalarArgument(expression, ((ScalarArgumentSpecification) argumentSpecification).getType());
}
throw new IllegalStateException("Unexpected argument specification: " + argumentSpecification.getClass().getSimpleName());
}
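// A table argument may carry set-semantics clauses, for example:
//   TABLE(orders) PARTITION BY custkey ORDER BY orderdate KEEP WHEN EMPTY
// Partitioning, ordering and empty-table treatment are rejected for arguments declared with row semantics.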
private ArgumentAnalysis analyzeTableArgument(TableFunctionArgument argument, TableArgumentSpecification argumentSpecification, Optional<Scope> scope)
{
TableFunctionTableArgument tableArgument = (TableFunctionTableArgument) argument.getValue();
TableArgument.Builder argumentBuilder = TableArgument.builder();
TableArgumentAnalysis.Builder analysisBuilder = TableArgumentAnalysis.builder();
analysisBuilder.withArgumentName(argumentSpecification.getName());
// process the relation
Relation relation = tableArgument.getTable();
analysisBuilder.withRelation(relation);
Scope argumentScope = process(relation, scope);
QualifiedName relationName = analysis.getRelationName(relation);
if (relationName != null) {
analysisBuilder.withName(relationName);
}
argumentBuilder.rowType(RowType.from(argumentScope.getRelationType().getVisibleFields().stream()
.map(field -> new RowType.Field(field.getName(), field.getType()))
.collect(toImmutableList())));
// analyze PARTITION BY
if (tableArgument.getPartitionBy().isPresent()) {
if (argumentSpecification.isRowSemantics()) {
throw semanticException(INVALID_FUNCTION_ARGUMENT, argument, "Invalid argument %s. Partitioning specified for table argument with row semantics", argumentSpecification.getName());
}
List<Expression> partitionBy = tableArgument.getPartitionBy().get();
analysisBuilder.withPartitionBy(partitionBy);
partitionBy.forEach(partitioningColumn -> {
validateAndGetInputField(partitioningColumn, argumentScope);
Type type = analyzeExpression(partitioningColumn, argumentScope).getType(partitioningColumn);
if (!type.isComparable()) {
throw semanticException(TYPE_MISMATCH, partitioningColumn, "%s is not comparable, and therefore cannot be used in PARTITION BY", type);
}
});
argumentBuilder.partitionBy(partitionBy.stream()
.map(expression -> switch (expression) {
case Identifier identifier -> identifier.getValue();
case DereferenceExpression dereferenceExpression -> dereferenceExpression.toString();
default -> throw new IllegalStateException("Unexpected partitionBy expression: " + expression);
})
.collect(toImmutableList()));
}
// analyze ORDER BY
if (tableArgument.getOrderBy().isPresent()) {
if (argumentSpecification.isRowSemantics()) {
throw semanticException(INVALID_FUNCTION_ARGUMENT, argument, "Invalid argument %s. Ordering specified for table argument with row semantics", argumentSpecification.getName());
}
OrderBy orderBy = tableArgument.getOrderBy().get();
analysisBuilder.withOrderBy(orderBy);
orderBy.getSortItems().stream()
.map(SortItem::getSortKey)
.forEach(orderingColumn -> {
validateAndGetInputField(orderingColumn, argumentScope);
Type type = analyzeExpression(orderingColumn, argumentScope).getType(orderingColumn);
if (!type.isOrderable()) {
throw semanticException(TYPE_MISMATCH, orderingColumn, "%s is not orderable, and therefore cannot be used in ORDER BY", type);
}
});
argumentBuilder.orderBy(orderBy.getSortItems().stream()
.map(SortItem::getSortKey)
.map(expression -> switch (expression) {
case Identifier identifier -> identifier.getValue();
case DereferenceExpression dereferenceExpression -> dereferenceExpression.toString();
default -> throw new IllegalStateException("Unexpected orderBy expression: " + expression);
})
.collect(toImmutableList()));
}
// analyze the PRUNE/KEEP WHEN EMPTY property
boolean pruneWhenEmpty = argumentSpecification.isPruneWhenEmpty();
if (tableArgument.getEmptyTableTreatment().isPresent()) {
if (argumentSpecification.isRowSemantics()) {
throw semanticException(INVALID_FUNCTION_ARGUMENT, tableArgument.getEmptyTableTreatment().get(), "Invalid argument %s. Empty behavior specified for table argument with row semantics", argumentSpecification.getName());
}
pruneWhenEmpty = tableArgument.getEmptyTableTreatment().get().getTreatment() == EmptyTableTreatment.Treatment.PRUNE;
}
analysisBuilder.withPruneWhenEmpty(pruneWhenEmpty);
// record remaining properties
analysisBuilder.withRowSemantics(argumentSpecification.isRowSemantics());
analysisBuilder.withPassThroughColumns(argumentSpecification.isPassThroughColumns());
return new ArgumentAnalysis(argumentBuilder.build(), Optional.of(analysisBuilder.build()));
}
private ArgumentAnalysis analyzeDescriptorArgument(TableFunctionDescriptorArgument argument)
{
return new ArgumentAnalysis(
argument.getDescriptor()
.map(descriptor -> DescriptorArgument.builder()
.descriptor(new Descriptor(descriptor.getFields().stream()
.map(field -> new Descriptor.Field(
field.getName().getCanonicalValue(),
field.getType().map(type -> {
try {
return plannerContext.getTypeManager().getType(toTypeSignature(type));
}
catch (TypeNotFoundException e) {
throw semanticException(TYPE_MISMATCH, type, "Unknown type: %s", type);
}
})))
.collect(toImmutableList())))
.build())
.orElse(NULL_DESCRIPTOR),
Optional.empty());
}
private ArgumentAnalysis analyzeScalarArgument(Expression expression, Type type)
{
// inline parameters
Expression inlined = ExpressionTreeRewriter.rewriteWith(new ExpressionRewriter<>()
{
@Override
public Expression rewriteParameter(Parameter node, Void context, ExpressionTreeRewriter<Void> treeRewriter)
{
if (analysis.isDescribe()) {
// We cannot handle DESCRIBE when a table function argument involves a parameter.
// In DESCRIBE, the parameter values are not known. We cannot pass a dummy value for a parameter.
// The value of a table function argument can affect the returned relation type. The returned
// relation type can affect the assumed types for other parameters in the query.
throw semanticException(NOT_SUPPORTED, node, "DESCRIBE is not supported if a table function uses parameters");
}
return analysis.getParameters().get(NodeRef.of(node));
}
}, expression);
// currently, only constant arguments are supported
Object constantValue = evaluateConstant(inlined, type, plannerContext, session, accessControl);
return new ArgumentAnalysis(
ScalarArgument.builder()
.type(type)
.value(constantValue)
.build(),
Optional.empty());
}
private Argument analyzeDefault(ArgumentSpecification argumentSpecification, Node errorLocation)
{
if (argumentSpecification.isRequired()) {
throw semanticException(MISSING_ARGUMENT, errorLocation, "Missing argument: %s", argumentSpecification.getName());
}
checkArgument(!(argumentSpecification instanceof TableArgumentSpecification), "invalid table argument specification: default set");
if (argumentSpecification instanceof DescriptorArgumentSpecification) {
return DescriptorArgument.builder()
.descriptor((Descriptor) argumentSpecification.getDefaultValue())
.build();
}
if (argumentSpecification instanceof ScalarArgumentSpecification) {
return ScalarArgument.builder()
.type(((ScalarArgumentSpecification) argumentSpecification).getType())
.value(argumentSpecification.getDefaultValue())
.build();
}
throw new IllegalStateException("Unexpected argument specification: " + argumentSpecification.getClass().getSimpleName());
}
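// For example, COPARTITION (input_1, input_2) requires that both referenced table arguments are
// partitioned on the same number of columns and that corresponding partitioning columns can be
// coerced to a common supertype; the coercions are recorded on the partitioning expressions.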
private List<List<String>> analyzeCopartitioning(List<List<QualifiedName>> copartitioning, List<TableArgumentAnalysis> tableArgumentAnalyses)
{
// map table arguments by relation name. Use a multimap, because multiple arguments can have the same value, e.g. input_1 => tpch.tiny.orders, input_2 => tpch.tiny.orders
ImmutableMultimap.Builder<QualifiedName, TableArgumentAnalysis> unqualifiedInputsBuilder = ImmutableMultimap.builder();
ImmutableMultimap.Builder<QualifiedName, TableArgumentAnalysis> qualifiedInputsBuilder = ImmutableMultimap.builder();
tableArgumentAnalyses.stream()
.filter(argument -> argument.getName().isPresent())
.forEach(argument -> {
QualifiedName name = argument.getName().get();
if (name.getParts().size() == 1) {
unqualifiedInputsBuilder.put(name, argument);
}
else if (name.getParts().size() == 3) {
qualifiedInputsBuilder.put(name, argument);
}
else {
throw new IllegalStateException("relation name should be unqualified or fully qualified");
}
});
Multimap<QualifiedName, TableArgumentAnalysis> unqualifiedInputs = unqualifiedInputsBuilder.build();
Multimap<QualifiedName, TableArgumentAnalysis> qualifiedInputs = qualifiedInputsBuilder.build();
ImmutableList.Builder<List<String>> copartitionBuilder = ImmutableList.builder();
Set<String> referencedArguments = new HashSet<>();
for (List<QualifiedName> nameList : copartitioning) {
ImmutableList.Builder<TableArgumentAnalysis> copartitionListBuilder = ImmutableList.builder();
// resolve copartition tables as references to table arguments
for (QualifiedName name : nameList) {
Collection<TableArgumentAnalysis> candidates = emptyList();
if (name.getParts().size() == 1) {
// try to match unqualified name. it might be a reference to a CTE or an aliased relation
candidates = unqualifiedInputs.get(name);
}
if (candidates.isEmpty()) {
// qualify the name using current schema and catalog
QualifiedObjectName fullyQualifiedName = createQualifiedObjectName(session, name.getOriginalParts().get(0), name);
candidates = qualifiedInputs.get(QualifiedName.of(fullyQualifiedName.catalogName(), fullyQualifiedName.schemaName(), fullyQualifiedName.objectName()));
}
if (candidates.isEmpty()) {
throw semanticException(INVALID_COPARTITIONING, name.getOriginalParts().get(0), "No table argument found for name: %s", name);
}
if (candidates.size() > 1) {
throw semanticException(INVALID_COPARTITIONING, name.getOriginalParts().get(0), "Ambiguous reference: multiple table arguments found for name: %s", name);
}
TableArgumentAnalysis argument = getOnlyElement(candidates);
if (!referencedArguments.add(argument.getArgumentName())) {
// multiple references to argument in COPARTITION clause are implicitly prohibited by
// ISO/IEC TR REPORT 19075-7, p.33, Feature B203, “More than one copartition specification”
throw semanticException(INVALID_COPARTITIONING, name.getOriginalParts().get(0), "Multiple references to table argument: %s in COPARTITION clause", name);
}
copartitionListBuilder.add(argument);
}
List<TableArgumentAnalysis> copartitionList = copartitionListBuilder.build();
// analyze partitioning columns
copartitionList.stream()
.filter(argument -> argument.getPartitionBy().isEmpty())
.findFirst().ifPresent(unpartitioned -> {
throw semanticException(INVALID_COPARTITIONING, unpartitioned.getRelation(), "Table %s referenced in COPARTITION clause is not partitioned", unpartitioned.getName().orElseThrow());
});
// TODO make sure that copartitioned tables cannot have empty partitioning lists.
// ISO/IEC TR REPORT 19075-7, 4.5 Partitioning and ordering, p.25 is not clear: "With copartitioning, the copartitioned table arguments must have the same number of partitioning columns,
// and corresponding partitioning columns must be comparable. The DBMS effectively performs a full outer equijoin on the copartitioning columns"
copartitionList.stream()
.filter(argument -> argument.getPartitionBy().orElseThrow().isEmpty())
.findFirst().ifPresent(partitionedOnEmpty -> {
// table is partitioned but no partitioning columns are specified (single partition)
throw semanticException(INVALID_COPARTITIONING, partitionedOnEmpty.getRelation(), "No partitioning columns specified for table %s referenced in COPARTITION clause", partitionedOnEmpty.getName().orElseThrow());
});
List<List<Expression>> partitioningColumns = copartitionList.stream()
.map(TableArgumentAnalysis::getPartitionBy)
.map(Optional::orElseThrow)
.collect(toImmutableList());
if (partitioningColumns.stream()
.map(List::size)
.distinct()
.count() > 1) {
throw semanticException(INVALID_COPARTITIONING, nameList.get(0).getOriginalParts().get(0), "Numbers of partitioning columns in copartitioned tables do not match");
}
// coerce corresponding copartition columns to common supertype
for (int index = 0; index < partitioningColumns.get(0).size(); index++) {
Type commonSuperType = analysis.getType(partitioningColumns.get(0).get(index));
// find common supertype
for (List<Expression> columnList : partitioningColumns) {
Optional<Type> superType = typeCoercion.getCommonSuperType(commonSuperType, analysis.getType(columnList.get(index)));
if (superType.isEmpty()) {
throw semanticException(TYPE_MISMATCH, nameList.get(0).getOriginalParts().get(0), "Partitioning columns in copartitioned tables have incompatible types");
}
commonSuperType = superType.get();
}
for (List<Expression> columnList : partitioningColumns) {
Expression column = columnList.get(index);
Type type = analysis.getType(column);
if (!type.equals(commonSuperType)) {
if (!typeCoercion.canCoerce(type, commonSuperType)) {
throw semanticException(TYPE_MISMATCH, column, "Cannot coerce column of type %s to common supertype: %s", type.getDisplayName(), commonSuperType.getDisplayName());
}
analysis.addCoercion(column, commonSuperType);
}
}
}
// record the resolved copartition arguments by argument names
copartitionBuilder.add(copartitionList.stream()
.map(TableArgumentAnalysis::getArgumentName)
.collect(toImmutableList()));
}
return copartitionBuilder.build();
}
private Optional<QualifiedName> getMaterializedViewStorageTableName(MaterializedViewDefinition viewDefinition)
{
if (viewDefinition.getStorageTable().isEmpty()) {
return Optional.empty();
}
CatalogSchemaTableName catalogSchemaTableName = viewDefinition.getStorageTable().get();
SchemaTableName schemaTableName = catalogSchemaTableName.getSchemaTableName();
return Optional.of(QualifiedName.of(ImmutableList.of(
new Identifier(catalogSchemaTableName.getCatalogName(), true),
new Identifier(schemaTableName.getSchemaName(), true),
new Identifier(schemaTableName.getTableName(), true))));
}
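// Resolution order for a table name: a WITH query visible in scope, a recursive reference in an
// expandable WITH query, a materialized view (answered from its storage table when sufficiently
// fresh), a logical view, and finally a regular table, taking table redirections into account.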
@Override
protected Scope visitTable(Table table, Optional<Scope> scope)
{
if (table.getName().getPrefix().isEmpty()) {
// is this a reference to a WITH query?
Optional<WithQuery> withQuery = createScope(scope).getNamedQuery(table.getName().getSuffix());
if (withQuery.isPresent()) {
analysis.setRelationName(table, table.getName());
return createScopeForCommonTableExpression(table, scope, withQuery.get());
}
// is this a recursive reference in an expandable WITH query? If so, there's a base scope recorded.
Optional<Scope> expandableBaseScope = analysis.getExpandableBaseScope(table);
if (expandableBaseScope.isPresent()) {
Scope baseScope = expandableBaseScope.get();
// adjust local and outer parent scopes according to the local context of the recursive reference
Scope resultScope = scopeBuilder(scope)
.withRelationType(baseScope.getRelationId(), baseScope.getRelationType())
.build();
analysis.setScope(table, resultScope);
analysis.setRelationName(table, table.getName());
return resultScope;
}
}
QualifiedObjectName name = createQualifiedObjectName(session, table, table.getName());
analysis.setRelationName(table, QualifiedName.of(name.catalogName(), name.schemaName(), name.objectName()));
Optional<MaterializedViewDefinition> optionalMaterializedView = metadata.getMaterializedView(session, name);
if (optionalMaterializedView.isPresent()) {
MaterializedViewDefinition materializedViewDefinition = optionalMaterializedView.get();
analysis.addEmptyColumnReferencesForTable(accessControl, session.getIdentity(), name);
if (isMaterializedViewSufficientlyFresh(session, name, materializedViewDefinition)) {
// If materialized view is sufficiently fresh with respect to its grace period, answer the query using the storage table
QualifiedName storageName = getMaterializedViewStorageTableName(materializedViewDefinition)
.orElseThrow(() -> semanticException(INVALID_VIEW, table, "Materialized view '%s' is fresh but does not have storage table name", name));
QualifiedObjectName storageTableName = createQualifiedObjectName(session, table, storageName);
checkStorageTableNotRedirected(storageTableName);
TableHandle tableHandle = metadata.getTableHandle(session, storageTableName)
.orElseThrow(() -> semanticException(INVALID_VIEW, table, "Storage table '%s' does not exist", storageTableName));
return createScopeForMaterializedView(table, name, scope, materializedViewDefinition, Optional.of(tableHandle));
}
// This is a stale materialized view and should be expanded like a logical view
return createScopeForMaterializedView(table, name, scope, materializedViewDefinition, Optional.empty());
}
// This could be a reference to a logical view or a table
Optional<ViewDefinition> optionalView = metadata.getView(session, name);
if (optionalView.isPresent()) {
analysis.addEmptyColumnReferencesForTable(accessControl, session.getIdentity(), name);
return createScopeForView(table, name, scope, optionalView.get());
}
// This can only be a table
RedirectionAwareTableHandle redirection = getTableHandle(table, name, scope);
Optional<TableHandle> tableHandle = redirection.tableHandle();
QualifiedObjectName targetTableName = redirection.redirectedTableName().orElse(name);
analysis.addEmptyColumnReferencesForTable(accessControl, session.getIdentity(), targetTableName);
if (tableHandle.isEmpty()) {
getRequiredCatalogHandle(metadata, session, table, targetTableName.catalogName());
if (!metadata.schemaExists(session, new CatalogSchemaName(targetTableName.catalogName(), targetTableName.schemaName()))) {
throw semanticException(SCHEMA_NOT_FOUND, table, "Schema '%s' does not exist", targetTableName.schemaName());
}
throw semanticException(TABLE_NOT_FOUND, table, "Table '%s' does not exist", targetTableName);
}
TableSchema tableSchema = metadata.getTableSchema(session, tableHandle.get());
Map<String, ColumnHandle> columnHandles = metadata.getColumnHandles(session, tableHandle.get());
ImmutableList.Builder<Field> fields = ImmutableList.builder();
fields.addAll(analyzeTableOutputFields(table, targetTableName, tableSchema, columnHandles));
boolean addRowIdColumn = updateKind.isPresent();
if (addRowIdColumn) {
// Add the row id field
ColumnHandle rowIdColumnHandle = metadata.getMergeRowIdColumnHandle(session, tableHandle.get());
Type type = metadata.getColumnMetadata(session, tableHandle.get(), rowIdColumnHandle).getType();
Field field = Field.newUnqualified(Optional.empty(), type);
fields.add(field);
analysis.setColumn(field, rowIdColumnHandle);
}
List<Field> outputFields = fields.build();
Scope accessControlScope = Scope.builder()
.withRelationType(RelationId.anonymous(), new RelationType(outputFields))
.build();
analyzeFiltersAndMasks(table, targetTableName, new RelationType(outputFields), accessControlScope);
analysis.registerTable(table, tableHandle, targetTableName, session.getIdentity().getUser(), accessControlScope, Optional.empty());
Scope tableScope = createAndAssignScope(table, scope, outputFields);
if (addRowIdColumn) {
FieldReference reference = new FieldReference(outputFields.size() - 1);
analyzeExpression(reference, tableScope);
analysis.setRowIdField(table, reference);
}
return tableScope;
}
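// For example, with GRACE PERIOD INTERVAL '1' HOUR, a materialized view last refreshed 30 minutes
// before the query is still answered from its storage table. Without a grace period, a materialized
// view that reports a last-fresh time is always considered sufficiently fresh.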
private boolean isMaterializedViewSufficientlyFresh(Session session, QualifiedObjectName name, MaterializedViewDefinition materializedViewDefinition)
{
MaterializedViewFreshness materializedViewFreshness = metadata.getMaterializedViewFreshness(session, name);
MaterializedViewFreshness.Freshness freshness = materializedViewFreshness.getFreshness();
if (freshness == FRESH) {
return true;
}
Optional<Instant> lastFreshTime = materializedViewFreshness.getLastFreshTime();
if (lastFreshTime.isEmpty()) {
// E.g. never refreshed, or connector not updated to report fresh time
return false;
}
if (materializedViewDefinition.getGracePeriod().isEmpty()) {
// Unlimited grace period
return true;
}
Duration gracePeriod = materializedViewDefinition.getGracePeriod().get();
if (gracePeriod.isZero()) {
// Consider 0 as a special value meaning "do not accept any staleness". This makes 0 more reliable, and more likely what the user wanted,
// regardless of lastFreshTime, query time or rounding.
return false;
}
// Can be negative
// TODO should we compare lastFreshTime with session.start() or with current time? The freshness is calculated with respect to current state of things.
Duration staleness = Duration.between(lastFreshTime.get(), sessionTimeProvider.getStart(session));
return staleness.compareTo(gracePeriod) <= 0;
}
private void checkStorageTableNotRedirected(QualifiedObjectName source)
{
metadata.getRedirectionAwareTableHandle(session, source).redirectedTableName().ifPresent(name -> {
throw new TrinoException(NOT_SUPPORTED, format("Redirection of materialized view storage table '%s' to '%s' is not supported", source, name));
});
}
private void analyzeFiltersAndMasks(Table table, QualifiedObjectName name, RelationType relationType, Scope accessControlScope)
{
ImmutableList.Builder<ColumnSchema> columnSchemaBuilder = ImmutableList.builder();
for (int index = 0; index < relationType.getAllFieldCount(); index++) {
Field field = relationType.getFieldByIndex(index);
field.getName().ifPresent(fieldName -> columnSchemaBuilder.add(ColumnSchema.builder()
.setName(fieldName)
.setType(field.getType())
.setHidden(field.isHidden())
.build()));
}
List<ColumnSchema> columnSchemas = columnSchemaBuilder.build();
Map<ColumnSchema, ViewExpression> masks = accessControl.getColumnMasks(session.toSecurityContext(), name, columnSchemas);
for (ColumnSchema columnSchema : columnSchemas) {
Optional.ofNullable(masks.get(columnSchema)).ifPresent(mask -> {
if (checkCanSelectFromColumn(name, columnSchema.getName())) {
analyzeColumnMask(session.getIdentity().getUser(), table, name, columnSchema, accessControlScope, mask);
}
});
}
accessControl.getRowFilters(session.toSecurityContext(), name)
.forEach(filter -> analyzeRowFilter(session.getIdentity().getUser(), table, name, accessControlScope, filter));
}
private void analyzeCheckConstraints(Table table, QualifiedObjectName name, Scope accessControlScope, List<String> constraints)
{
for (String constraint : constraints) {
ViewExpression expression = ViewExpression.builder()
.catalog(name.catalogName())
.schema(name.schemaName())
.expression(constraint)
.build();
analyzeCheckConstraint(table, name, accessControlScope, expression);
}
}
private boolean checkCanSelectFromColumn(QualifiedObjectName name, String column)
{
try {
accessControl.checkCanSelectFromColumns(session.toSecurityContext(), name, ImmutableSet.of(column));
return true;
}
catch (AccessDeniedException e) {
return false;
}
}
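// Re-aliases the CTE's output columns with the names given in the WITH declaration, e.g.
// WITH cte (x, y) AS (SELECT a, b FROM t) exposes columns x and y qualified by the name "cte".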
private Scope createScopeForCommonTableExpression(Table table, Optional<Scope> scope, WithQuery withQuery)
{
Query query = withQuery.getQuery();
analysis.registerNamedQuery(table, query);
// re-alias the fields with the name assigned to the query in the WITH declaration
RelationType queryDescriptor = analysis.getOutputDescriptor(query);
List<Field> fields;
Optional<List<Identifier>> columnNames = withQuery.getColumnNames();
if (columnNames.isPresent()) {
// if columns are explicitly aliased -> WITH cte(alias1, alias2 ...)
checkState(columnNames.get().size() == queryDescriptor.getVisibleFieldCount(), "mismatched aliases");
ImmutableList.Builder fieldBuilder = ImmutableList.builder();
Iterator<Identifier> aliases = columnNames.get().iterator();
for (int i = 0; i < queryDescriptor.getAllFieldCount(); i++) {
Field inputField = queryDescriptor.getFieldByIndex(i);
if (!inputField.isHidden()) {
Field field = Field.newQualified(
QualifiedName.of(table.getName().getSuffix()),
Optional.of(aliases.next().getValue()),
inputField.getType(),
false,
inputField.getOriginTable(),
inputField.getOriginColumnName(),
inputField.isAliased());
fieldBuilder.add(field);
analysis.addSourceColumns(field, analysis.getSourceColumns(inputField));
}
}
fields = fieldBuilder.build();
}
else {
ImmutableList.Builder fieldBuilder = ImmutableList.builder();
for (int i = 0; i < queryDescriptor.getAllFieldCount(); i++) {
Field inputField = queryDescriptor.getFieldByIndex(i);
if (!inputField.isHidden()) {
Field field = Field.newQualified(
QualifiedName.of(table.getName().getSuffix()),
inputField.getName(),
inputField.getType(),
false,
inputField.getOriginTable(),
inputField.getOriginColumnName(),
inputField.isAliased());
fieldBuilder.add(field);
analysis.addSourceColumns(field, analysis.getSourceColumns(inputField));
}
}
fields = fieldBuilder.build();
}
return createAndAssignScope(table, scope, fields);
}
private Scope createScopeForMaterializedView(Table table, QualifiedObjectName name, Optional<Scope> scope, MaterializedViewDefinition view, Optional<TableHandle> storageTable)
{
return createScopeForView(
table,
name,
scope,
view.getOriginalSql(),
view.getCatalog(),
view.getSchema(),
view.getRunAsIdentity(),
view.getPath(),
view.getColumns(),
storageTable,
true);
}
private Scope createScopeForView(Table table, QualifiedObjectName name, Optional<Scope> scope, ViewDefinition view)
{
return createScopeForView(table,
name,
scope,
view.getOriginalSql(),
view.getCatalog(),
view.getSchema(),
view.getRunAsIdentity(),
view.getPath(),
view.getColumns(),
Optional.empty(),
false);
}
private Scope createScopeForView(
Table table,
QualifiedObjectName name,
Optional<Scope> scope,
String originalSql,
Optional<String> catalog,
Optional<String> schema,
Optional<Identity> owner,
List<CatalogSchemaName> path,
List<ViewColumn> columns,
Optional<TableHandle> storageTable,
boolean isMaterializedView)
{
Statement statement = analysis.getStatement();
if (statement instanceof CreateView viewStatement) {
QualifiedObjectName viewNameFromStatement = createQualifiedObjectName(session, viewStatement, viewStatement.getName());
if (viewStatement.isReplace() && viewNameFromStatement.equals(name)) {
throw semanticException(VIEW_IS_RECURSIVE, table, "Statement would create a recursive view");
}
}
if (statement instanceof CreateMaterializedView viewStatement) {
QualifiedObjectName viewNameFromStatement = createQualifiedObjectName(session, viewStatement, viewStatement.getName());
if (viewStatement.isReplace() && viewNameFromStatement.equals(name)) {
throw semanticException(VIEW_IS_RECURSIVE, table, "Statement would create a recursive materialized view");
}
}
if (analysis.hasTableInView(table)) {
throw semanticException(VIEW_IS_RECURSIVE, table, "View is recursive");
}
Query query = parseView(originalSql, name, table);
if (!query.getFunctions().isEmpty()) {
throw semanticException(NOT_SUPPORTED, table, "View contains inline function: %s", name);
}
analysis.registerTableForView(table, name, isMaterializedView);
RelationType descriptor = analyzeView(query, name, catalog, schema, owner, path, table);
analysis.unregisterTableForView();
checkViewStaleness(columns, descriptor.getVisibleFields(), name, table)
.ifPresent(explanation -> { throw semanticException(VIEW_IS_STALE, table, "View '%s' is stale or in invalid state: %s", name, explanation); });
// Derive the type of the view from the stored definition, not from the analysis of the underlying query.
// This is needed in case the underlying table(s) changed and the query in the view now produces types that
// are implicitly coercible to the declared view types.
List<Field> viewFields = columns.stream()
.map(column -> Field.newQualified(
table.getName(),
Optional.of(column.name()),
getViewColumnType(column, name, table),
false,
Optional.of(name),
Optional.of(column.name()),
false))
.collect(toImmutableList());
if (storageTable.isPresent()) {
List<Field> storageTableFields = analyzeStorageTable(table, viewFields, storageTable.get());
analysis.setMaterializedViewStorageTableFields(table, storageTableFields);
}
else {
analysis.registerNamedQuery(table, query);
}
Scope accessControlScope = Scope.builder()
.withRelationType(RelationId.anonymous(), new RelationType(viewFields))
.build();
analyzeFiltersAndMasks(table, name, new RelationType(viewFields), accessControlScope);
analysis.registerTable(table, storageTable, name, session.getIdentity().getUser(), accessControlScope, Optional.of(originalSql));
viewFields.forEach(field -> analysis.addSourceColumns(field, ImmutableSet.of(new SourceColumn(name, field.getName().orElseThrow()))));
return createAndAssignScope(table, scope, viewFields);
}
private List<Field> analyzeStorageTable(Table table, List<Field> viewFields, TableHandle storageTable)
{
TableSchema tableSchema = metadata.getTableSchema(session, storageTable);
Map<String, ColumnHandle> columnHandles = metadata.getColumnHandles(session, storageTable);
QualifiedObjectName tableName = createQualifiedObjectName(session, table, table.getName());
checkStorageTableNotRedirected(tableName);
List<Field> tableFields = analyzeTableOutputFields(table, tableName, tableSchema, columnHandles)
.stream()
.filter(field -> !field.isHidden())
.collect(toImmutableList());
// make sure storage table fields match view fields
if (tableFields.size() != viewFields.size()) {
throw semanticException(
INVALID_VIEW,
table,
"storage table column count (%s) does not match column count derived from the materialized view query analysis (%s)",
tableFields.size(),
viewFields.size());
}
for (int index = 0; index < tableFields.size(); index++) {
Field tableField = tableFields.get(index);
Field viewField = viewFields.get(index);
if (tableField.getName().isEmpty()) {
throw semanticException(
INVALID_VIEW,
table,
"a column of type %s projected from query view at position %s has no name",
tableField.getType(),
index);
}
String tableFieldName = tableField.getName().orElseThrow();
String viewFieldName = viewField.getName().orElseThrow();
if (!viewFieldName.equalsIgnoreCase(tableFieldName)) {
throw semanticException(
INVALID_VIEW,
table,
"column [%s] of type %s projected from storage table at position %s has a different name from column [%s] of type %s stored in materialized view definition",
tableFieldName,
tableField.getType(),
index,
viewFieldName,
viewField.getType());
}
if (!tableField.getType().equals(viewField.getType())) {
try {
metadata.getCoercion(viewField.getType(), tableField.getType());
}
catch (TrinoException e) {
throw semanticException(
INVALID_VIEW,
table,
"cannot cast column [%s] of type %s projected from storage table at position %s into column [%s] of type %s stored in view definition",
tableFieldName,
tableField.getType(),
index,
viewFieldName,
viewField.getType());
}
}
}
return tableFields;
}
private List<Field> analyzeTableOutputFields(Table table, QualifiedObjectName tableName, TableSchema tableSchema, Map<String, ColumnHandle> columnHandles)
{
// TODO: discover columns lazily based on where they are needed (to support connectors that can't enumerate all tables)
ImmutableList.Builder<Field> fields = ImmutableList.builder();
for (ColumnSchema column : tableSchema.columns()) {
Field field = Field.newQualified(
table.getName(),
Optional.of(column.getName()),
column.getType(),
column.isHidden(),
Optional.of(tableName),
Optional.of(column.getName()),
false);
fields.add(field);
ColumnHandle columnHandle = columnHandles.get(column.getName());
checkArgument(columnHandle != null, "Unknown field %s", field);
analysis.setColumn(field, columnHandle);
analysis.addSourceColumns(field, ImmutableSet.of(new SourceColumn(tableName, column.getName())));
}
return fields.build();
}
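// Illustrative MATCH_RECOGNIZE invocation exercising the clauses analyzed below:
//   SELECT * FROM orders MATCH_RECOGNIZE (
//     PARTITION BY custkey
//     ORDER BY orderdate
//     MEASURES A.totalprice AS starting_price
//     PATTERN (A B+)
//     DEFINE B AS totalprice > PREV(totalprice))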
@Override
protected Scope visitPatternRecognitionRelation(PatternRecognitionRelation relation, Optional<Scope> scope)
{
Scope inputScope = process(relation.getInput(), scope);
// MATCH_RECOGNIZE cannot be applied to a polymorphic table function (SQL standard ISO/IEC 9075-2, 7.6 <table reference>, p. 409)
validateNoNestedTableFunction(relation.getInput(), "row pattern matching");
// check that input table column names are not ambiguous
// Note: This check is not compliant with SQL identifier semantics. Quoted identifiers should have different comparison rules than unquoted identifiers.
// However, field names do not carry information about quotation, so every comparison is case-insensitive. For example, fields named
// 'a' and 'A' (quoted) should be considered non-ambiguous, but their names are compared case-insensitively and cause a failure as ambiguous.
Set<String> inputNames = new HashSet<>();
for (Field field : inputScope.getRelationType().getAllFields()) {
field.getName().ifPresent(name -> {
if (!inputNames.add(name.toUpperCase(ENGLISH))) {
throw semanticException(AMBIGUOUS_NAME, relation.getInput(), "ambiguous column: %s in row pattern input relation", name);
}
});
}
// analyze PARTITION BY
for (Expression expression : relation.getPartitionBy()) {
// The PARTITION BY clause is a list of columns of the row pattern input table.
validateAndGetInputField(expression, inputScope);
Type type = analyzeExpression(expression, inputScope).getType(expression);
if (!type.isComparable()) {
throw semanticException(TYPE_MISMATCH, expression, "%s is not comparable, and therefore cannot be used in PARTITION BY", type);
}
}
// analyze ORDER BY
for (SortItem sortItem : getSortItemsFromOrderBy(relation.getOrderBy())) {
// The ORDER BY clause is a list of columns of the row pattern input table.
Expression expression = sortItem.getSortKey();
validateAndGetInputField(expression, inputScope);
Type type = analyzeExpression(expression, inputScope).getType(sortItem.getSortKey());
if (!type.isOrderable()) {
throw semanticException(TYPE_MISMATCH, sortItem, "%s is not orderable, and therefore cannot be used in ORDER BY", type);
}
}
// analyze pattern recognition clauses
PatternRecognitionAnalysis patternRecognitionAnalysis = PatternRecognitionAnalyzer.analyze(
relation.getSubsets(),
relation.getVariableDefinitions(),
relation.getMeasures(),
relation.getPattern(),
relation.getAfterMatchSkipTo());
relation.getAfterMatchSkipTo()
.flatMap(SkipTo::getIdentifier)
.ifPresent(label -> analysis.addResolvedLabel(label, label.getCanonicalValue()));
for (SubsetDefinition subset : relation.getSubsets()) {
analysis.addResolvedLabel(subset.getName(), subset.getName().getCanonicalValue());
analysis.addSubsetLabels(
subset,
subset.getIdentifiers().stream()
.map(Identifier::getCanonicalValue)
.collect(Collectors.toSet()));
}
analysis.setUndefinedLabels(relation.getPattern(), patternRecognitionAnalysis.undefinedLabels());
analysis.setRanges(patternRecognitionAnalysis.ranges());
PatternRecognitionAnalyzer.validateNoPatternSearchMode(relation.getPatternSearchMode());
PatternRecognitionAnalyzer.validatePatternExclusions(relation.getRowsPerMatch(), relation.getPattern());
// Notes on potential name ambiguity between pattern labels and other identifiers:
// Labels are allowed in expressions of MEASURES and DEFINE clauses. In those expressions, qualifying column names with table name is not allowed.
// Theoretically, a user might define pattern label "T" where the input table name is "T". Then a dereference "T.column" would refer to:
// - input table's column, if it was in PARTITION BY or ORDER BY clause,
// - subset of rows matched with label "T", if it was in MEASURES or DEFINE clause.
// A check could be added to catch such a non-intuitive situation and produce a warning.
// Similarly, it is possible to define pattern label with the same name as some input column. However, this causes no ambiguity, as labels can only
// appear as column name's prefix, and column names in pattern recognition context cannot be dereferenced.
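// Illustrative example of the ambiguity described above (hypothetical names):
//   SELECT * FROM t MATCH_RECOGNIZE (
//       ORDER BY t.x
//       MEASURES FIRST(t.x) AS m
//       PATTERN (t+)
//       DEFINE t AS true)
// In ORDER BY, "t.x" resolves to column "x" of table "t"; in MEASURES and DEFINE it refers
// to the rows matched with label "t".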
// analyze expressions in MEASURES and DEFINE (with set of all labels passed as context)
for (VariableDefinition variableDefinition : relation.getVariableDefinitions()) {
Expression expression = variableDefinition.getExpression();
ExpressionAnalysis expressionAnalysis = analyzePatternRecognitionExpression(expression, inputScope, patternRecognitionAnalysis.allLabels());
analysis.recordSubqueries(relation, expressionAnalysis);
analysis.addResolvedLabel(variableDefinition.getName(), variableDefinition.getName().getCanonicalValue());
Type type = expressionAnalysis.getType(expression);
if (!type.equals(BOOLEAN)) {
throw semanticException(TYPE_MISMATCH, expression, "Expression defining a label must be boolean (actual type: %s)", type);
}
}
ImmutableMap.Builder<NodeRef<Expression>, Type> measureTypesBuilder = ImmutableMap.builder();
for (MeasureDefinition measureDefinition : relation.getMeasures()) {
Expression expression = measureDefinition.getExpression();
ExpressionAnalysis expressionAnalysis = analyzePatternRecognitionExpression(expression, inputScope, patternRecognitionAnalysis.allLabels());
analysis.recordSubqueries(relation, expressionAnalysis);
analysis.addResolvedLabel(measureDefinition.getName(), measureDefinition.getName().getCanonicalValue());
measureTypesBuilder.put(NodeRef.of(expression), expressionAnalysis.getType(expression));
}
Map<NodeRef<Expression>, Type> measureTypes = measureTypesBuilder.buildOrThrow();
// create output scope
// ONE ROW PER MATCH: PARTITION BY columns, then MEASURES columns in order of declaration
// ALL ROWS PER MATCH: PARTITION BY columns, ORDER BY columns, MEASURES columns, then any remaining input table columns in order of declaration
// Note: row pattern input table name should not be exposed on output
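// For example (illustrative), with PARTITION BY p, ORDER BY o, one measure m, and input columns (p, o, x):
//   ONE ROW PER MATCH  -> output columns (p, m)
//   ALL ROWS PER MATCH -> output columns (p, o, m, x)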
PatternRecognitionRelation.RowsPerMatch rowsPerMatch = relation.getRowsPerMatch().orElse(ONE);
boolean oneRowPerMatch = rowsPerMatch == PatternRecognitionRelation.RowsPerMatch.ONE ||
rowsPerMatch == PatternRecognitionRelation.RowsPerMatch.WINDOW;
ImmutableSet.Builder<Field> inputFieldsOnOutputBuilder = ImmutableSet.builder();
ImmutableList.Builder<Field> outputFieldsBuilder = ImmutableList.builder();
for (Expression expression : relation.getPartitionBy()) {
Field inputField = validateAndGetInputField(expression, inputScope);
outputFieldsBuilder.add(unqualifiedVisible(inputField));
inputFieldsOnOutputBuilder.add(inputField);
}
if (!oneRowPerMatch) {
for (SortItem sortItem : getSortItemsFromOrderBy(relation.getOrderBy())) {
Field inputField = validateAndGetInputField(sortItem.getSortKey(), inputScope);
outputFieldsBuilder.add(unqualifiedVisible(inputField));
inputFieldsOnOutputBuilder.add(inputField); // might have duplicates (ORDER BY a ASC, a DESC)
}
}
for (MeasureDefinition measureDefinition : relation.getMeasures()) {
outputFieldsBuilder.add(Field.newUnqualified(
measureDefinition.getName().getValue(),
measureTypes.get(NodeRef.of(measureDefinition.getExpression()))));
}
if (!oneRowPerMatch) {
Set<Field> inputFieldsOnOutput = inputFieldsOnOutputBuilder.build();
for (Field inputField : inputScope.getRelationType().getAllFields()) {
if (!inputFieldsOnOutput.contains(inputField)) {
outputFieldsBuilder.add(unqualified(inputField));
}
}
}
// pattern recognition output must have at least 1 column
List<Field> outputFields = outputFieldsBuilder.build();
if (outputFields.isEmpty()) {
throw semanticException(TABLE_HAS_NO_COLUMNS, relation, "pattern recognition output table has no columns");
}
return createAndAssignScope(relation, scope, outputFields);
}
private Field validateAndGetInputField(Expression expression, Scope inputScope)
{
QualifiedName qualifiedName;
if (expression instanceof Identifier) {
qualifiedName = QualifiedName.of(ImmutableList.of((Identifier) expression));
}
else if (expression instanceof DereferenceExpression) {
qualifiedName = getQualifiedName((DereferenceExpression) expression);
}
else {
throw semanticException(INVALID_COLUMN_REFERENCE, expression, "Expected column reference. Actual: %s", expression);
}
Optional<ResolvedField> field = inputScope.tryResolveField(expression, qualifiedName);
if (field.isEmpty() || !field.get().isLocal()) {
throw semanticException(COLUMN_NOT_FOUND, expression, "Column %s is not present in the input relation", expression);
}
return field.get().getField();
}
private Field unqualifiedVisible(Field field)
{
return new Field(
Optional.empty(),
field.getName(),
field.getType(),
false,
field.getOriginTable(),
field.getOriginColumnName(),
field.isAliased());
}
private Field unqualified(Field field)
{
return new Field(
Optional.empty(),
field.getName(),
field.getType(),
field.isHidden(),
field.getOriginTable(),
field.getOriginColumnName(),
field.isAliased());
}
private ExpressionAnalysis analyzePatternRecognitionExpression(Expression expression, Scope scope, Set<String> labels)
{
List<Expression> nestedWindowExpressions = extractWindowExpressions(ImmutableList.of(expression));
if (!nestedWindowExpressions.isEmpty()) {
throw semanticException(NESTED_WINDOW, nestedWindowExpressions.getFirst(), "Cannot nest window functions or row pattern measures inside pattern recognition expressions");
}
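// For example (illustrative), a definition such as
//   DEFINE A AS rank() OVER (ORDER BY x) = 1
// is rejected by the check above, because window functions cannot be nested inside
// pattern recognition expressions.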
return ExpressionAnalyzer.analyzePatternRecognitionExpression(
session,
plannerContext,
statementAnalyzerFactory,
accessControl,
scope,
analysis,
expression,
warningCollector,
labels);
}
@Override
protected Scope visitAliasedRelation(AliasedRelation relation, Optional<Scope> scope)
{
analysis.setRelationName(relation, QualifiedName.of(ImmutableList.of(relation.getAlias())));
analysis.addAliased(relation.getRelation());
Scope relationScope = process(relation.getRelation(), scope);
RelationType relationType = relationScope.getRelationType();
// special-handle table function invocation
if (relation.getRelation() instanceof TableFunctionInvocation function) {
return createAndAssignScope(relation, scope, aliasTableFunctionInvocation(relation, relationType, function));
}
// todo this check should be inside of TupleDescriptor.withAlias, but the exception needs the node object
if (relation.getColumnNames() != null) {
int totalColumns = relationType.getVisibleFieldCount();
if (totalColumns != relation.getColumnNames().size()) {
throw semanticException(MISMATCHED_COLUMN_ALIASES, relation, "Column alias list has %s entries but '%s' has %s columns available", relation.getColumnNames().size(), relation.getAlias(), totalColumns);
}
}
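// For example (illustrative), SELECT * FROM (VALUES (1, 2)) AS t (a) fails the check above,
// because the alias list has 1 entry while the relation produces 2 columns.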
List<String> aliases = null;
Collection<Field> inputFields = relationType.getAllFields();
if (relation.getColumnNames() != null) {
aliases = relation.getColumnNames().stream()
.map(Identifier::getValue)
.collect(Collectors.toList());
// hidden fields are not exposed when there are column aliases
inputFields = relationType.getVisibleFields();
}
RelationType descriptor = relationType.withAlias(relation.getAlias().getValue(), aliases);
checkArgument(inputFields.size() == descriptor.getAllFieldCount(),
"Expected %s fields, got %s",
descriptor.getAllFieldCount(),
inputFields.size());
Streams.forEachPair(
descriptor.getAllFields().stream(),
inputFields.stream(),
(newField, field) -> analysis.addSourceColumns(newField, analysis.getSourceColumns(field)));
return createAndAssignScope(relation, scope, descriptor);
}
// As described by the SQL standard ISO/IEC 9075-2, 7.6 <table reference>, p. 409
private RelationType aliasTableFunctionInvocation(AliasedRelation relation, RelationType relationType, TableFunctionInvocation function)
{
TableFunctionInvocationAnalysis tableFunctionAnalysis = analysis.getTableFunctionAnalysis(function);
int properColumnsCount = tableFunctionAnalysis.getProperColumnsCount();
// check that relation alias is different from range variables of all table arguments
tableFunctionAnalysis.getTableArgumentAnalyses().stream()
.map(TableArgumentAnalysis::getName)
.filter(Optional::isPresent)
.map(Optional::get)
.filter(name -> name.hasSuffix(QualifiedName.of(ImmutableList.of(relation.getAlias()))))
.findFirst()
.ifPresent(name -> {
throw semanticException(DUPLICATE_RANGE_VARIABLE, relation.getAlias(), "Relation alias: %s is a duplicate of input table name: %s", relation.getAlias(), name);
});
// build the new relation type. the alias must be applied to the proper columns only,
// and it must not shadow the range variables exposed by the table arguments
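// Illustrative example, assuming the built-in `sequence` table function is available:
//   SELECT * FROM TABLE(sequence(start => 1, stop => 3)) AS s (n)
// aliases only the function's proper column; range variables of table arguments,
// if any, remain visible under their own names.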
ImmutableList.Builder<Field> fieldsBuilder = ImmutableList.builder();
// first, put the table function's proper columns with alias
if (relation.getColumnNames() != null) {
// check that number of column aliases matches number of table function's proper columns
if (properColumnsCount != relation.getColumnNames().size()) {
throw semanticException(MISMATCHED_COLUMN_ALIASES, relation, "Column alias list has %s entries but table function has %s proper columns", relation.getColumnNames().size(), properColumnsCount);
}
for (int i = 0; i < properColumnsCount; i++) {
// proper columns are not hidden, so we don't need to skip hidden fields
Field field = relationType.getFieldByIndex(i);
fieldsBuilder.add(Field.newQualified(
QualifiedName.of(ImmutableList.of(relation.getAlias())),
Optional.of(relation.getColumnNames().get(i).getCanonicalValue()), // although the canonical name is recorded, fields are resolved case-insensitive
field.getType(),
field.isHidden(),
field.getOriginTable(),
field.getOriginColumnName(),
field.isAliased()));
}
}
else {
for (int i = 0; i < properColumnsCount; i++) {
Field field = relationType.getFieldByIndex(i);
fieldsBuilder.add(Field.newQualified(
QualifiedName.of(ImmutableList.of(relation.getAlias())),
field.getName(),
field.getType(),
field.isHidden(),
field.getOriginTable(),
field.getOriginColumnName(),
field.isAliased()));
}
}
// append remaining fields. They are not being aliased, so hidden fields are included
for (int i = properColumnsCount; i < relationType.getAllFieldCount(); i++) {
fieldsBuilder.add(relationType.getFieldByIndex(i));
}
List<Field> fields = fieldsBuilder.build();
// check that there are no duplicate names within the table function's proper columns
Set<String> names = new HashSet<>();
fields.subList(0, properColumnsCount).stream()
.map(Field::getName)
.filter(Optional::isPresent)
.map(Optional::get)
// field names are resolved case-insensitive
.map(name -> name.toLowerCase(ENGLISH))
.forEach(name -> {
if (!names.add(name)) {
throw semanticException(DUPLICATE_COLUMN_NAME, relation.getRelation(), "Duplicate name of table function proper column: %s", name);
}
});
return new RelationType(fields);
}
@Override
protected Scope visitSampledRelation(SampledRelation relation, Optional<Scope> scope)
{
Expression samplePercentage = relation.getSamplePercentage();
if (!NamesExtractor.extractNames(samplePercentage, analysis.getColumnReferences()).isEmpty()) {
throw semanticException(EXPRESSION_NOT_CONSTANT, samplePercentage, "Sample percentage cannot contain column references");
}
Map<NodeRef<Expression>, Type> expressionTypes = ExpressionAnalyzer.analyzeExpressions(
session,
plannerContext,
statementAnalyzerFactory,
accessControl,
ImmutableList.of(samplePercentage),
analysis.getParameters(),
WarningCollector.NOOP,
analysis.getQueryType())
.getExpressionTypes();
Type samplePercentageType = expressionTypes.get(NodeRef.of(samplePercentage));
if (!typeCoercion.canCoerce(samplePercentageType, DOUBLE)) {
throw semanticException(TYPE_MISMATCH, samplePercentage, "Sample percentage should be a numeric expression");
}
if (samplePercentageType == UNKNOWN) {
throw semanticException(INVALID_ARGUMENTS, samplePercentage, "Sample percentage cannot be NULL");
}
Object samplePercentageObject = evaluateConstant(samplePercentage, samplePercentageType, plannerContext, session, accessControl);
if (samplePercentageObject == null) {
throw semanticException(INVALID_ARGUMENTS, samplePercentage, "Sample percentage cannot be NULL");
}
double samplePercentageValue = (double) coerce(samplePercentageType, samplePercentageObject, DOUBLE);
if (samplePercentageValue < 0.0) {
throw semanticException(NUMERIC_VALUE_OUT_OF_RANGE, samplePercentage, "Sample percentage must be greater than or equal to 0");
}
if (samplePercentageValue > 100.0) {
throw semanticException(NUMERIC_VALUE_OUT_OF_RANGE, samplePercentage, "Sample percentage must be less than or equal to 100");
}
analysis.setSampleRatio(relation, samplePercentageValue / 100);
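// For example (illustrative), TABLESAMPLE BERNOULLI (25) yields a sample ratio of 0.25.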
Scope relationScope = process(relation.getRelation(), scope);
// TABLESAMPLE cannot be applied to a polymorphic table function (SQL standard ISO/IEC 9075-2, 7.6 <table reference>, p. 409)
// Note: the method below finds a table function immediately nested in SampledRelation, possibly aliased.
// A table function could also be nested with an intervening PatternRecognitionRelation.
// That case is handled in visitPatternRecognitionRelation().
validateNoNestedTableFunction(relation.getRelation(), "sample");
return createAndAssignScope(relation, scope, relationScope.getRelationType());
}
// this method should run after the `base` relation is processed, so that it is
// determined whether the table function is polymorphic
private void validateNoNestedTableFunction(Relation base, String context)
{
TableFunctionInvocation tableFunctionInvocation = null;
if (base instanceof TableFunctionInvocation invocation) {
tableFunctionInvocation = invocation;
}
else if (base instanceof AliasedRelation aliasedRelation &&
aliasedRelation.getRelation() instanceof TableFunctionInvocation invocation) {
tableFunctionInvocation = invocation;
}
if (tableFunctionInvocation != null && analysis.isPolymorphicTableFunction(tableFunctionInvocation)) {
throw semanticException(INVALID_TABLE_FUNCTION_INVOCATION, base, "Cannot apply %s to polymorphic table function invocation", context);
}
}
@Override
protected Scope visitTableSubquery(TableSubquery node, Optional<Scope> scope)
{
StatementAnalyzer analyzer = statementAnalyzerFactory.createStatementAnalyzer(analysis, session, warningCollector, CorrelationSupport.ALLOWED);
Scope queryScope = analyzer.analyze(node.getQuery(), scope.orElseThrow());
return createAndAssignScope(node, scope, queryScope.getRelationType());
}
@Override
protected Scope visitQuerySpecification(QuerySpecification node, Optional<Scope> scope)
{
// TODO: extract candidate names from SELECT, WHERE, HAVING, GROUP BY and ORDER BY expressions
// to pass down to analyzeFrom
Scope sourceScope = analyzeFrom(node, scope);
analyzeWindowDefinitions(node, sourceScope);
resolveFunctionCallAndMeasureWindows(node);
node.getWhere().ifPresent(where -> analyzeWhere(node, sourceScope, where));
List<Expression> outputExpressions = analyzeSelect(node, sourceScope);
GroupingSetAnalysis groupByAnalysis = analyzeGroupBy(node, sourceScope, outputExpressions);
analyzeHaving(node, sourceScope);
Scope outputScope = computeAndAssignOutputScope(node, scope, sourceScope);
List<Expression> orderByExpressions = emptyList();
Optional<Scope> orderByScope = Optional.empty();
if (node.getOrderBy().isPresent()) {
OrderBy orderBy = node.getOrderBy().get();
orderByScope = Optional.of(computeAndAssignOrderByScope(orderBy, sourceScope, outputScope));
orderByExpressions = analyzeOrderBy(node, orderBy.getSortItems(), orderByScope.get());
if ((sourceScope.getOuterQueryParent().isPresent() || !isTopLevel) && node.getLimit().isEmpty() && node.getOffset().isEmpty()) {
// not the root scope and ORDER BY is ineffective
analysis.markRedundantOrderBy(orderBy);
warningCollector.add(new TrinoWarning(REDUNDANT_ORDER_BY, "ORDER BY in subquery may have no effect"));
}
}
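// For example (illustrative), the inner ORDER BY in
//   SELECT * FROM (SELECT a FROM t ORDER BY a)
// is flagged as redundant above: without LIMIT or OFFSET it does not affect the outer query's result.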
analysis.setOrderByExpressions(node, orderByExpressions);
if (node.getOffset().isPresent()) {
analyzeOffset(node.getOffset().get(), outputScope);
}
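// FETCH FIRST ... WITH TIES needs ORDER BY to determine ties; e.g. (illustrative)
//   SELECT a FROM t FETCH FIRST 2 ROWS WITH TIES
// is rejected below, while the same query with ORDER BY a is accepted.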
if (node.getLimit().isPresent()) {
boolean requiresOrderBy = analyzeLimit(node.getLimit().get(), outputScope);
if (requiresOrderBy && node.getOrderBy().isEmpty()) {
throw semanticException(MISSING_ORDER_BY, node.getLimit().get(), "FETCH FIRST WITH TIES clause requires ORDER BY");
}
}
List<Expression> sourceExpressions = new ArrayList<>();
analysis.getSelectExpressions(node).stream()
.map(SelectExpression::getExpression)
.forEach(sourceExpressions::add);
node.getHaving().ifPresent(sourceExpressions::add);
for (WindowDefinition windowDefinition : node.getWindows()) {
WindowSpecification window = windowDefinition.getWindow();
sourceExpressions.addAll(window.getPartitionBy());
getSortItemsFromOrderBy(window.getOrderBy()).stream()
.map(SortItem::getSortKey)
.forEach(sourceExpressions::add);
if (window.getFrame().isPresent()) {
WindowFrame frame = window.getFrame().get();
frame.getStart().getValue()
.ifPresent(sourceExpressions::add);
frame.getEnd()
.flatMap(FrameBound::getValue)
.ifPresent(sourceExpressions::add);
frame.getMeasures().stream()
.map(MeasureDefinition::getExpression)
.forEach(sourceExpressions::add);
frame.getVariableDefinitions().stream()
.map(VariableDefinition::getExpression)
.forEach(sourceExpressions::add);
}
}
analyzeGroupingOperations(node, sourceExpressions, orderByExpressions);
analyzeAggregations(node, sourceScope, orderByScope, groupByAnalysis, sourceExpressions, orderByExpressions);
analyzeWindowFunctionsAndMeasures(node, outputExpressions, orderByExpressions);
if (analysis.isAggregation(node) && node.getOrderBy().isPresent()) {
ImmutableList.Builder<Expression> aggregates = ImmutableList.<Expression>builder()
.addAll(groupByAnalysis.getOriginalExpressions())
.addAll(extractAggregateFunctions(orderByExpressions, session, functionResolver, accessControl))
.addAll(extractExpressions(orderByExpressions, GroupingOperation.class));
analysis.setOrderByAggregates(node.getOrderBy().get(), aggregates.build());
}
if (node.getOrderBy().isPresent() && node.getSelect().isDistinct()) {
verifySelectDistinct(node, orderByExpressions, outputExpressions, sourceScope, orderByScope.orElseThrow());
}
return outputScope;
}
@Override
protected Scope visitSubqueryExpression(SubqueryExpression node, Optional<Scope> context)
{
return process(node.getQuery(), context);
}
@Override
protected Scope visitSetOperation(SetOperation node, Optional<Scope> scope)
{
checkState(node.getRelations().size() >= 2);
List<RelationType> childrenTypes = node.getRelations().stream()
.map(relation -> process(relation, scope).getRelationType().withOnlyVisibleFields())
.collect(toImmutableList());
String setOperationName = node.getClass().getSimpleName().toUpperCase(ENGLISH);
Type[] outputFieldTypes = childrenTypes.get(0).getVisibleFields().stream()
.map(Field::getType)
.toArray(Type[]::new);
for (RelationType relationType : childrenTypes) {
int outputFieldSize = outputFieldTypes.length;
int descFieldSize = relationType.getVisibleFields().size();
if (outputFieldSize != descFieldSize) {
throw semanticException(
TYPE_MISMATCH,
node,
"%s query has different number of fields: %d, %d",
setOperationName,
outputFieldSize,
descFieldSize);
}
for (int i = 0; i < descFieldSize; i++) {
Type descFieldType = relationType.getFieldByIndex(i).getType();
Optional<Type> commonSuperType = typeCoercion.getCommonSuperType(outputFieldTypes[i], descFieldType);
if (commonSuperType.isEmpty()) {
throw semanticException(
TYPE_MISMATCH,
node,
"column %d in %s query has incompatible types: %s, %s",
i + 1,
setOperationName,
outputFieldTypes[i].getDisplayName(),
descFieldType.getDisplayName());
}
outputFieldTypes[i] = commonSuperType.get();
}
}
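// For example (illustrative), a UNION of an INTEGER column and a BIGINT column unifies to BIGINT,
// the common supertype computed above.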
if (node instanceof Intersect || node instanceof Except || (node instanceof Union && node.isDistinct())) {
for (Type type : outputFieldTypes) {
if (!type.isComparable()) {
throw semanticException(
TYPE_MISMATCH,
node,
"Type %s is not comparable and therefore cannot be used in %s%s",
type,
setOperationName,
node instanceof Union ? " DISTINCT" : "");
}
}
}
Field[] outputDescriptorFields = new Field[outputFieldTypes.length];
RelationType firstDescriptor = childrenTypes.getFirst();
for (int i = 0; i < outputFieldTypes.length; i++) {
Field oldField = firstDescriptor.getFieldByIndex(i);
outputDescriptorFields[i] = new Field(
oldField.getRelationAlias(),
oldField.getName(),
outputFieldTypes[i],
oldField.isHidden(),
oldField.getOriginTable(),
oldField.getOriginColumnName(),
oldField.isAliased());
int index = i; // Variable used in Lambda should be final
analysis.addSourceColumns(
outputDescriptorFields[index],
childrenTypes.stream()
.map(relationType -> relationType.getFieldByIndex(index))
.flatMap(field -> analysis.getSourceColumns(field).stream())
.collect(toImmutableSet()));
}
for (int i = 0; i < node.getRelations().size(); i++) {
Relation relation = node.getRelations().get(i);
RelationType relationType = childrenTypes.get(i);
for (int j = 0; j < relationType.getVisibleFields().size(); j++) {
Type outputFieldType = outputFieldTypes[j];
Type descFieldType = relationType.getFieldByIndex(j).getType();
if (!outputFieldType.equals(descFieldType)) {
analysis.addRelationCoercion(relation, outputFieldTypes);
break;
}
}
}
return createAndAssignScope(node, scope, outputDescriptorFields);
}
@Override
protected Scope visitJoin(Join node, Optional<Scope> scope)
{
JoinCriteria criteria = node.getCriteria().orElse(null);
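// Natural join is not supported; e.g. (illustrative) SELECT * FROM a NATURAL JOIN b
// is rejected by the check below, and an explicit ON or USING clause is required.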
if (criteria instanceof NaturalJoin) {
throw semanticException(NOT_SUPPORTED, node, "Natural join not supported");
}
Scope left = process(node.getLeft(), scope);
Scope right = process(node.getRight(), isLateralRelation(node.getRight()) ? Optional.of(left) : scope);
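// For a lateral right side, the left scope is visible; e.g. (illustrative, hypothetical table t):
//   SELECT * FROM t CROSS JOIN LATERAL (SELECT t.x + 1 AS y)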
if (isLateralRelation(node.getRight())) {
if (node.getType() == RIGHT || node.getType() == FULL) {
Stream<Node> leftScopeReferences = getReferencesToScope(node.getRight(), analysis, left);
leftScopeReferences.findFirst().ifPresent(reference -> {
throw semanticException(INVALID_COLUMN_REFERENCE, reference, "LATERAL reference not allowed in %s JOIN", node.getType().name());
});
}
if (isUnnestRelation(node.getRight())) {
if (criteria != null) {
if (!(criteria instanceof JoinOn) || !((JoinOn) criteria).getExpression().equals(TRUE_LITERAL)) {
throw semanticException(
NOT_SUPPORTED,
criteria instanceof JoinOn ? ((JoinOn) criteria).getExpression() : node,
"%s JOIN involving UNNEST is only supported with condition ON TRUE",
node.getType().name());
}
}
}
else if (isJsonTable(node.getRight())) {
if (criteria != null) {
if (!(criteria instanceof JoinOn) || !((JoinOn) criteria).getExpression().equals(TRUE_LITERAL)) {
throw semanticException(
NOT_SUPPORTED,
criteria instanceof JoinOn ? ((JoinOn) criteria).getExpression() : node,
"%s JOIN involving JSON_TABLE is only supported with condition ON TRUE",
node.getType().name());
}
}
}
else if (node.getType() == FULL) {
if (!(criteria instanceof JoinOn) || !((JoinOn) criteria).getExpression().equals(TRUE_LITERAL)) {
throw semanticException(
NOT_SUPPORTED,
criteria instanceof JoinOn ? ((JoinOn) criteria).getExpression() : node,
"FULL JOIN involving LATERAL relation is only supported with condition ON TRUE");
}
}
}
if (criteria instanceof JoinUsing) {
return analyzeJoinUsing(node, ((JoinUsing) criteria).getColumns(), scope, left, right);
}
Scope output = createAndAssignScope(node, scope, left.getRelationType().joinWith(right.getRelationType()));
if (node.getType() == Join.Type.CROSS || node.getType() == Join.Type.IMPLICIT) {
return output;
}
if (criteria instanceof JoinOn) {
Expression expression = ((JoinOn) criteria).getExpression();
verifyNoAggregateWindowOrGroupingFunctions(session, functionResolver, accessControl, expression, "JOIN clause");
// Need to register coercions in case when join criteria requires coercion (e.g. join on char(1) = char(2))
// Correlations are currently only supported in the join criteria for INNER joins
ExpressionAnalysis expressionAnalysis = analyzeExpression(expression, output, node.getType() == INNER ? CorrelationSupport.ALLOWED : CorrelationSupport.DISALLOWED);
Type clauseType = expressionAnalysis.getType(expression);
if (!clauseType.equals(BOOLEAN)) {
if (!clauseType.equals(UNKNOWN)) {
throw semanticException(TYPE_MISMATCH, expression, "JOIN ON clause must evaluate to a boolean: actual type %s", clauseType);
}
// coerce expression to boolean
analysis.addCoercion(expression, BOOLEAN);
}
analysis.recordSubqueries(node, expressionAnalysis);
analysis.setJoinCriteria(node, expression);
}
else {
throw new UnsupportedOperationException("Unsupported join criteria: " + criteria.getClass().getName());
}
return output;
}
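// Analyzes UPDATE statements, e.g. (illustrative, hypothetical table):
//   UPDATE orders SET status = 'shipped' WHERE orderkey = 1
// Materialized views and views cannot be updated, and each SET target must be an existing column.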
@Override
protected Scope visitUpdate(Update update, Optional<Scope> scope)
{
Table table = update.getTable();
QualifiedObjectName originalName = createQualifiedObjectName(session, table, table.getName());
if (metadata.isMaterializedView(session, originalName)) {
throw semanticException(NOT_SUPPORTED, update, "Updating materialized views is not supported");
}
if (metadata.isView(session, originalName)) {
throw semanticException(NOT_SUPPORTED, update, "Updating views is not supported");
}
analysis.setUpdateType("UPDATE");
RedirectionAwareTableHandle redirection = metadata.getRedirectionAwareTableHandle(session, originalName);
QualifiedObjectName tableName = redirection.redirectedTableName().orElse(originalName);
TableHandle handle = redirection.tableHandle()
.orElseThrow(() -> semanticException(TABLE_NOT_FOUND, table, "Table '%s' does not exist", tableName));
TableSchema tableSchema = metadata.getTableSchema(session, handle);
List<ColumnSchema> allColumns = tableSchema.columns();
Map<String, ColumnSchema> columns = allColumns.stream()
.collect(toImmutableMap(ColumnSchema::getName, Function.identity()));
for (UpdateAssignment assignment : update.getAssignments()) {
String columnName = assignment.getName().getValue();
if (!columns.containsKey(columnName)) {
throw semanticException(COLUMN_NOT_FOUND, assignment.getName(), "The UPDATE SET target column %s doesn't exist", columnName);
}
}
Set<String> assignmentTargets = update.getAssignments().stream()
.map(assignment -> assignment.getName().getValue())
.collect(toImmutableSet());
accessControl.checkCanUpdateTableColumns(session.toSecurityContext(), tableName, assignmentTargets);
if (!accessControl.getRowFilters(session.toSecurityContext(), tableName).isEmpty()) {
throw semanticException(NOT_SUPPORTED, update, "Updating a table with a row filter is not supported");
}
// TODO: how to deal with connectors that need to see the pre-image of rows to perform the update without
// flowing that data through the masking logic
if (!accessControl.getColumnMasks(session.toSecurityContext(), tableName, tableSchema.columns()).isEmpty()) {
throw semanticException(NOT_SUPPORTED, update, "Updating a table with column masks is not supported");
}
List<ColumnSchema> updatedColumnSchemas = allColumns.stream()
.filter(column -> assignmentTargets.contains(column.getName()))
.collect(toImmutableList());
analysis.setUpdatedColumns(updatedColumnSchemas);
Map<String, ColumnHandle> allColumnHandles = metadata.getColumnHandles(session, handle);
List<ColumnHandle> updatedColumnHandles = updatedColumnSchemas.stream()
.map(columnSchema -> allColumnHandles.get(columnSchema.getName()))
.collect(toImmutableList());
// Analyzer checks for select permissions but UPDATE has a separate permission, so disable access checks
StatementAnalyzer analyzer = statementAnalyzerFactory
.withSpecializedAccessControl(new AllowAllAccessControl())
.createStatementAnalyzer(analysis, session, warningCollector, CorrelationSupport.ALLOWED);
Scope tableScope = analyzer.analyzeForUpdate(table, scope, UpdateKind.UPDATE);
update.getWhere().ifPresent(where -> analyzeWhere(update, tableScope, where));
analyzeCheckConstraints(table, tableName, tableScope, tableSchema.tableSchema().getCheckConstraints());
analysis.registerTable(table, redirection.tableHandle(), tableName, session.getIdentity().getUser(), tableScope, Optional.empty());
ImmutableList.Builder<ExpressionAnalysis> analysesBuilder = ImmutableList.builder();
ImmutableList.Builder<Type> expressionTypesBuilder = ImmutableList.builder();
ImmutableMap.Builder<String, Set<SourceColumn>> sourceColumnsByColumnNameBuilder = ImmutableMap.builder();
for (UpdateAssignment assignment : update.getAssignments()) {
String targetColumnName = assignment.getName().getValue();
Expression expression = assignment.getValue();
ExpressionAnalysis expressionAnalysis = analyzeExpression(expression, tableScope);
analysesBuilder.add(expressionAnalysis);
expressionTypesBuilder.add(expressionAnalysis.getType(expression));
Set<SourceColumn> sourceColumns = expressionAnalysis.getSubqueries().stream()
.map(query -> analyze(query.getNode(), tableScope))
.flatMap(subqueryScope -> subqueryScope.getRelationType().getVisibleFields().stream())
.flatMap(field -> analysis.getSourceColumns(field).stream())
.collect(toImmutableSet());
sourceColumnsByColumnNameBuilder.put(targetColumnName, sourceColumns);
}
List<ExpressionAnalysis> analyses = analysesBuilder.build();
List<Type> expressionTypes = expressionTypesBuilder.build();
Map<String, Set<SourceColumn>> sourceColumnsByColumnName = sourceColumnsByColumnNameBuilder.buildOrThrow();
List<Type> tableTypes = update.getAssignments().stream()
.map(assignment -> requireNonNull(columns.get(assignment.getName().getValue())))
.map(ColumnSchema::getType)
.collect(toImmutableList());
if (!typesMatchForInsert(tableTypes, expressionTypes)) {
throw semanticException(TYPE_MISMATCH,
update,
"UPDATE table column types don't match SET expressions: Table: [%s], Expressions: [%s]",
Joiner.on(", ").join(tableTypes),
Joiner.on(", ").join(expressionTypes));
}
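// For example (illustrative), SET price = 1 against a DOUBLE column records an INTEGER -> DOUBLE
// coercion for the assigned expression in the loop below.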
for (int index = 0; index < expressionTypes.size(); index++) {
Expression expression = update.getAssignments().get(index).getValue();
Type expressionType = expressionTypes.get(index);
Type targetType = tableTypes.get(index);
if (!targetType.equals(expressionType)) {
analysis.addCoercion(expression, targetType);
}
analysis.recordSubqueries(update, analyses.get(index));
}
analysis.setUpdateTarget(
handle.catalogHandle().getVersion(),
tableName,
Optional.of(table),
Optional.of(updatedColumnSchemas.stream()
.map(column -> new OutputColumn(
new Column(column.getName(), column.getType().toString()),
sourceColumnsByColumnName.getOrDefault(column.getName(), ImmutableSet.of())))
.collect(toImmutableList())));
createMergeAnalysis(table, handle, tableSchema, tableScope, tableScope, ImmutableList.of(updatedColumnHandles));
return createAndAssignScope(update, scope, Field.newUnqualified("rows", BIGINT));
}
@Override
protected Scope visitMerge(Merge merge, Optional<Scope>