org.apache.flink.table.api.internal.TableEnvironmentImpl (flink-table-api-java)
This module contains the Table/SQL API for writing table programs
within the table ecosystem using the Java programming language.
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.internal;
import org.apache.flink.annotation.Internal;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.api.dag.Pipeline;
import org.apache.flink.api.dag.Transformation;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.execution.JobClient;
import org.apache.flink.table.api.CompiledPlan;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.ExplainDetail;
import org.apache.flink.table.api.ExplainFormat;
import org.apache.flink.table.api.PlanReference;
import org.apache.flink.table.api.ResultKind;
import org.apache.flink.table.api.SqlParserException;
import org.apache.flink.table.api.StatementSet;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableConfig;
import org.apache.flink.table.api.TableDescriptor;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableException;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.ValidationException;
import org.apache.flink.table.api.config.TableConfigOptions;
import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogBaseTable;
import org.apache.flink.table.catalog.CatalogFunction;
import org.apache.flink.table.catalog.CatalogFunctionImpl;
import org.apache.flink.table.catalog.CatalogManager;
import org.apache.flink.table.catalog.CatalogPartition;
import org.apache.flink.table.catalog.CatalogPartitionSpec;
import org.apache.flink.table.catalog.Column;
import org.apache.flink.table.catalog.ConnectorCatalogTable;
import org.apache.flink.table.catalog.ContextResolvedTable;
import org.apache.flink.table.catalog.FunctionCatalog;
import org.apache.flink.table.catalog.FunctionLanguage;
import org.apache.flink.table.catalog.GenericInMemoryCatalog;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.catalog.ObjectPath;
import org.apache.flink.table.catalog.QueryOperationCatalogView;
import org.apache.flink.table.catalog.ResolvedCatalogTable;
import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.flink.table.catalog.UnresolvedIdentifier;
import org.apache.flink.table.catalog.WatermarkSpec;
import org.apache.flink.table.catalog.exceptions.CatalogException;
import org.apache.flink.table.catalog.exceptions.DatabaseAlreadyExistException;
import org.apache.flink.table.catalog.exceptions.DatabaseNotEmptyException;
import org.apache.flink.table.catalog.exceptions.DatabaseNotExistException;
import org.apache.flink.table.catalog.exceptions.FunctionAlreadyExistException;
import org.apache.flink.table.catalog.exceptions.FunctionNotExistException;
import org.apache.flink.table.catalog.exceptions.TableAlreadyExistException;
import org.apache.flink.table.catalog.exceptions.TableNotExistException;
import org.apache.flink.table.delegation.Executor;
import org.apache.flink.table.delegation.ExecutorFactory;
import org.apache.flink.table.delegation.ExtendedOperationExecutor;
import org.apache.flink.table.delegation.InternalPlan;
import org.apache.flink.table.delegation.Parser;
import org.apache.flink.table.delegation.Planner;
import org.apache.flink.table.expressions.ApiExpressionUtils;
import org.apache.flink.table.expressions.Expression;
import org.apache.flink.table.factories.FactoryUtil;
import org.apache.flink.table.factories.PlannerFactoryUtil;
import org.apache.flink.table.functions.ScalarFunction;
import org.apache.flink.table.functions.SqlLikeUtils;
import org.apache.flink.table.functions.UserDefinedFunction;
import org.apache.flink.table.functions.UserDefinedFunctionHelper;
import org.apache.flink.table.module.Module;
import org.apache.flink.table.module.ModuleEntry;
import org.apache.flink.table.module.ModuleManager;
import org.apache.flink.table.operations.CollectModifyOperation;
import org.apache.flink.table.operations.CompileAndExecutePlanOperation;
import org.apache.flink.table.operations.CreateTableASOperation;
import org.apache.flink.table.operations.DeleteFromFilterOperation;
import org.apache.flink.table.operations.DescribeTableOperation;
import org.apache.flink.table.operations.ExplainOperation;
import org.apache.flink.table.operations.LoadModuleOperation;
import org.apache.flink.table.operations.ModifyOperation;
import org.apache.flink.table.operations.NopOperation;
import org.apache.flink.table.operations.Operation;
import org.apache.flink.table.operations.QueryOperation;
import org.apache.flink.table.operations.ShowCatalogsOperation;
import org.apache.flink.table.operations.ShowColumnsOperation;
import org.apache.flink.table.operations.ShowCreateTableOperation;
import org.apache.flink.table.operations.ShowCreateViewOperation;
import org.apache.flink.table.operations.ShowCurrentCatalogOperation;
import org.apache.flink.table.operations.ShowCurrentDatabaseOperation;
import org.apache.flink.table.operations.ShowDatabasesOperation;
import org.apache.flink.table.operations.ShowFunctionsOperation;
import org.apache.flink.table.operations.ShowModulesOperation;
import org.apache.flink.table.operations.ShowPartitionsOperation;
import org.apache.flink.table.operations.ShowTablesOperation;
import org.apache.flink.table.operations.ShowViewsOperation;
import org.apache.flink.table.operations.SinkModifyOperation;
import org.apache.flink.table.operations.SourceQueryOperation;
import org.apache.flink.table.operations.StatementSetOperation;
import org.apache.flink.table.operations.TableSourceQueryOperation;
import org.apache.flink.table.operations.UnloadModuleOperation;
import org.apache.flink.table.operations.UseCatalogOperation;
import org.apache.flink.table.operations.UseDatabaseOperation;
import org.apache.flink.table.operations.UseModulesOperation;
import org.apache.flink.table.operations.command.AddJarOperation;
import org.apache.flink.table.operations.command.ExecutePlanOperation;
import org.apache.flink.table.operations.command.ShowJarsOperation;
import org.apache.flink.table.operations.ddl.AddPartitionsOperation;
import org.apache.flink.table.operations.ddl.AlterCatalogFunctionOperation;
import org.apache.flink.table.operations.ddl.AlterDatabaseOperation;
import org.apache.flink.table.operations.ddl.AlterPartitionPropertiesOperation;
import org.apache.flink.table.operations.ddl.AlterTableChangeOperation;
import org.apache.flink.table.operations.ddl.AlterTableOperation;
import org.apache.flink.table.operations.ddl.AlterTableOptionsOperation;
import org.apache.flink.table.operations.ddl.AlterTableRenameOperation;
import org.apache.flink.table.operations.ddl.AlterTableSchemaOperation;
import org.apache.flink.table.operations.ddl.AlterViewAsOperation;
import org.apache.flink.table.operations.ddl.AlterViewOperation;
import org.apache.flink.table.operations.ddl.AlterViewPropertiesOperation;
import org.apache.flink.table.operations.ddl.AlterViewRenameOperation;
import org.apache.flink.table.operations.ddl.AnalyzeTableOperation;
import org.apache.flink.table.operations.ddl.CompilePlanOperation;
import org.apache.flink.table.operations.ddl.CreateCatalogFunctionOperation;
import org.apache.flink.table.operations.ddl.CreateCatalogOperation;
import org.apache.flink.table.operations.ddl.CreateDatabaseOperation;
import org.apache.flink.table.operations.ddl.CreateTableOperation;
import org.apache.flink.table.operations.ddl.CreateTempSystemFunctionOperation;
import org.apache.flink.table.operations.ddl.CreateViewOperation;
import org.apache.flink.table.operations.ddl.DropCatalogFunctionOperation;
import org.apache.flink.table.operations.ddl.DropCatalogOperation;
import org.apache.flink.table.operations.ddl.DropDatabaseOperation;
import org.apache.flink.table.operations.ddl.DropPartitionsOperation;
import org.apache.flink.table.operations.ddl.DropTableOperation;
import org.apache.flink.table.operations.ddl.DropTempSystemFunctionOperation;
import org.apache.flink.table.operations.ddl.DropViewOperation;
import org.apache.flink.table.operations.utils.OperationTreeBuilder;
import org.apache.flink.table.resource.ResourceManager;
import org.apache.flink.table.resource.ResourceType;
import org.apache.flink.table.resource.ResourceUri;
import org.apache.flink.table.sinks.TableSink;
import org.apache.flink.table.sources.TableSource;
import org.apache.flink.table.sources.TableSourceValidation;
import org.apache.flink.table.types.AbstractDataType;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.utils.DataTypeUtils;
import org.apache.flink.table.utils.print.PrintStyle;
import org.apache.flink.types.Row;
import org.apache.flink.util.FlinkUserCodeClassLoaders;
import org.apache.flink.util.MutableURLClassLoader;
import org.apache.flink.util.Preconditions;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import static org.apache.flink.table.api.config.TableConfigOptions.TABLE_DML_SYNC;
/**
* Implementation of {@link TableEnvironment} that works exclusively with Table API interfaces. Only
* {@link TableSource} is supported as an input and {@link TableSink} as an output. It also does not
* bind to any particular {@code StreamExecutionEnvironment}.
*/
@Internal
public class TableEnvironmentImpl implements TableEnvironmentInternal {
// Flag that tells whether the TableSource/TableSink used in this environment is a stream
// table source/sink. This should always be true; it avoids hard-coding the value everywhere.
private static final boolean IS_STREAM_TABLE = true;
private final CatalogManager catalogManager;
private final ModuleManager moduleManager;
protected final ResourceManager resourceManager;
private final OperationTreeBuilder operationTreeBuilder;
protected final TableConfig tableConfig;
protected final Executor execEnv;
protected final FunctionCatalog functionCatalog;
protected final Planner planner;
private final boolean isStreamingMode;
private static final String UNSUPPORTED_QUERY_IN_EXECUTE_SQL_MSG =
"Unsupported SQL query! executeSql() only accepts a single SQL statement of type "
+ "CREATE TABLE, DROP TABLE, ALTER TABLE, CREATE DATABASE, DROP DATABASE, ALTER DATABASE, "
+ "CREATE FUNCTION, DROP FUNCTION, ALTER FUNCTION, CREATE CATALOG, DROP CATALOG, "
+ "USE CATALOG, USE [CATALOG.]DATABASE, SHOW CATALOGS, SHOW DATABASES, SHOW TABLES, SHOW [USER] FUNCTIONS, SHOW PARTITIONS"
+ "CREATE VIEW, DROP VIEW, SHOW VIEWS, INSERT, DESCRIBE, LOAD MODULE, UNLOAD "
+ "MODULE, USE MODULES, SHOW [FULL] MODULES.";
private static final String UNSUPPORTED_QUERY_IN_COMPILE_PLAN_SQL_MSG =
"Unsupported SQL query! compilePlanSql() only accepts a single SQL statement of type INSERT";
protected TableEnvironmentImpl(
CatalogManager catalogManager,
ModuleManager moduleManager,
ResourceManager resourceManager,
TableConfig tableConfig,
Executor executor,
FunctionCatalog functionCatalog,
Planner planner,
boolean isStreamingMode) {
this.catalogManager = catalogManager;
this.moduleManager = moduleManager;
this.resourceManager = resourceManager;
this.execEnv = executor;
this.tableConfig = tableConfig;
this.functionCatalog = functionCatalog;
this.planner = planner;
this.isStreamingMode = isStreamingMode;
this.operationTreeBuilder =
OperationTreeBuilder.create(
tableConfig,
resourceManager.getUserClassLoader(),
functionCatalog.asLookup(getParser()::parseIdentifier),
catalogManager.getDataTypeFactory(),
path -> {
try {
UnresolvedIdentifier unresolvedIdentifier =
getParser().parseIdentifier(path);
Optional<SourceQueryOperation> catalogQueryOperation =
scanInternal(unresolvedIdentifier);
return catalogQueryOperation.map(
t -> ApiExpressionUtils.tableRef(path, t));
} catch (SqlParserException ex) {
// The TableLookup is used during resolution of expressions, and the path
// might actually not be an identifier of a table. It might be a reference
// to some other object, such as a column or a local reference. This method
// should return an empty optional in such cases to fall back to other
// identifier resolution.
return Optional.empty();
}
},
getParser()::parseSqlExpression,
isStreamingMode);
catalogManager.initSchemaResolver(
isStreamingMode, operationTreeBuilder.getResolverBuilder());
}
public static TableEnvironmentImpl create(Configuration configuration) {
return create(EnvironmentSettings.newInstance().withConfiguration(configuration).build());
}
public static TableEnvironmentImpl create(EnvironmentSettings settings) {
final MutableURLClassLoader userClassLoader =
FlinkUserCodeClassLoaders.create(
new URL[0], settings.getUserClassLoader(), settings.getConfiguration());
final ExecutorFactory executorFactory =
FactoryUtil.discoverFactory(
userClassLoader, ExecutorFactory.class, ExecutorFactory.DEFAULT_IDENTIFIER);
final Executor executor = executorFactory.create(settings.getConfiguration());
// use configuration to init table config
final TableConfig tableConfig = TableConfig.getDefault();
tableConfig.setRootConfiguration(executor.getConfiguration());
tableConfig.addConfiguration(settings.getConfiguration());
final ResourceManager resourceManager =
new ResourceManager(settings.getConfiguration(), userClassLoader);
final ModuleManager moduleManager = new ModuleManager();
final CatalogManager catalogManager =
CatalogManager.newBuilder()
.classLoader(userClassLoader)
.config(tableConfig)
.defaultCatalog(
settings.getBuiltInCatalogName(),
new GenericInMemoryCatalog(
settings.getBuiltInCatalogName(),
settings.getBuiltInDatabaseName()))
.build();
final FunctionCatalog functionCatalog =
new FunctionCatalog(tableConfig, resourceManager, catalogManager, moduleManager);
final Planner planner =
PlannerFactoryUtil.createPlanner(
executor,
tableConfig,
userClassLoader,
moduleManager,
catalogManager,
functionCatalog);
return new TableEnvironmentImpl(
catalogManager,
moduleManager,
resourceManager,
tableConfig,
executor,
functionCatalog,
planner,
settings.isStreamingMode());
}
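// Illustrative usage sketch (not part of this class): how an environment is typically
// obtained through the public factory method. Assumes a planner/executor implementation
// (e.g. flink-table-planner) is on the classpath so that factory discovery succeeds.
//
//   TableEnvironment tEnv =
//           TableEnvironment.create(EnvironmentSettings.inStreamingMode());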
@Override
public Table fromValues(Object... values) {
return fromValues(Arrays.asList(values));
}
@Override
public Table fromValues(AbstractDataType<?> rowType, Object... values) {
return fromValues(rowType, Arrays.asList(values));
}
@Override
public Table fromValues(Expression... values) {
return createTable(operationTreeBuilder.values(values));
}
@Override
public Table fromValues(AbstractDataType<?> rowType, Expression... values) {
final DataType resolvedDataType =
catalogManager.getDataTypeFactory().createDataType(rowType);
return createTable(operationTreeBuilder.values(resolvedDataType, values));
}
@Override
public Table fromValues(Iterable<?> values) {
Expression[] exprs =
StreamSupport.stream(values.spliterator(), false)
.map(ApiExpressionUtils::objectToExpression)
.toArray(Expression[]::new);
return fromValues(exprs);
}
@Override
public Table fromValues(AbstractDataType<?> rowType, Iterable<?> values) {
Expression[] exprs =
StreamSupport.stream(values.spliterator(), false)
.map(ApiExpressionUtils::objectToExpression)
.toArray(Expression[]::new);
return fromValues(rowType, exprs);
}
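// Illustrative sketch for the fromValues(...) overloads above; giving the row type
// explicitly avoids surprises from type inference. The field names and values are made up.
//
//   Table t =
//           tEnv.fromValues(
//                   DataTypes.ROW(
//                           DataTypes.FIELD("id", DataTypes.INT()),
//                           DataTypes.FIELD("name", DataTypes.STRING())),
//                   Row.of(1, "ABC"),
//                   Row.of(2, "DEF"));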
@VisibleForTesting
public Planner getPlanner() {
return planner;
}
@Override
public Table fromTableSource(TableSource<?> source) {
// only accept StreamTableSource and LookupableTableSource here
// TODO: a validation should be added, but StreamTableSource currently lives in the
// flink-table-api-java-bridge module
return createTable(new TableSourceQueryOperation<>(source, !IS_STREAM_TABLE));
}
@Override
public void registerCatalog(String catalogName, Catalog catalog) {
catalogManager.registerCatalog(catalogName, catalog);
}
@Override
public Optional<Catalog> getCatalog(String catalogName) {
return catalogManager.getCatalog(catalogName);
}
@Override
public void loadModule(String moduleName, Module module) {
moduleManager.loadModule(moduleName, module);
}
@Override
public void useModules(String... moduleNames) {
moduleManager.useModules(moduleNames);
}
@Override
public void unloadModule(String moduleName) {
moduleManager.unloadModule(moduleName);
}
@Override
public void registerFunction(String name, ScalarFunction function) {
functionCatalog.registerTempSystemScalarFunction(name, function);
}
@Override
public void createTemporarySystemFunction(
String name, Class<? extends UserDefinedFunction> functionClass) {
final UserDefinedFunction functionInstance =
UserDefinedFunctionHelper.instantiateFunction(functionClass);
createTemporarySystemFunction(name, functionInstance);
}
@Override
public void createTemporarySystemFunction(String name, UserDefinedFunction functionInstance) {
functionCatalog.registerTemporarySystemFunction(name, functionInstance, false);
}
@Override
public void createTemporarySystemFunction(
String name, String className, List<ResourceUri> resourceUris) {
functionCatalog.registerTemporarySystemFunction(name, className, resourceUris);
}
@Override
public boolean dropTemporarySystemFunction(String name) {
return functionCatalog.dropTemporarySystemFunction(name, true);
}
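// Illustrative sketch of the temporary system function lifecycle implemented above;
// MyUpper stands for a hypothetical ScalarFunction subclass, src for a registered table.
//
//   tEnv.createTemporarySystemFunction("MyUpper", MyUpper.class);
//   tEnv.executeSql("SELECT MyUpper(name) FROM src").print();
//   tEnv.dropTemporarySystemFunction("MyUpper");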
@Override
public void createFunction(String path, Class<? extends UserDefinedFunction> functionClass) {
createFunction(path, functionClass, false);
}
@Override
public void createFunction(
String path,
Class<? extends UserDefinedFunction> functionClass,
boolean ignoreIfExists) {
final UnresolvedIdentifier unresolvedIdentifier = getParser().parseIdentifier(path);
functionCatalog.registerCatalogFunction(
unresolvedIdentifier, functionClass, ignoreIfExists);
}
@Override
public void createFunction(String path, String className, List<ResourceUri> resourceUris) {
createFunction(path, className, resourceUris, false);
}
@Override
public void createFunction(
String path, String className, List<ResourceUri> resourceUris, boolean ignoreIfExists) {
final UnresolvedIdentifier unresolvedIdentifier = getParser().parseIdentifier(path);
functionCatalog.registerCatalogFunction(
unresolvedIdentifier, className, resourceUris, ignoreIfExists);
}
@Override
public boolean dropFunction(String path) {
final UnresolvedIdentifier unresolvedIdentifier = getParser().parseIdentifier(path);
return functionCatalog.dropCatalogFunction(unresolvedIdentifier, true);
}
@Override
public void createTemporaryFunction(
String path, Class<? extends UserDefinedFunction> functionClass) {
final UserDefinedFunction functionInstance =
UserDefinedFunctionHelper.instantiateFunction(functionClass);
createTemporaryFunction(path, functionInstance);
}
@Override
public void createTemporaryFunction(String path, UserDefinedFunction functionInstance) {
final UnresolvedIdentifier unresolvedIdentifier = getParser().parseIdentifier(path);
functionCatalog.registerTemporaryCatalogFunction(
unresolvedIdentifier, functionInstance, false);
}
@Override
public void createTemporaryFunction(
String path, String className, List<ResourceUri> resourceUris) {
final UnresolvedIdentifier unresolvedIdentifier = getParser().parseIdentifier(path);
final CatalogFunction catalogFunction =
new CatalogFunctionImpl(className, FunctionLanguage.JAVA, resourceUris);
functionCatalog.registerTemporaryCatalogFunction(
unresolvedIdentifier, catalogFunction, false);
}
@Override
public boolean dropTemporaryFunction(String path) {
final UnresolvedIdentifier unresolvedIdentifier = getParser().parseIdentifier(path);
return functionCatalog.dropTemporaryCatalogFunction(unresolvedIdentifier, true);
}
// TODO: Maybe we should expose addJar as tEnv's API later.
private TableResultInternal addJar(AddJarOperation addJarOperation) {
ResourceUri resourceUri = new ResourceUri(ResourceType.JAR, addJarOperation.getPath());
try {
resourceManager.registerJarResources(Collections.singletonList(resourceUri));
return TableResultImpl.TABLE_RESULT_OK;
} catch (IOException e) {
throw new TableException(
String.format("Could not register the specified resource [%s].", resourceUri),
e);
}
}
// TODO: Maybe we should expose listJars as tEnv's API later.
private String[] listJars() {
return resourceManager.getResources().keySet().stream()
.map(ResourceUri::getUri)
.toArray(String[]::new);
}
@Override
public void createTemporaryTable(String path, TableDescriptor descriptor) {
Preconditions.checkNotNull(path, "Path must not be null.");
Preconditions.checkNotNull(descriptor, "Table descriptor must not be null.");
final ObjectIdentifier tableIdentifier =
catalogManager.qualifyIdentifier(getParser().parseIdentifier(path));
catalogManager.createTemporaryTable(descriptor.toCatalogTable(), tableIdentifier, false);
}
@Override
public void createTable(String path, TableDescriptor descriptor) {
Preconditions.checkNotNull(path, "Path must not be null.");
Preconditions.checkNotNull(descriptor, "Table descriptor must not be null.");
final ObjectIdentifier tableIdentifier =
catalogManager.qualifyIdentifier(getParser().parseIdentifier(path));
catalogManager.createTable(descriptor.toCatalogTable(), tableIdentifier, false);
}
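// Illustrative sketch of registering a catalog table through a TableDescriptor; the
// 'datagen' connector and the options shown here ship with Flink, the table name is
// made up.
//
//   tEnv.createTable(
//           "SourceTable",
//           TableDescriptor.forConnector("datagen")
//                   .schema(Schema.newBuilder().column("f0", DataTypes.STRING()).build())
//                   .option("rows-per-second", "10")
//                   .build());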
@Override
public void registerTable(String name, Table table) {
UnresolvedIdentifier identifier = UnresolvedIdentifier.of(name);
createTemporaryView(identifier, table);
}
@Override
public void createTemporaryView(String path, Table view) {
Preconditions.checkNotNull(path, "Path must not be null.");
Preconditions.checkNotNull(view, "Table view must not be null.");
UnresolvedIdentifier identifier = getParser().parseIdentifier(path);
createTemporaryView(identifier, view);
}
private void createTemporaryView(UnresolvedIdentifier identifier, Table view) {
if (((TableImpl) view).getTableEnvironment() != this) {
throw new TableException(
"Only table API objects that belong to this TableEnvironment can be registered.");
}
ObjectIdentifier tableIdentifier = catalogManager.qualifyIdentifier(identifier);
QueryOperation queryOperation =
qualifyQueryOperation(tableIdentifier, view.getQueryOperation());
CatalogBaseTable tableTable = new QueryOperationCatalogView(queryOperation);
catalogManager.createTemporaryTable(tableTable, tableIdentifier, false);
}
@Override
public Table scan(String... tablePath) {
UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(tablePath);
return scanInternal(unresolvedIdentifier)
.map(this::createTable)
.orElseThrow(
() ->
new ValidationException(
String.format(
"Table %s was not found.", unresolvedIdentifier)));
}
@Override
public Table from(String path) {
UnresolvedIdentifier unresolvedIdentifier = getParser().parseIdentifier(path);
return scanInternal(unresolvedIdentifier)
.map(this::createTable)
.orElseThrow(
() ->
new ValidationException(
String.format(
"Table %s was not found.", unresolvedIdentifier)));
}
@Override
public Table from(TableDescriptor descriptor) {
Preconditions.checkNotNull(descriptor, "Table descriptor must not be null.");
final ResolvedCatalogTable resolvedCatalogBaseTable =
catalogManager.resolveCatalogTable(descriptor.toCatalogTable());
final QueryOperation queryOperation =
new SourceQueryOperation(ContextResolvedTable.anonymous(resolvedCatalogBaseTable));
return createTable(queryOperation);
}
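// Illustrative sketch: both from(...) variants above yield a Table, either resolved
// from the catalog or backed by an anonymous inline descriptor. Names are made up.
//
//   Table fromCatalog = tEnv.from("SourceTable");
//   Table anonymous =
//           tEnv.from(
//                   TableDescriptor.forConnector("datagen")
//                           .schema(Schema.newBuilder().column("f0", DataTypes.STRING()).build())
//                           .build());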
private Optional<SourceQueryOperation> scanInternal(UnresolvedIdentifier identifier) {
ObjectIdentifier tableIdentifier = catalogManager.qualifyIdentifier(identifier);
return catalogManager.getTable(tableIdentifier).map(SourceQueryOperation::new);
}
@Override
public String[] listCatalogs() {
return catalogManager.listCatalogs().stream().sorted().toArray(String[]::new);
}
@Override
public String[] listModules() {
return moduleManager.listModules().toArray(new String[0]);
}
@Override
public ModuleEntry[] listFullModules() {
return moduleManager.listFullModules().toArray(new ModuleEntry[0]);
}
@Override
public String[] listDatabases() {
return catalogManager
.getCatalog(catalogManager.getCurrentCatalog())
.get()
.listDatabases()
.toArray(new String[0]);
}
@Override
public String[] listTables() {
return catalogManager.listTables().stream().sorted().toArray(String[]::new);
}
@Override
public String[] listTables(String catalog, String databaseName) {
return catalogManager.listTables(catalog, databaseName).stream()
.sorted()
.toArray(String[]::new);
}
@Override
public String[] listViews() {
return catalogManager.listViews().stream().sorted().toArray(String[]::new);
}
@Override
public String[] listTemporaryTables() {
return catalogManager.listTemporaryTables().stream().sorted().toArray(String[]::new);
}
@Override
public String[] listTemporaryViews() {
return catalogManager.listTemporaryViews().stream().sorted().toArray(String[]::new);
}
@Override
public boolean dropTemporaryTable(String path) {
UnresolvedIdentifier unresolvedIdentifier = getParser().parseIdentifier(path);
ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
try {
catalogManager.dropTemporaryTable(identifier, false);
return true;
} catch (ValidationException e) {
return false;
}
}
@Override
public boolean dropTemporaryView(String path) {
UnresolvedIdentifier unresolvedIdentifier = getParser().parseIdentifier(path);
ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
try {
catalogManager.dropTemporaryView(identifier, false);
return true;
} catch (ValidationException e) {
return false;
}
}
@Override
public String[] listUserDefinedFunctions() {
String[] functions = functionCatalog.getUserDefinedFunctions();
Arrays.sort(functions);
return functions;
}
@Override
public String[] listFunctions() {
String[] functions = functionCatalog.getFunctions();
Arrays.sort(functions);
return functions;
}
@Override
public String explainSql(
String statement, ExplainFormat format, ExplainDetail... extraDetails) {
List<Operation> operations = getParser().parse(statement);
if (operations.size() != 1) {
throw new TableException(
"Unsupported SQL query! explainSql() only accepts a single SQL query.");
}
if (operations.get(0) instanceof StatementSetOperation) {
operations =
new ArrayList<>(((StatementSetOperation) operations.get(0)).getOperations());
}
return explainInternal(operations, format, extraDetails);
}
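// Illustrative sketch of explainSql(...) with extra details; the ExplainDetail values
// are part of the public API, the statement and table name are made up.
//
//   String plan =
//           tEnv.explainSql(
//                   "SELECT f0 FROM SourceTable WHERE f0 IS NOT NULL",
//                   ExplainDetail.ESTIMATED_COST,
//                   ExplainDetail.CHANGELOG_MODE);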
@Override
public String explainInternal(
List<Operation> operations, ExplainFormat format, ExplainDetail... extraDetails) {
operations =
operations.stream()
.filter(o -> !(o instanceof NopOperation))
.collect(Collectors.toList());
// The Hive parser may generate a NopOperation, in which case we just return an
// empty string as the plan.
if (operations.isEmpty()) {
return "";
} else {
if (operations.size() > 1
&& operations.stream().anyMatch(this::isRowLevelModification)) {
throw new TableException(
"Unsupported SQL query! Only accept a single SQL statement of type DELETE, UPDATE.");
}
return planner.explain(operations, format, extraDetails);
}
}
@Override
public String[] getCompletionHints(String statement, int position) {
return planner.getParser().getCompletionHints(statement, position);
}
@Override
public Table sqlQuery(String query) {
List<Operation> operations = getParser().parse(query);
if (operations.size() != 1) {
throw new ValidationException(
"Unsupported SQL query! sqlQuery() only accepts a single SQL query.");
}
Operation operation = operations.get(0);
if (operation instanceof QueryOperation && !(operation instanceof ModifyOperation)) {
return createTable((QueryOperation) operation);
} else {
throw new ValidationException(
"Unsupported SQL query! sqlQuery() only accepts a single SQL query of type "
+ "SELECT, UNION, INTERSECT, EXCEPT, VALUES, and ORDER_BY.");
}
}
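// Illustrative sketch: sqlQuery(...) only builds a Table from a SELECT-like statement;
// nothing is executed until the result is emitted to a sink or collected.
//
//   Table result = tEnv.sqlQuery("SELECT f0, COUNT(*) AS cnt FROM SourceTable GROUP BY f0");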
@Override
public TableResult executeSql(String statement) {
List<Operation> operations = getParser().parse(statement);
if (operations.size() != 1) {
throw new TableException(UNSUPPORTED_QUERY_IN_EXECUTE_SQL_MSG);
}
Operation operation = operations.get(0);
return executeInternal(operation);
}
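// Illustrative sketch: executeSql(...) submits a single DDL/DML statement; for INSERT
// statements the returned TableResult carries the JobClient of the submitted job.
// Table names are made up.
//
//   TableResult r = tEnv.executeSql("INSERT INTO SinkTable SELECT f0 FROM SourceTable");
//   r.await(); // optionally block until the job finishes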
@Override
public StatementSet createStatementSet() {
return new StatementSetImpl(this);
}
@Override
public CompiledPlan loadPlan(PlanReference planReference) {
try {
return new CompiledPlanImpl(this, planner.loadPlan(planReference));
} catch (IOException e) {
throw new TableException(String.format("Cannot load %s.", planReference), e);
}
}
@Override
public CompiledPlan compilePlanSql(String stmt) {
List<Operation> operations = getParser().parse(stmt);
if (operations.size() != 1
|| !(operations.get(0) instanceof ModifyOperation)
|| isRowLevelModification(operations.get(0))
|| operations.get(0) instanceof CreateTableASOperation) {
throw new TableException(UNSUPPORTED_QUERY_IN_COMPILE_PLAN_SQL_MSG);
}
return new CompiledPlanImpl(
this,
planner.compilePlan(
Collections.singletonList((ModifyOperation) operations.get(0))));
}
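// Illustrative sketch of the compiled-plan round trip: compile an INSERT statement
// once, persist the plan, and execute the restored plan later. The file path and table
// names are made up.
//
//   CompiledPlan plan =
//           tEnv.compilePlanSql("INSERT INTO SinkTable SELECT * FROM SourceTable");
//   plan.writeToFile("/tmp/plan.json");
//   tEnv.loadPlan(PlanReference.fromFile("/tmp/plan.json")).execute();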
@Override
public TableResultInternal executePlan(InternalPlan plan) {
List<Transformation<?>> transformations = planner.translatePlan(plan);
List<String> sinkIdentifierNames =
deduplicateSinkIdentifierNames(plan.getSinkIdentifiers());
return executeInternal(transformations, sinkIdentifierNames);
}
private CompiledPlan compilePlanAndWrite(
String filePath, boolean ifNotExists, Operation operation) {
File file = Paths.get(filePath).toFile();
if (file.exists()) {
if (ifNotExists) {
return loadPlan(PlanReference.fromFile(filePath));
}
if (!tableConfig.get(TableConfigOptions.PLAN_FORCE_RECOMPILE)) {
throw new TableException(
String.format(
"Cannot overwrite the plan file '%s'. "
+ "Either manually remove the file or, "
+ "if you're debugging your job, "
+ "set the option '%s' to true.",
filePath, TableConfigOptions.PLAN_FORCE_RECOMPILE.key()));
}
}
CompiledPlan compiledPlan;
if (operation instanceof StatementSetOperation) {
compiledPlan = compilePlan(((StatementSetOperation) operation).getOperations());
} else if (operation instanceof ModifyOperation) {
compiledPlan = compilePlan(Collections.singletonList((ModifyOperation) operation));
} else {
throw new TableException(
"Unsupported operation to compile: "
+ operation.getClass()
+ ". This is a bug, please file an issue.");
}
compiledPlan.writeToFile(file, false);
return compiledPlan;
}
@Override
public CompiledPlan compilePlan(List<ModifyOperation> operations) {
return new CompiledPlanImpl(this, planner.compilePlan(operations));
}
@Override
public TableResultInternal executeInternal(List<ModifyOperation> operations) {
List<ModifyOperation> mapOperations = new ArrayList<>();
for (ModifyOperation modify : operations) {
// execute CREATE TABLE first for CTAS statements
if (modify instanceof CreateTableASOperation) {
CreateTableASOperation ctasOperation = (CreateTableASOperation) modify;
executeInternal(ctasOperation.getCreateTableOperation());
mapOperations.add(ctasOperation.toSinkModifyOperation(catalogManager));
} else {
boolean isRowLevelModification = isRowLevelModification(modify);
if (isRowLevelModification) {
String modifyType =
((SinkModifyOperation) modify).isDelete() ? "DELETE" : "UPDATE";
if (operations.size() > 1) {
throw new TableException(
String.format(
"Unsupported SQL query! Only accept a single SQL statement of type %s.",
modifyType));
}
if (isStreamingMode) {
throw new TableException(
String.format(
"%s statement is not supported for streaming mode now.",
modifyType));
}
if (modify instanceof DeleteFromFilterOperation) {
return executeInternal((DeleteFromFilterOperation) modify);
}
}
mapOperations.add(modify);
}
}
List<Transformation<?>> transformations = translate(mapOperations);
List<String> sinkIdentifierNames = extractSinkIdentifierNames(mapOperations);
return executeInternal(transformations, sinkIdentifierNames);
}
private TableResultInternal executeInternal(
DeleteFromFilterOperation deleteFromFilterOperation) {
Optional<Long> rows =
deleteFromFilterOperation.getSupportsDeletePushDownSink().executeDeletion();
if (rows.isPresent()) {
return TableResultImpl.builder()
.resultKind(ResultKind.SUCCESS_WITH_CONTENT)
.schema(ResolvedSchema.of(Column.physical("rows affected", DataTypes.BIGINT())))
.data(Collections.singletonList(Row.of(rows.get())))
.build();
} else {
return TableResultImpl.TABLE_RESULT_OK;
}
}
private TableResultInternal executeInternal(
List<Transformation<?>> transformations, List<String> sinkIdentifierNames) {
final String defaultJobName = "insert-into_" + String.join(",", sinkIdentifierNames);
resourceManager.addJarConfiguration(tableConfig);
// We pass only the configuration to avoid reconfiguration with the rootConfiguration
Pipeline pipeline =
execEnv.createPipeline(
transformations, tableConfig.getConfiguration(), defaultJobName);
try {
JobClient jobClient = execEnv.executeAsync(pipeline);
final List<Column> columns = new ArrayList<>();
Long[] affectedRowCounts = new Long[transformations.size()];
for (int i = 0; i < transformations.size(); ++i) {
// use sink identifier name as field name
columns.add(Column.physical(sinkIdentifierNames.get(i), DataTypes.BIGINT()));
affectedRowCounts[i] = -1L;
}
TableResultInternal result =
TableResultImpl.builder()
.jobClient(jobClient)
.resultKind(ResultKind.SUCCESS_WITH_CONTENT)
.schema(ResolvedSchema.of(columns))
.resultProvider(
new InsertResultProvider(affectedRowCounts)
.setJobClient(jobClient))
.build();
if (tableConfig.get(TABLE_DML_SYNC)) {
try {
result.await();
} catch (InterruptedException | ExecutionException e) {
result.getJobClient().ifPresent(JobClient::cancel);
throw new TableException("Fail to wait execution finish.", e);
}
}
return result;
} catch (Exception e) {
throw new TableException("Failed to execute sql", e);
}
}
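// Illustrative sketch: the TABLE_DML_SYNC branch above is controlled by a config
// option, so callers can opt into synchronous DML execution:
//
//   tEnv.getConfig().set(TableConfigOptions.TABLE_DML_SYNC, true);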
private TableResultInternal executeQueryOperation(QueryOperation operation) {
CollectModifyOperation sinkOperation = new CollectModifyOperation(operation);
List<Transformation<?>> transformations =
translate(Collections.singletonList(sinkOperation));
final String defaultJobName = "collect";
resourceManager.addJarConfiguration(tableConfig);
// We pass only the configuration to avoid reconfiguration with the rootConfiguration
Pipeline pipeline =
execEnv.createPipeline(
transformations, tableConfig.getConfiguration(), defaultJobName);
try {
JobClient jobClient = execEnv.executeAsync(pipeline);
ResultProvider resultProvider = sinkOperation.getSelectResultProvider();
resultProvider.setJobClient(jobClient);
return TableResultImpl.builder()
.jobClient(jobClient)
.resultKind(ResultKind.SUCCESS_WITH_CONTENT)
.schema(operation.getResolvedSchema())
.resultProvider(resultProvider)
.setPrintStyle(
PrintStyle.tableauWithTypeInferredColumnWidths(
// sinkOperation.getConsumedDataType() handles legacy types
DataTypeUtils.expandCompositeTypeToSchema(
sinkOperation.getConsumedDataType()),
resultProvider.getRowDataStringConverter(),
PrintStyle.DEFAULT_MAX_COLUMN_WIDTH,
false,
isStreamingMode))
.build();
} catch (Exception e) {
throw new TableException("Failed to execute sql", e);
}
}
@Override
public TableResultInternal executeInternal(Operation operation) {
// try to use the extended operation executor to execute the operation
Optional<TableResultInternal> tableResult =
getExtendedOperationExecutor().executeOperation(operation);
// if the extended operation executor returns a non-empty result, return it
if (tableResult.isPresent()) {
return tableResult.get();
}
// otherwise, fall back to internal implementation
if (operation instanceof ModifyOperation) {
return executeInternal(Collections.singletonList((ModifyOperation) operation));
} else if (operation instanceof StatementSetOperation) {
return executeInternal(((StatementSetOperation) operation).getOperations());
} else if (operation instanceof CreateTableOperation) {
CreateTableOperation createTableOperation = (CreateTableOperation) operation;
if (createTableOperation.isTemporary()) {
catalogManager.createTemporaryTable(
createTableOperation.getCatalogTable(),
createTableOperation.getTableIdentifier(),
createTableOperation.isIgnoreIfExists());
} else {
catalogManager.createTable(
createTableOperation.getCatalogTable(),
createTableOperation.getTableIdentifier(),
createTableOperation.isIgnoreIfExists());
}
return TableResultImpl.TABLE_RESULT_OK;
} else if (operation instanceof DropTableOperation) {
DropTableOperation dropTableOperation = (DropTableOperation) operation;
if (dropTableOperation.isTemporary()) {
catalogManager.dropTemporaryTable(
dropTableOperation.getTableIdentifier(), dropTableOperation.isIfExists());
} else {
catalogManager.dropTable(
dropTableOperation.getTableIdentifier(), dropTableOperation.isIfExists());
}
return TableResultImpl.TABLE_RESULT_OK;
} else if (operation instanceof AlterTableOperation) {
AlterTableOperation alterTableOperation = (AlterTableOperation) operation;
Catalog catalog =
getCatalogOrThrowException(
alterTableOperation.getTableIdentifier().getCatalogName());
String exMsg = getDDLOpExecuteErrorMsg(alterTableOperation.asSummaryString());
try {
if (alterTableOperation instanceof AlterTableRenameOperation) {
AlterTableRenameOperation alterTableRenameOp =
(AlterTableRenameOperation) operation;
catalog.renameTable(
alterTableRenameOp.getTableIdentifier().toObjectPath(),
alterTableRenameOp.getNewTableIdentifier().getObjectName(),
alterTableRenameOp.ignoreIfTableNotExists());
} else if (alterTableOperation instanceof AlterTableOptionsOperation) {
AlterTableOptionsOperation alterTablePropertiesOp =
(AlterTableOptionsOperation) operation;
catalogManager.alterTable(
alterTablePropertiesOp.getCatalogTable(),
alterTablePropertiesOp.getTableIdentifier(),
alterTablePropertiesOp.ignoreIfTableNotExists());
} else if (alterTableOperation instanceof AlterPartitionPropertiesOperation) {
AlterPartitionPropertiesOperation alterPartPropsOp =
(AlterPartitionPropertiesOperation) operation;
catalog.alterPartition(
alterPartPropsOp.getTableIdentifier().toObjectPath(),
alterPartPropsOp.getPartitionSpec(),
alterPartPropsOp.getCatalogPartition(),
alterPartPropsOp.ignoreIfTableNotExists());
} else if (alterTableOperation instanceof AlterTableSchemaOperation) {
AlterTableSchemaOperation alterTableSchemaOperation =
(AlterTableSchemaOperation) alterTableOperation;
catalogManager.alterTable(
alterTableSchemaOperation.getCatalogTable(),
alterTableSchemaOperation.getTableIdentifier(),
alterTableSchemaOperation.ignoreIfTableNotExists());
} else if (alterTableOperation instanceof AddPartitionsOperation) {
AddPartitionsOperation addPartitionsOperation =
(AddPartitionsOperation) alterTableOperation;
List<CatalogPartitionSpec> specs = addPartitionsOperation.getPartitionSpecs();
List<CatalogPartition> partitions =
addPartitionsOperation.getCatalogPartitions();
ObjectPath tablePath =
addPartitionsOperation.getTableIdentifier().toObjectPath();
for (int i = 0; i < specs.size(); i++) {
catalog.createPartition(
tablePath,
specs.get(i),
partitions.get(i),
addPartitionsOperation.ignoreIfPartitionExists());
}
} else if (alterTableOperation instanceof DropPartitionsOperation) {
DropPartitionsOperation dropPartitionsOperation =
(DropPartitionsOperation) alterTableOperation;
ObjectPath tablePath =
dropPartitionsOperation.getTableIdentifier().toObjectPath();
for (CatalogPartitionSpec spec : dropPartitionsOperation.getPartitionSpecs()) {
catalog.dropPartition(
tablePath,
spec,
dropPartitionsOperation.ignoreIfPartitionNotExists());
}
} else if (alterTableOperation instanceof AlterTableChangeOperation) {
AlterTableChangeOperation alterTableChangeOperation =
(AlterTableChangeOperation) alterTableOperation;
catalogManager.alterTable(
alterTableChangeOperation.getNewTable(),
alterTableChangeOperation.getTableChanges(),
alterTableChangeOperation.getTableIdentifier(),
alterTableChangeOperation.ignoreIfTableNotExists());
}
return TableResultImpl.TABLE_RESULT_OK;
} catch (TableAlreadyExistException | TableNotExistException e) {
throw new ValidationException(exMsg, e);
} catch (Exception e) {
throw new TableException(exMsg, e);
}
} else if (operation instanceof CreateViewOperation) {
CreateViewOperation createViewOperation = (CreateViewOperation) operation;
if (createViewOperation.isTemporary()) {
catalogManager.createTemporaryTable(
createViewOperation.getCatalogView(),
createViewOperation.getViewIdentifier(),
createViewOperation.isIgnoreIfExists());
} else {
catalogManager.createTable(
createViewOperation.getCatalogView(),
createViewOperation.getViewIdentifier(),
createViewOperation.isIgnoreIfExists());
}
return TableResultImpl.TABLE_RESULT_OK;
} else if (operation instanceof DropViewOperation) {
DropViewOperation dropViewOperation = (DropViewOperation) operation;
if (dropViewOperation.isTemporary()) {
catalogManager.dropTemporaryView(
dropViewOperation.getViewIdentifier(), dropViewOperation.isIfExists());
} else {
catalogManager.dropView(
dropViewOperation.getViewIdentifier(), dropViewOperation.isIfExists());
}
return TableResultImpl.TABLE_RESULT_OK;
} else if (operation instanceof AlterViewOperation) {
AlterViewOperation alterViewOperation = (AlterViewOperation) operation;
Catalog catalog =
getCatalogOrThrowException(
alterViewOperation.getViewIdentifier().getCatalogName());
String exMsg = getDDLOpExecuteErrorMsg(alterViewOperation.asSummaryString());
try {
if (alterViewOperation instanceof AlterViewRenameOperation) {
AlterViewRenameOperation alterTableRenameOp =
(AlterViewRenameOperation) operation;
catalog.renameTable(
alterTableRenameOp.getViewIdentifier().toObjectPath(),
alterTableRenameOp.getNewViewIdentifier().getObjectName(),
false);
} else if (alterViewOperation instanceof AlterViewPropertiesOperation) {
AlterViewPropertiesOperation alterTablePropertiesOp =
(AlterViewPropertiesOperation) operation;
catalogManager.alterTable(
alterTablePropertiesOp.getCatalogView(),
alterTablePropertiesOp.getViewIdentifier(),
false);
} else if (alterViewOperation instanceof AlterViewAsOperation) {
AlterViewAsOperation alterViewAsOperation =
(AlterViewAsOperation) alterViewOperation;
catalogManager.alterTable(
alterViewAsOperation.getNewView(),
alterViewAsOperation.getViewIdentifier(),
false);
}
return TableResultImpl.TABLE_RESULT_OK;
} catch (TableAlreadyExistException | TableNotExistException e) {
throw new ValidationException(exMsg, e);
} catch (Exception e) {
throw new TableException(exMsg, e);
}
} else if (operation instanceof CreateDatabaseOperation) {
CreateDatabaseOperation createDatabaseOperation = (CreateDatabaseOperation) operation;
Catalog catalog = getCatalogOrThrowException(createDatabaseOperation.getCatalogName());
String exMsg = getDDLOpExecuteErrorMsg(createDatabaseOperation.asSummaryString());
try {
catalog.createDatabase(
createDatabaseOperation.getDatabaseName(),
createDatabaseOperation.getCatalogDatabase(),
createDatabaseOperation.isIgnoreIfExists());
return TableResultImpl.TABLE_RESULT_OK;
} catch (DatabaseAlreadyExistException e) {
throw new ValidationException(exMsg, e);
} catch (Exception e) {
throw new TableException(exMsg, e);
}
} else if (operation instanceof DropDatabaseOperation) {
DropDatabaseOperation dropDatabaseOperation = (DropDatabaseOperation) operation;
Catalog catalog = getCatalogOrThrowException(dropDatabaseOperation.getCatalogName());
String exMsg = getDDLOpExecuteErrorMsg(dropDatabaseOperation.asSummaryString());
try {
catalog.dropDatabase(
dropDatabaseOperation.getDatabaseName(),
dropDatabaseOperation.isIfExists(),
dropDatabaseOperation.isCascade());
return TableResultImpl.TABLE_RESULT_OK;
} catch (DatabaseNotExistException | DatabaseNotEmptyException e) {
throw new ValidationException(exMsg, e);
} catch (Exception e) {
throw new TableException(exMsg, e);
}
} else if (operation instanceof AlterDatabaseOperation) {
AlterDatabaseOperation alterDatabaseOperation = (AlterDatabaseOperation) operation;
Catalog catalog = getCatalogOrThrowException(alterDatabaseOperation.getCatalogName());
String exMsg = getDDLOpExecuteErrorMsg(alterDatabaseOperation.asSummaryString());
try {
catalog.alterDatabase(
alterDatabaseOperation.getDatabaseName(),
alterDatabaseOperation.getCatalogDatabase(),
false);
return TableResultImpl.TABLE_RESULT_OK;
} catch (DatabaseNotExistException e) {
throw new ValidationException(exMsg, e);
} catch (Exception e) {
throw new TableException(exMsg, e);
}
} else if (operation instanceof CreateCatalogFunctionOperation) {
return createCatalogFunction((CreateCatalogFunctionOperation) operation);
} else if (operation instanceof CreateTempSystemFunctionOperation) {
return createSystemFunction((CreateTempSystemFunctionOperation) operation);
} else if (operation instanceof DropCatalogFunctionOperation) {
return dropCatalogFunction((DropCatalogFunctionOperation) operation);
} else if (operation instanceof DropTempSystemFunctionOperation) {
return dropSystemFunction((DropTempSystemFunctionOperation) operation);
} else if (operation instanceof AlterCatalogFunctionOperation) {
return alterCatalogFunction((AlterCatalogFunctionOperation) operation);
} else if (operation instanceof AddJarOperation) {
return addJar((AddJarOperation) operation);
} else if (operation instanceof ShowJarsOperation) {
return buildShowResult("jars", listJars());
} else if (operation instanceof CreateCatalogOperation) {
return createCatalog((CreateCatalogOperation) operation);
} else if (operation instanceof DropCatalogOperation) {
DropCatalogOperation dropCatalogOperation = (DropCatalogOperation) operation;
String exMsg = getDDLOpExecuteErrorMsg(dropCatalogOperation.asSummaryString());
try {
catalogManager.unregisterCatalog(
dropCatalogOperation.getCatalogName(), dropCatalogOperation.isIfExists());
return TableResultImpl.TABLE_RESULT_OK;
} catch (CatalogException e) {
throw new ValidationException(exMsg, e);
}
} else if (operation instanceof LoadModuleOperation) {
return loadModule((LoadModuleOperation) operation);
} else if (operation instanceof UnloadModuleOperation) {
return unloadModule((UnloadModuleOperation) operation);
} else if (operation instanceof UseModulesOperation) {
return useModules((UseModulesOperation) operation);
} else if (operation instanceof UseCatalogOperation) {
UseCatalogOperation useCatalogOperation = (UseCatalogOperation) operation;
catalogManager.setCurrentCatalog(useCatalogOperation.getCatalogName());
return TableResultImpl.TABLE_RESULT_OK;
} else if (operation instanceof UseDatabaseOperation) {
UseDatabaseOperation useDatabaseOperation = (UseDatabaseOperation) operation;
catalogManager.setCurrentCatalog(useDatabaseOperation.getCatalogName());
catalogManager.setCurrentDatabase(useDatabaseOperation.getDatabaseName());
return TableResultImpl.TABLE_RESULT_OK;
} else if (operation instanceof ShowCatalogsOperation) {
return buildShowResult("catalog name", listCatalogs());
} else if (operation instanceof ShowCreateTableOperation) {
ShowCreateTableOperation showCreateTableOperation =
(ShowCreateTableOperation) operation;
ContextResolvedTable table =
catalogManager
.getTable(showCreateTableOperation.getTableIdentifier())
.orElseThrow(
() ->
new ValidationException(
String.format(
"Could not execute SHOW CREATE TABLE. Table with identifier %s does not exist.",
showCreateTableOperation
.getTableIdentifier()
.asSerializableString())));
return TableResultImpl.builder()
.resultKind(ResultKind.SUCCESS_WITH_CONTENT)
.schema(ResolvedSchema.of(Column.physical("result", DataTypes.STRING())))
.data(
Collections.singletonList(
Row.of(
ShowCreateUtil.buildShowCreateTableRow(
table.getResolvedTable(),
showCreateTableOperation.getTableIdentifier(),
table.isTemporary()))))
.build();
} else if (operation instanceof ShowCreateViewOperation) {
ShowCreateViewOperation showCreateViewOperation = (ShowCreateViewOperation) operation;
final ContextResolvedTable table =
catalogManager
.getTable(showCreateViewOperation.getViewIdentifier())
.orElseThrow(
() ->
new ValidationException(
String.format(
"Could not execute SHOW CREATE VIEW. View with identifier %s does not exist.",
showCreateViewOperation
.getViewIdentifier()
.asSerializableString())));
return TableResultImpl.builder()
.resultKind(ResultKind.SUCCESS_WITH_CONTENT)
.schema(ResolvedSchema.of(Column.physical("result", DataTypes.STRING())))
.data(
Collections.singletonList(
Row.of(
ShowCreateUtil.buildShowCreateViewRow(
table.getResolvedTable(),
showCreateViewOperation.getViewIdentifier(),
table.isTemporary()))))
.build();
} else if (operation instanceof ShowCurrentCatalogOperation) {
return buildShowResult(
"current catalog name", new String[] {catalogManager.getCurrentCatalog()});
} else if (operation instanceof ShowDatabasesOperation) {
return buildShowResult("database name", listDatabases());
} else if (operation instanceof ShowCurrentDatabaseOperation) {
return buildShowResult(
"current database name", new String[] {catalogManager.getCurrentDatabase()});
} else if (operation instanceof ShowModulesOperation) {
ShowModulesOperation showModulesOperation = (ShowModulesOperation) operation;
if (showModulesOperation.requireFull()) {
return buildShowFullModulesResult(listFullModules());
} else {
return buildShowResult("module name", listModules());
}
} else if (operation instanceof ShowTablesOperation) {
ShowTablesOperation showTablesOperation = (ShowTablesOperation) operation;
if (showTablesOperation.getPreposition() == null) {
return buildShowTablesResult(listTables(), showTablesOperation);
}
final String catalogName = showTablesOperation.getCatalogName();
final String databaseName = showTablesOperation.getDatabaseName();
Catalog catalog = getCatalogOrThrowException(catalogName);
if (catalog.databaseExists(databaseName)) {
return buildShowTablesResult(
listTables(catalogName, databaseName), showTablesOperation);
} else {
throw new ValidationException(
String.format(
"Database '%s'.'%s' doesn't exist.", catalogName, databaseName));
}
} else if (operation instanceof ShowFunctionsOperation) {
ShowFunctionsOperation showFunctionsOperation = (ShowFunctionsOperation) operation;
String[] functionNames = null;
ShowFunctionsOperation.FunctionScope functionScope =
showFunctionsOperation.getFunctionScope();
switch (functionScope) {
case USER:
functionNames = listUserDefinedFunctions();
break;
case ALL:
functionNames = listFunctions();
break;
default:
throw new UnsupportedOperationException(
String.format(
"SHOW FUNCTIONS with %s scope is not supported.",
functionScope));
}
return buildShowResult("function name", functionNames);
} else if (operation instanceof ShowViewsOperation) {
return buildShowResult("view name", listViews());
} else if (operation instanceof ShowColumnsOperation) {
ShowColumnsOperation showColumnsOperation = (ShowColumnsOperation) operation;
Optional<ContextResolvedTable> result =
catalogManager.getTable(showColumnsOperation.getTableIdentifier());
if (result.isPresent()) {
return buildShowColumnsResult(
result.get().getResolvedSchema(), showColumnsOperation);
} else {
throw new ValidationException(
String.format(
"Tables or views with the identifier '%s' doesn't exist.",
showColumnsOperation.getTableIdentifier().asSummaryString()));
}
} else if (operation instanceof ShowPartitionsOperation) {
String exMsg = getDDLOpExecuteErrorMsg(operation.asSummaryString());
try {
ShowPartitionsOperation showPartitionsOperation =
(ShowPartitionsOperation) operation;
Catalog catalog =
getCatalogOrThrowException(
showPartitionsOperation.getTableIdentifier().getCatalogName());
ObjectPath tablePath = showPartitionsOperation.getTableIdentifier().toObjectPath();
CatalogPartitionSpec partitionSpec = showPartitionsOperation.getPartitionSpec();
List<CatalogPartitionSpec> partitionSpecs =
partitionSpec == null
? catalog.listPartitions(tablePath)
: catalog.listPartitions(tablePath, partitionSpec);
List<String> partitionNames = new ArrayList<>(partitionSpecs.size());
for (CatalogPartitionSpec spec : partitionSpecs) {
List<String> partitionKVs = new ArrayList<>(spec.getPartitionSpec().size());
for (Map.Entry<String, String> partitionKV :
spec.getPartitionSpec().entrySet()) {
String partitionValue =
partitionKV.getValue() == null
? showPartitionsOperation.getDefaultPartitionName()
: partitionKV.getValue();
partitionKVs.add(partitionKV.getKey() + "=" + partitionValue);
}
partitionNames.add(String.join("/", partitionKVs));
}
return buildShowResult("partition name", partitionNames.toArray(new String[0]));
} catch (TableNotExistException e) {
throw new ValidationException(exMsg, e);
} catch (Exception e) {
throw new TableException(exMsg, e);
}
} else if (operation instanceof ExplainOperation) {
ExplainOperation explainOperation = (ExplainOperation) operation;
ExplainDetail[] explainDetails =
explainOperation.getExplainDetails().stream()
.map(ExplainDetail::valueOf)
.toArray(ExplainDetail[]::new);
Operation child = ((ExplainOperation) operation).getChild();
List<Operation> operations;
if (child instanceof StatementSetOperation) {
operations = new ArrayList<>(((StatementSetOperation) child).getOperations());
} else {
operations = Collections.singletonList(child);
}
String explanation = explainInternal(operations, explainDetails);
return TableResultImpl.builder()
.resultKind(ResultKind.SUCCESS_WITH_CONTENT)
.schema(ResolvedSchema.of(Column.physical("result", DataTypes.STRING())))
.data(Collections.singletonList(Row.of(explanation)))
.build();
} else if (operation instanceof DescribeTableOperation) {
DescribeTableOperation describeTableOperation = (DescribeTableOperation) operation;
Optional<ContextResolvedTable> result =
catalogManager.getTable(describeTableOperation.getSqlIdentifier());
if (result.isPresent()) {
return buildDescribeResult(result.get().getResolvedSchema());
} else {
throw new ValidationException(
String.format(
"Tables or views with the identifier '%s' doesn't exist",
describeTableOperation.getSqlIdentifier().asSummaryString()));
}
} else if (operation instanceof QueryOperation) {
return executeQueryOperation((QueryOperation) operation);
} else if (operation instanceof ExecutePlanOperation) {
ExecutePlanOperation executePlanOperation = (ExecutePlanOperation) operation;
return (TableResultInternal)
executePlan(PlanReference.fromFile(executePlanOperation.getFilePath()));
} else if (operation instanceof CompilePlanOperation) {
CompilePlanOperation compilePlanOperation = (CompilePlanOperation) operation;
compilePlanAndWrite(
compilePlanOperation.getFilePath(),
compilePlanOperation.isIfNotExists(),
compilePlanOperation.getOperation());
return TableResultImpl.TABLE_RESULT_OK;
} else if (operation instanceof CompileAndExecutePlanOperation) {
CompileAndExecutePlanOperation compileAndExecutePlanOperation =
(CompileAndExecutePlanOperation) operation;
CompiledPlan compiledPlan =
compilePlanAndWrite(
compileAndExecutePlanOperation.getFilePath(),
true,
compileAndExecutePlanOperation.getOperation());
return (TableResultInternal) compiledPlan.execute();
} else if (operation instanceof AnalyzeTableOperation) {
if (isStreamingMode) {
throw new TableException("ANALYZE TABLE is not supported for streaming mode now");
}
try {
return AnalyzeTableUtil.analyzeTable(this, (AnalyzeTableOperation) operation);
} catch (Exception e) {
throw new TableException("Failed to execute ANALYZE TABLE command", e);
}
} else if (operation instanceof NopOperation) {
return TableResultImpl.TABLE_RESULT_OK;
} else {
throw new TableException(UNSUPPORTED_QUERY_IN_EXECUTE_SQL_MSG);
}
}
private TableResultInternal createCatalog(CreateCatalogOperation operation) {
String exMsg = getDDLOpExecuteErrorMsg(operation.asSummaryString());
try {
String catalogName = operation.getCatalogName();
Map<String, String> properties = operation.getProperties();
Catalog catalog =
FactoryUtil.createCatalog(
catalogName,
properties,
tableConfig,
resourceManager.getUserClassLoader());
catalogManager.registerCatalog(catalogName, catalog);
return TableResultImpl.TABLE_RESULT_OK;
} catch (CatalogException e) {
throw new ValidationException(exMsg, e);
}
}
private TableResultInternal loadModule(LoadModuleOperation operation) {
final String exMsg = getDDLOpExecuteErrorMsg(operation.asSummaryString());
try {
final Module module =
FactoryUtil.createModule(
operation.getModuleName(),
operation.getOptions(),
tableConfig,
resourceManager.getUserClassLoader());
moduleManager.loadModule(operation.getModuleName(), module);
return TableResultImpl.TABLE_RESULT_OK;
} catch (ValidationException e) {
throw new ValidationException(String.format("%s. %s", exMsg, e.getMessage()), e);
} catch (Exception e) {
throw new TableException(String.format("%s. %s", exMsg, e.getMessage()), e);
}
}
private TableResultInternal unloadModule(UnloadModuleOperation operation) {
String exMsg = getDDLOpExecuteErrorMsg(operation.asSummaryString());
try {
moduleManager.unloadModule(operation.getModuleName());
return TableResultImpl.TABLE_RESULT_OK;
} catch (ValidationException e) {
throw new ValidationException(String.format("%s. %s", exMsg, e.getMessage()), e);
}
}
private TableResultInternal useModules(UseModulesOperation operation) {
String exMsg = getDDLOpExecuteErrorMsg(operation.asSummaryString());
try {
moduleManager.useModules(operation.getModuleNames().toArray(new String[0]));
return TableResultImpl.TABLE_RESULT_OK;
} catch (ValidationException e) {
throw new ValidationException(String.format("%s. %s", exMsg, e.getMessage()), e);
}
}
private TableResultInternal buildShowResult(String columnName, String[] objects) {
return buildResult(
new String[] {columnName},
new DataType[] {DataTypes.STRING()},
Arrays.stream(objects).map((c) -> new String[] {c}).toArray(String[][]::new));
}
private TableResultInternal buildDescribeResult(ResolvedSchema schema) {
Object[][] rows = buildTableColumns(schema);
boolean nonComments = isSchemaNonColumnComments(schema);
return buildResult(
generateTableColumnsNames(nonComments),
generateTableColumnsDataTypes(nonComments),
rows);
}
private DataType[] generateTableColumnsDataTypes(boolean nonComments) {
final ArrayList<DataType> result =
new ArrayList<>(
Arrays.asList(
DataTypes.STRING(),
DataTypes.STRING(),
DataTypes.BOOLEAN(),
DataTypes.STRING(),
DataTypes.STRING(),
DataTypes.STRING()));
if (!nonComments) {
result.add(DataTypes.STRING());
}
return result.toArray(new DataType[0]);
}
private String[] generateTableColumnsNames(boolean nonComments) {
final ArrayList<String> result =
new ArrayList<>(
Arrays.asList("name", "type", "null", "key", "extras", "watermark"));
if (!nonComments) {
result.add("comment");
}
return result.toArray(new String[0]);
}
private TableResultInternal buildShowTablesResult(
String[] tableList, ShowTablesOperation showTablesOp) {
String[] rows = tableList.clone();
if (showTablesOp.isUseLike()) {
rows =
Arrays.stream(tableList)
.filter(
row ->
showTablesOp.isNotLike()
!= SqlLikeUtils.like(
row,
showTablesOp.getLikePattern(),
"\\"))
.toArray(String[]::new);
}
return buildShowResult("table name", rows);
}
private TableResultInternal buildShowColumnsResult(
ResolvedSchema schema, ShowColumnsOperation showColumnsOp) {
Object[][] rows = buildTableColumns(schema);
if (showColumnsOp.isUseLike()) {
rows =
Arrays.stream(rows)
.filter(
row ->
showColumnsOp.isNotLike()
!= SqlLikeUtils.like(
row[0].toString(),
showColumnsOp.getLikePattern(),
"\\"))
.toArray(Object[][]::new);
}
boolean nonComments = isSchemaNonColumnComments(schema);
return buildResult(
generateTableColumnsNames(nonComments),
generateTableColumnsDataTypes(nonComments),
rows);
}
private TableResultInternal buildShowFullModulesResult(ModuleEntry[] moduleEntries) {
Object[][] rows =
Arrays.stream(moduleEntries)
.map(entry -> new Object[] {entry.name(), entry.used()})
.toArray(Object[][]::new);
return buildResult(
new String[] {"module name", "used"},
new DataType[] {DataTypes.STRING(), DataTypes.BOOLEAN()},
rows);
}
private Object[][] buildTableColumns(ResolvedSchema schema) {
Map<String, String> fieldToWatermark =
schema.getWatermarkSpecs().stream()
.collect(
Collectors.toMap(
WatermarkSpec::getRowtimeAttribute,
spec -> spec.getWatermarkExpression().asSummaryString()));
Map<String, String> fieldToPrimaryKey = new HashMap<>();
schema.getPrimaryKey()
.ifPresent(
(p) -> {
List<String> columns = p.getColumns();
columns.forEach(
(c) ->
fieldToPrimaryKey.put(
c,
String.format(
"PRI(%s)",
String.join(", ", columns))));
});
boolean nonComments = isSchemaNonColumnComments(schema);
return schema.getColumns().stream()
.map(
(c) -> {
final LogicalType logicalType = c.getDataType().getLogicalType();
final ArrayList