All downloads are free. Search and download functionality uses the official Maven repository.

io.trino.plugin.hive.metastore.thrift.ThriftMetastore Maven / Gradle / Ivy

There is a newer version: 468
Show the newest version
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.trino.plugin.hive.metastore.thrift;

import io.trino.plugin.hive.HivePartition;
import io.trino.plugin.hive.PartitionStatistics;
import io.trino.plugin.hive.acid.AcidOperation;
import io.trino.plugin.hive.acid.AcidTransaction;
import io.trino.plugin.hive.metastore.AcidTransactionOwner;
import io.trino.plugin.hive.metastore.HivePrincipal;
import io.trino.plugin.hive.metastore.HivePrivilegeInfo;
import io.trino.plugin.hive.metastore.HivePrivilegeInfo.HivePrivilege;
import io.trino.plugin.hive.metastore.PartitionWithStatistics;
import io.trino.spi.TrinoException;
import io.trino.spi.connector.SchemaTableName;
import io.trino.spi.connector.TableNotFoundException;
import io.trino.spi.predicate.TupleDomain;
import io.trino.spi.security.RoleGrant;
import io.trino.spi.statistics.ColumnStatisticType;
import io.trino.spi.type.Type;
import org.apache.hadoop.hive.metastore.api.DataOperationType;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.Table;

import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalLong;
import java.util.Set;
import java.util.function.Function;

import static io.trino.plugin.hive.HiveErrorCode.HIVE_INVALID_METADATA;

public interface ThriftMetastore
{
    void createDatabase(Database database);

    void dropDatabase(String databaseName, boolean deleteData);

    void alterDatabase(String databaseName, Database database);

    void createTable(Table table);

    void dropTable(String databaseName, String tableName, boolean deleteData);

    void alterTable(String databaseName, String tableName, Table table);

    void alterTransactionalTable(Table table, long transactionId, long writeId);

    List getAllDatabases();

    List getAllTables(String databaseName);

    List getTablesWithParameter(String databaseName, String parameterKey, String parameterValue);

    List getAllViews(String databaseName);

    Optional getDatabase(String databaseName);

    void addPartitions(String databaseName, String tableName, List partitions);

    void dropPartition(String databaseName, String tableName, List parts, boolean deleteData);

    void alterPartition(String databaseName, String tableName, PartitionWithStatistics partition);

    Optional> getPartitionNamesByFilter(String databaseName, String tableName, List columnNames, TupleDomain partitionKeysFilter);

    Optional getPartition(String databaseName, String tableName, List partitionValues);

    List getPartitionsByNames(String databaseName, String tableName, List partitionNames);

    Optional getTable(String databaseName, String tableName);

    Set getSupportedColumnStatistics(Type type);

    PartitionStatistics getTableStatistics(Table table);

    Map getPartitionStatistics(Table table, List partitions);

    void updateTableStatistics(String databaseName, String tableName, AcidTransaction transaction, Function update);

    void updatePartitionStatistics(Table table, String partitionName, Function update);

    void createRole(String role, String grantor);

    void dropRole(String role);

    Set listRoles();

    void grantRoles(Set roles, Set grantees, boolean adminOption, HivePrincipal grantor);

    void revokeRoles(Set roles, Set grantees, boolean adminOption, HivePrincipal grantor);

    Set listGrantedPrincipals(String role);

    Set listRoleGrants(HivePrincipal principal);

    void grantTablePrivileges(String databaseName, String tableName, String tableOwner, HivePrincipal grantee, HivePrincipal grantor, Set privileges, boolean grantOption);

    void revokeTablePrivileges(String databaseName, String tableName, String tableOwner, HivePrincipal grantee, HivePrincipal grantor, Set privileges, boolean grantOption);

    /**
     * @param tableOwner
     * @param principal when empty, all table privileges are returned
     */
    Set listTablePrivileges(String databaseName, String tableName, Optional tableOwner, Optional principal);

    default Optional> getFields(String databaseName, String tableName)
    {
        Optional
table = getTable(databaseName, tableName); if (table.isEmpty()) { throw new TableNotFoundException(new SchemaTableName(databaseName, tableName)); } if (table.get().getSd() == null) { throw new TrinoException(HIVE_INVALID_METADATA, "Table is missing storage descriptor"); } return Optional.of(table.get().getSd().getCols()); } default long openTransaction(AcidTransactionOwner transactionOwner) { throw new UnsupportedOperationException(); } default void commitTransaction(long transactionId) { throw new UnsupportedOperationException(); } default void abortTransaction(long transactionId) { throw new UnsupportedOperationException(); } default void sendTransactionHeartbeat(long transactionId) { throw new UnsupportedOperationException(); } default void acquireSharedReadLock( AcidTransactionOwner transactionOwner, String queryId, long transactionId, List fullTables, List partitions) { throw new UnsupportedOperationException(); } default String getValidWriteIds(List tables, long currentTransactionId) { throw new UnsupportedOperationException(); } default Optional getConfigValue(String name) { return Optional.empty(); } default long allocateWriteId(String dbName, String tableName, long transactionId) { throw new UnsupportedOperationException(); } default void acquireTableWriteLock( AcidTransactionOwner transactionOwner, String queryId, long transactionId, String dbName, String tableName, DataOperationType operation, boolean isDynamicPartitionWrite) { throw new UnsupportedOperationException(); } default long acquireTableExclusiveLock( AcidTransactionOwner transactionOwner, String queryId, String dbName, String tableName) { throw new UnsupportedOperationException(); } default void releaseTableLock(long lockId) { throw new UnsupportedOperationException(); } default void updateTableWriteId(String dbName, String tableName, long transactionId, long writeId, OptionalLong rowCountChange) { throw new UnsupportedOperationException(); } default void alterPartitions(String dbName, String 
tableName, List partitions, long writeId) { throw new UnsupportedOperationException(); } default void addDynamicPartitions(String dbName, String tableName, List partitionNames, long transactionId, long writeId, AcidOperation operation) { throw new UnsupportedOperationException(); } }