
// org.apache.hive.service.cli.session.HiveSessionImpl
// From hive-apache-jdbc, a shaded version of the Apache Hive JDBC driver for Presto.
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hive.service.cli.session;
import java.io.IOException;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.facebook.presto.hive.$internal.org.apache.commons.logging.Log;
import com.facebook.presto.hive.$internal.org.apache.commons.logging.LogFactory;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.conf.HiveConf;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.metastore.IMetaStoreClient;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.metastore.api.MetaException;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.ql.exec.FetchFormatter;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.ql.exec.ListSinkOperator;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.ql.history.HiveHistory;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hive.common.util.HiveVersionInfo;
import org.apache.hive.service.auth.HiveAuthFactory;
import org.apache.hive.service.cli.FetchOrientation;
import org.apache.hive.service.cli.GetInfoType;
import org.apache.hive.service.cli.GetInfoValue;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.OperationHandle;
import org.apache.hive.service.cli.RowSet;
import org.apache.hive.service.cli.SessionHandle;
import org.apache.hive.service.cli.TableSchema;
import org.apache.hive.service.cli.operation.ExecuteStatementOperation;
import org.apache.hive.service.cli.operation.GetCatalogsOperation;
import org.apache.hive.service.cli.operation.GetColumnsOperation;
import org.apache.hive.service.cli.operation.GetFunctionsOperation;
import org.apache.hive.service.cli.operation.GetSchemasOperation;
import org.apache.hive.service.cli.operation.GetTableTypesOperation;
import org.apache.hive.service.cli.operation.GetTypeInfoOperation;
import org.apache.hive.service.cli.operation.MetadataOperation;
import org.apache.hive.service.cli.operation.OperationManager;
import org.apache.hive.service.cli.thrift.TProtocolVersion;
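
// Illustrative lifecycle sketch (not part of the original source): in
// HiveServer2 a session is normally created and wired up by the
// SessionManager rather than instantiated directly. Conceptually:
//
//   HiveSessionImpl session = new HiveSessionImpl(
//       TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V6,   // protocol assumed
//       "alice", "secret", serverHiveConf, sessionConf, "10.0.0.1");
//   session.setSessionManager(sessionManager);       // supplied by the server
//   session.setOperationManager(operationManager);   // supplied by the server
//   session.open();
//   OperationHandle handle = session.executeStatement("SELECT 1", confOverlay);
//   RowSet rows = session.fetchResults(handle);
//   session.close();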
/**
* HiveSession
*
*/
public class HiveSessionImpl implements HiveSession {
private final SessionHandle sessionHandle;
private String username;
private final String password;
private final HiveConf hiveConf;
private final SessionState sessionState;
private String ipAddress;
private static final String FETCH_WORK_SERDE_CLASS =
"com.facebook.presto.hive.$internal.org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe";
private static final Log LOG = LogFactory.getLog(HiveSessionImpl.class);
private SessionManager sessionManager;
private OperationManager operationManager;
private IMetaStoreClient metastoreClient = null;
  private final Set<OperationHandle> opHandleSet = new HashSet<OperationHandle>();
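  /**
   * Builds a session-local copy of the server HiveConf, applies any
   * client-supplied overrides, and starts a SessionState for this user.
   */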
  public HiveSessionImpl(TProtocolVersion protocol, String username, String password,
      HiveConf serverhiveConf, Map<String, String> sessionConfMap, String ipAddress) {
this.username = username;
this.password = password;
this.sessionHandle = new SessionHandle(protocol);
this.hiveConf = new HiveConf(serverhiveConf);
this.ipAddress = ipAddress;
    // set conf properties specified by the user from the client side
    if (sessionConfMap != null) {
      for (Map.Entry<String, String> entry : sessionConfMap.entrySet()) {
hiveConf.verifyAndSet(entry.getKey(), entry.getValue());
}
}
// set an explicit session name to control the download directory name
hiveConf.set(ConfVars.HIVESESSIONID.varname,
sessionHandle.getHandleIdentifier().toString());
// use thrift transportable formatter
hiveConf.set(ListSinkOperator.OUTPUT_FORMATTER,
FetchFormatter.ThriftFormatter.class.getName());
hiveConf.setInt(ListSinkOperator.OUTPUT_PROTOCOL, protocol.getValue());
sessionState = new SessionState(hiveConf, username);
sessionState.setIsHiveServerQuery(true);
SessionState.start(sessionState);
}
@Override
public TProtocolVersion getProtocolVersion() {
return sessionHandle.getProtocolVersion();
}
@Override
public SessionManager getSessionManager() {
return sessionManager;
}
@Override
public void setSessionManager(SessionManager sessionManager) {
this.sessionManager = sessionManager;
}
private OperationManager getOperationManager() {
return operationManager;
}
@Override
public void setOperationManager(OperationManager operationManager) {
this.operationManager = operationManager;
}
@Override
public void open() {
SessionState.start(sessionState);
}
protected synchronized void acquire() throws HiveSQLException {
    // Make sure this connection's session state is stored in the
    // thread-local for sessions before any work is done on its behalf.
SessionState.setCurrentSessionState(sessionState);
}
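  // Detach the session state from the current thread once an operation
  // completes, so the worker thread can serve other sessions.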
protected synchronized void release() {
assert sessionState != null;
SessionState.detachSession();
}
@Override
public SessionHandle getSessionHandle() {
return sessionHandle;
}
@Override
public String getUsername() {
return username;
}
@Override
public String getPassword() {
return password;
}
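  // The fetch-task output SerDe is pinned to LazySimpleSerDe on every call so
  // that fetched results are serialized in a form the CLI service expects.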
@Override
public HiveConf getHiveConf() {
hiveConf.setVar(HiveConf.ConfVars.HIVEFETCHOUTPUTSERDE, FETCH_WORK_SERDE_CLASS);
return hiveConf;
}
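  // Lazily creates a session-private metastore client on first use; it is
  // closed when the session is closed (see close()).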
@Override
public IMetaStoreClient getMetaStoreClient() throws HiveSQLException {
if (metastoreClient == null) {
try {
metastoreClient = new HiveMetaStoreClient(getHiveConf());
} catch (MetaException e) {
throw new HiveSQLException(e);
}
}
return metastoreClient;
}
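  // Reports server metadata (name, version, identifier length limits) for the
  // requested GetInfoType; unsupported types raise HiveSQLException.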
@Override
public GetInfoValue getInfo(GetInfoType getInfoType)
throws HiveSQLException {
acquire();
try {
switch (getInfoType) {
case CLI_SERVER_NAME:
return new GetInfoValue("Hive");
case CLI_DBMS_NAME:
return new GetInfoValue("Apache Hive");
case CLI_DBMS_VER:
return new GetInfoValue(HiveVersionInfo.getVersion());
case CLI_MAX_COLUMN_NAME_LEN:
return new GetInfoValue(128);
case CLI_MAX_SCHEMA_NAME_LEN:
return new GetInfoValue(128);
case CLI_MAX_TABLE_NAME_LEN:
return new GetInfoValue(128);
case CLI_TXN_CAPABLE:
default:
throw new HiveSQLException("Unrecognized GetInfoType value: " + getInfoType.toString());
}
} finally {
release();
}
}
@Override
  public OperationHandle executeStatement(String statement, Map<String, String> confOverlay)
throws HiveSQLException {
return executeStatementInternal(statement, confOverlay, false);
}
@Override
  public OperationHandle executeStatementAsync(String statement, Map<String, String> confOverlay)
throws HiveSQLException {
return executeStatementInternal(statement, confOverlay, true);
}
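  // Shared path for synchronous and asynchronous execution: on failure, a
  // synchronous operation is closed immediately, while an async operation's
  // handle is retained so the client can still query its error state.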
  private OperationHandle executeStatementInternal(String statement, Map<String, String> confOverlay,
      boolean runAsync)
throws HiveSQLException {
acquire();
OperationManager operationManager = getOperationManager();
ExecuteStatementOperation operation = operationManager
.newExecuteStatementOperation(getSession(), statement, confOverlay, runAsync);
OperationHandle opHandle = operation.getHandle();
try {
operation.run();
opHandleSet.add(opHandle);
return opHandle;
} catch (HiveSQLException e) {
// Cleanup opHandle in case the query is synchronous
// Async query needs to retain and pass back the opHandle for error reporting
if (!runAsync) {
operationManager.closeOperation(opHandle);
}
throw e;
} finally {
release();
}
}
@Override
public OperationHandle getTypeInfo()
throws HiveSQLException {
acquire();
OperationManager operationManager = getOperationManager();
GetTypeInfoOperation operation = operationManager.newGetTypeInfoOperation(getSession());
OperationHandle opHandle = operation.getHandle();
try {
operation.run();
opHandleSet.add(opHandle);
return opHandle;
} catch (HiveSQLException e) {
operationManager.closeOperation(opHandle);
throw e;
} finally {
release();
}
}
@Override
public OperationHandle getCatalogs()
throws HiveSQLException {
acquire();
OperationManager operationManager = getOperationManager();
GetCatalogsOperation operation = operationManager.newGetCatalogsOperation(getSession());
OperationHandle opHandle = operation.getHandle();
try {
operation.run();
opHandleSet.add(opHandle);
return opHandle;
} catch (HiveSQLException e) {
operationManager.closeOperation(opHandle);
throw e;
} finally {
release();
}
}
@Override
public OperationHandle getSchemas(String catalogName, String schemaName)
throws HiveSQLException {
acquire();
OperationManager operationManager = getOperationManager();
GetSchemasOperation operation =
operationManager.newGetSchemasOperation(getSession(), catalogName, schemaName);
OperationHandle opHandle = operation.getHandle();
try {
operation.run();
opHandleSet.add(opHandle);
return opHandle;
} catch (HiveSQLException e) {
operationManager.closeOperation(opHandle);
throw e;
} finally {
release();
}
}
@Override
  public OperationHandle getTables(String catalogName, String schemaName, String tableName,
      List<String> tableTypes)
throws HiveSQLException {
acquire();
OperationManager operationManager = getOperationManager();
MetadataOperation operation =
operationManager.newGetTablesOperation(getSession(), catalogName, schemaName, tableName, tableTypes);
OperationHandle opHandle = operation.getHandle();
try {
operation.run();
opHandleSet.add(opHandle);
return opHandle;
} catch (HiveSQLException e) {
operationManager.closeOperation(opHandle);
throw e;
} finally {
release();
}
}
@Override
public OperationHandle getTableTypes()
throws HiveSQLException {
acquire();
OperationManager operationManager = getOperationManager();
GetTableTypesOperation operation = operationManager.newGetTableTypesOperation(getSession());
OperationHandle opHandle = operation.getHandle();
try {
operation.run();
opHandleSet.add(opHandle);
return opHandle;
} catch (HiveSQLException e) {
operationManager.closeOperation(opHandle);
throw e;
} finally {
release();
}
}
@Override
public OperationHandle getColumns(String catalogName, String schemaName,
String tableName, String columnName) throws HiveSQLException {
acquire();
OperationManager operationManager = getOperationManager();
GetColumnsOperation operation = operationManager.newGetColumnsOperation(getSession(),
catalogName, schemaName, tableName, columnName);
OperationHandle opHandle = operation.getHandle();
try {
operation.run();
opHandleSet.add(opHandle);
return opHandle;
} catch (HiveSQLException e) {
operationManager.closeOperation(opHandle);
throw e;
} finally {
release();
}
}
@Override
public OperationHandle getFunctions(String catalogName, String schemaName, String functionName)
throws HiveSQLException {
acquire();
OperationManager operationManager = getOperationManager();
GetFunctionsOperation operation = operationManager
.newGetFunctionsOperation(getSession(), catalogName, schemaName, functionName);
OperationHandle opHandle = operation.getHandle();
try {
operation.run();
opHandleSet.add(opHandle);
return opHandle;
} catch (HiveSQLException e) {
operationManager.closeOperation(opHandle);
throw e;
} finally {
release();
}
}
@Override
public void close() throws HiveSQLException {
try {
acquire();
      /*
       * For metadata operations like getTables(), getColumns(), etc., the
       * session allocates a private metastore client, which should be
       * closed at the end of the session.
       */
if (metastoreClient != null) {
metastoreClient.close();
}
// Iterate through the opHandles and close their operations
for (OperationHandle opHandle : opHandleSet) {
operationManager.closeOperation(opHandle);
}
opHandleSet.clear();
HiveHistory hiveHist = sessionState.getHiveHistory();
if (null != hiveHist) {
hiveHist.closeStream();
}
sessionState.close();
} catch (IOException ioe) {
throw new HiveSQLException("Failure to close", ioe);
} finally {
release();
}
}
@Override
public SessionState getSessionState() {
return sessionState;
}
@Override
public String getUserName() {
return username;
}
@Override
public void setUserName(String userName) {
this.username = userName;
}
@Override
public void cancelOperation(OperationHandle opHandle) throws HiveSQLException {
acquire();
try {
sessionManager.getOperationManager().cancelOperation(opHandle);
} finally {
release();
}
}
@Override
public void closeOperation(OperationHandle opHandle) throws HiveSQLException {
acquire();
try {
operationManager.closeOperation(opHandle);
opHandleSet.remove(opHandle);
} finally {
release();
}
}
@Override
public TableSchema getResultSetMetadata(OperationHandle opHandle) throws HiveSQLException {
acquire();
try {
return sessionManager.getOperationManager().getOperationResultSetSchema(opHandle);
} finally {
release();
}
}
@Override
public RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientation, long maxRows)
throws HiveSQLException {
acquire();
try {
return sessionManager.getOperationManager()
.getOperationNextRowSet(opHandle, orientation, maxRows);
} finally {
release();
}
}
@Override
public RowSet fetchResults(OperationHandle opHandle) throws HiveSQLException {
acquire();
try {
return sessionManager.getOperationManager().getOperationNextRowSet(opHandle);
} finally {
release();
}
}
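  // Returns the session to associate with new operations; subclasses may
  // override this to hand out a wrapper (e.g. a proxy-user session).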
protected HiveSession getSession() {
return this;
}
@Override
public String getIpAddress() {
return ipAddress;
}
@Override
public void setIpAddress(String ipAddress) {
this.ipAddress = ipAddress;
}
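  // The delegation token operations below first verify that this session's
  // user is allowed to act on behalf of the token owner (proxy access), then
  // delegate the actual token work to the HiveAuthFactory.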
@Override
public String getDelegationToken(HiveAuthFactory authFactory, String owner, String renewer)
throws HiveSQLException {
HiveAuthFactory.verifyProxyAccess(getUsername(), owner, getIpAddress(), getHiveConf());
return authFactory.getDelegationToken(owner, renewer);
}
@Override
public void cancelDelegationToken(HiveAuthFactory authFactory, String tokenStr)
throws HiveSQLException {
HiveAuthFactory.verifyProxyAccess(getUsername(), getUserFromToken(authFactory, tokenStr),
getIpAddress(), getHiveConf());
authFactory.cancelDelegationToken(tokenStr);
}
@Override
public void renewDelegationToken(HiveAuthFactory authFactory, String tokenStr)
throws HiveSQLException {
HiveAuthFactory.verifyProxyAccess(getUsername(), getUserFromToken(authFactory, tokenStr),
getIpAddress(), getHiveConf());
authFactory.renewDelegationToken(tokenStr);
}
// extract the real user from the given token string
private String getUserFromToken(HiveAuthFactory authFactory, String tokenStr) throws HiveSQLException {
return authFactory.getUserFromToken(tokenStr);
}
}