// org.apache.hive.service.cli.operation.SQLOperation
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hive.service.cli.operation;
import java.io.IOException;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.security.PrivilegedExceptionAction;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.conf.HiveConf;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.metastore.api.FieldSchema;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.metastore.api.Schema;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.ql.CommandNeedRetryException;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.ql.Driver;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.ql.exec.ExplainTask;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.ql.exec.Task;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.ql.metadata.Hive;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.ql.metadata.HiveException;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.ql.parse.VariableSubstitution;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.ql.session.SessionState;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.serde.serdeConstants;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.serde2.SerDe;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.serde2.SerDeException;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.serde2.SerDeUtils;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.serde2.objectinspector.StructField;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import com.facebook.presto.hive.$internal.org.apache.hadoop.hive.shims.ShimLoader;
import com.facebook.presto.hive.$internal.org.apache.hadoop.io.BytesWritable;
import com.facebook.presto.hive.$internal.org.apache.hadoop.security.UserGroupInformation;
import org.apache.hive.service.cli.FetchOrientation;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.OperationState;
import org.apache.hive.service.cli.RowSet;
import org.apache.hive.service.cli.RowSetFactory;
import org.apache.hive.service.cli.TableSchema;
import org.apache.hive.service.cli.session.HiveSession;
/**
* SQLOperation.
*
*/
public class SQLOperation extends ExecuteStatementOperation {
// Driver that compiles and executes the statement; created in prepare().
private Driver driver = null;
// Result of the most recent compile/run call on the driver.
private CommandProcessorResponse response;
// Thrift-facing schema of the result set; set in prepare() when the plan has a FetchTask or ExplainTask.
private TableSchema resultSchema = null;
// Raw schema returned by Driver.getSchema() after compilation.
private Schema mResultSchema = null;
// SerDe for deserializing fetched rows — presumably lazily initialized by fetch code; usage not visible here, confirm.
private SerDe serde = null;
// NOTE(review): looks like this tracks whether fetching has begun (for fetch orientation) — usage not visible here, confirm.
private boolean fetchStarted = false;
/**
 * Creates a SQL operation bound to the given session.
 *
 * @param parentSession   session that owns this operation
 * @param statement       the SQL statement to execute
 * @param confOverlay     per-operation configuration overrides
 * @param runInBackground whether to execute asynchronously in a background thread
 */
// Restored the parameterized Map type that was stripped by HTML extraction
// (erasure-identical, so the constructor signature is unchanged for callers).
public SQLOperation(HiveSession parentSession, String statement,
    Map<String, String> confOverlay, boolean runInBackground) {
  // TODO: call setRemoteUser in ExecuteStatementOperation or higher.
  super(parentSession, statement, confOverlay, runInBackground);
}
/***
* Compile the query and extract metadata
* @param sqlOperationConf
* @throws HiveSQLException
*/
public void prepare(HiveConf sqlOperationConf) throws HiveSQLException {
setState(OperationState.RUNNING);
try {
driver = new Driver(sqlOperationConf, getParentSession().getUserName());
// In Hive server mode, we are not able to retry in the FetchTask
// case, when calling fetch queries since execute() has returned.
// For now, we disable the test attempts.
driver.setTryCount(Integer.MAX_VALUE);
String subStatement = new VariableSubstitution().substitute(sqlOperationConf, statement);
response = driver.compileAndRespond(subStatement);
if (0 != response.getResponseCode()) {
throw new HiveSQLException("Error while compiling statement: "
+ response.getErrorMessage(), response.getSQLState(), response.getResponseCode());
}
mResultSchema = driver.getSchema();
// hasResultSet should be true only if the query has a FetchTask
// "explain" is an exception for now
if(driver.getPlan().getFetchTask() != null) {
//Schema has to be set
if (mResultSchema == null || !mResultSchema.isSetFieldSchemas()) {
throw new HiveSQLException("Error compiling query: Schema and FieldSchema " +
"should be set when query plan has a FetchTask");
}
resultSchema = new TableSchema(mResultSchema);
setHasResultSet(true);
} else {
setHasResultSet(false);
}
// Set hasResultSet true if the plan has ExplainTask
// TODO explain should use a FetchTask for reading
for (Task extends Serializable> task: driver.getPlan().getRootTasks()) {
if (task.getClass() == ExplainTask.class) {
resultSchema = new TableSchema(mResultSchema);
setHasResultSet(true);
break;
}
}
} catch (HiveSQLException e) {
setState(OperationState.ERROR);
throw e;
} catch (Exception e) {
setState(OperationState.ERROR);
throw new HiveSQLException("Error running query: " + e.toString(), e);
}
}
/**
 * Executes the previously compiled statement via the driver and updates
 * the operation state (FINISHED on success, ERROR on failure).
 *
 * @param sqlOperationConf configuration for this operation (unused directly;
 *        the driver was already configured in prepare())
 * @throws HiveSQLException if the driver reports a non-zero response code
 */
private void runInternal(HiveConf sqlOperationConf) throws HiveSQLException {
  try {
    // Fetch-query retries cannot work in Hive server mode once execute()
    // has returned, so retries are effectively disabled here.
    driver.setTryCount(Integer.MAX_VALUE);
    response = driver.run();
    int rc = response.getResponseCode();
    if (rc != 0) {
      throw new HiveSQLException("Error while processing statement: "
          + response.getErrorMessage(), response.getSQLState(), rc);
    }
  } catch (HiveSQLException e) {
    // A cancel issued from another thread makes Driver#run return a
    // non-zero response code; when the state is already CANCELED we
    // return quietly instead of surfacing the error.
    if (getStatus().getState() == OperationState.CANCELED) {
      return;
    }
    setState(OperationState.ERROR);
    throw e;
  } catch (Exception e) {
    setState(OperationState.ERROR);
    throw new HiveSQLException("Error running query: " + e.toString(), e);
  }
  setState(OperationState.FINISHED);
}
@Override
public void run() throws HiveSQLException {
setState(OperationState.PENDING);
final HiveConf opConfig = getConfigForOperation();
prepare(opConfig);
if (!shouldRunAsync()) {
runInternal(opConfig);
} else {
final SessionState parentSessionState = SessionState.get();
// current Hive object needs to be set in async thread in case of remote metastore.
// The metastore client in Hive is associated with right user
final Hive sessionHive = getCurrentHive();
// current UGI will get used by metastore when metastore is in embedded mode
// so this needs to get passed to the new async thread
final UserGroupInformation currentUGI = getCurrentUGI(opConfig);
// Runnable impl to call runInternal asynchronously,
// from a different thread
Runnable backgroundOperation = new Runnable() {
@Override
public void run() {
PrivilegedExceptionAction
// © 2015 - 2025 Weber Informatics LLC | Privacy Policy