/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.plan;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.StringInternUtils;
import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
import org.apache.hadoop.hive.ql.plan.Explain.Level;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.OutputFormat;
import org.apache.hive.common.util.HiveStringUtils;
import org.apache.hive.common.util.ReflectionUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Serializable;
import java.util.Enumeration;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;
/**
 * TableDesc.
 *
 * Describes a table at plan/execution time: its input and output file
 * formats, the serde used to read and write rows, and the table properties.
 */
public class TableDesc implements Serializable, Cloneable {
private static final Logger LOG = LoggerFactory.getLogger(TableDesc.class);
private static final long serialVersionUID = 1L;
private Class<? extends InputFormat> inputFileFormatClass;
private Class<? extends OutputFormat> outputFileFormatClass;
private java.util.Properties properties;
private Map<String, String> jobProperties;
private Map<String, String> jobSecrets;
public TableDesc() {
}
/**
* @param inputFormatClass the InputFormat implementation used to read the table's data.
* @param outputFormatClass the OutputFormat implementation used to write the table's data.
* @param properties must contain the serde class name associated with this table.
*/
public TableDesc(
final Class<? extends InputFormat> inputFormatClass,
final Class<?> outputFormatClass, final Properties properties) {
this.inputFileFormatClass = inputFormatClass;
outputFileFormatClass = HiveFileFormatUtils
.getOutputFormatSubstitute(outputFormatClass);
setProperties(properties);
}
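/**
 * Loads the deserializer (SerDe) class named by this table's
 * {@code serialization.lib} property, using the session-specified class loader.
 * A missing class is reported as a RuntimeException.
 */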
public Class<? extends Deserializer> getDeserializerClass() {
try {
return (Class<? extends Deserializer>) Class.forName(
getSerdeClassName(), true, Utilities.getSessionSpecifiedClassLoader());
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
}
public Class<? extends InputFormat> getInputFileFormatClass() {
return inputFileFormatClass;
}
public Deserializer getDeserializer() throws Exception {
return getDeserializer(null);
}
/**
* Return a deserializer object corresponding to the tableDesc.
*/
public Deserializer getDeserializer(Configuration conf) throws Exception {
return getDeserializer(conf, false);
}
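/**
 * Instantiates the deserializer and initializes it with this table's properties.
 * When {@code ignoreError} is true, SerDe initialization errors are not checked.
 */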
public Deserializer getDeserializer(Configuration conf, boolean ignoreError) throws Exception {
Deserializer de = ReflectionUtil.newInstance(
getDeserializerClass().asSubclass(Deserializer.class), conf);
if (ignoreError) {
SerDeUtils.initializeSerDeWithoutErrorCheck(de, conf, properties, null);
} else {
SerDeUtils.initializeSerDe(de, conf, properties, null);
}
return de;
}
public void setInputFileFormatClass(
final Class<? extends InputFormat> inputFileFormatClass) {
this.inputFileFormatClass = inputFileFormatClass;
}
public Class<? extends OutputFormat> getOutputFileFormatClass() {
return outputFileFormatClass;
}
public void setOutputFileFormatClass(Class<?> outputFileFormatClass) {
this.outputFileFormatClass = HiveFileFormatUtils
.getOutputFormatSubstitute(outputFileFormatClass);
}
public Properties getProperties() {
return properties;
}
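/**
 * Table properties formatted for EXPLAIN EXTENDED output.
 */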
@Explain(displayName = "properties", explainLevels = { Level.EXTENDED })
public Map getPropertiesExplain() {
return HiveStringUtils.getPropertiesExplain(getProperties());
}
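/**
 * Sets the table properties; string values are interned to reduce memory use
 * when many descriptors carry the same values.
 */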
public void setProperties(final Properties properties) {
StringInternUtils.internValuesInMap((Map) properties);
this.properties = properties;
}
public void setJobProperties(Map<String, String> jobProperties) {
this.jobProperties = jobProperties;
}
@Explain(displayName = "jobProperties", explainLevels = { Level.EXTENDED })
public Map<String, String> getJobProperties() {
return jobProperties;
}
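/**
 * Sensitive job-level key/value pairs; unlike {@link #getJobProperties()},
 * they are not annotated for EXPLAIN output.
 */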
public void setJobSecrets(Map<String, String> jobSecrets) {
this.jobSecrets = jobSecrets;
}
public Map<String, String> getJobSecrets() {
return jobSecrets;
}
/**
* @return the serdeClassName
*/
@Explain(displayName = "serde")
public String getSerdeClassName() {
return properties.getProperty(serdeConstants.SERIALIZATION_LIB);
}
@Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
public String getTableName() {
return properties.getProperty(hive_metastoreConstants.META_TABLE_NAME);
}
@Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
public String getDbName() {
return properties.getProperty(hive_metastoreConstants.META_TABLE_DB);
}
@Explain(displayName = "input format")
public String getInputFileFormatClassName() {
return getInputFileFormatClass().getName();
}
@Explain(displayName = "output format")
public String getOutputFileFormatClassName() {
return getOutputFileFormatClass().getName();
}
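/**
 * A table is non-native when a storage handler is declared in its properties
 * (META_TABLE_STORAGE), i.e. its data is not managed through Hive's native file formats.
 */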
public boolean isNonNative() {
return (properties.getProperty(hive_metastoreConstants.META_TABLE_STORAGE) != null);
}
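/**
 * Bucketing version recorded in the table properties (TABLE_BUCKETING_VERSION).
 */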
public int getBucketingVersion() {
return Utilities.getBucketingVersion(
properties.getProperty(hive_metastoreConstants.TABLE_BUCKETING_VERSION));
}
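/**
 * Copies this descriptor: the format classes are reused and the table
 * properties are copied into a new Properties object.
 */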
@Override
public Object clone() {
TableDesc ret = new TableDesc();
ret.setInputFileFormatClass(inputFileFormatClass);
ret.setOutputFileFormatClass(outputFileFormatClass);
Properties newProp = new Properties();
Enumeration<Object> keysProp = properties.keys();