/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.metadata.formatting;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.codehaus.jackson.map.ObjectMapper;
/**
* Format table and index information for machine readability using
* json.
*/
public class JsonMetaDataFormatter implements MetaDataFormatter {
private static final Log LOG = LogFactory.getLog(JsonMetaDataFormatter.class);

/**
 * Shared, pre-configured JSON mapper. Jackson's ObjectMapper is expensive to
 * construct and is thread-safe once configured, so we create it once instead
 * of on every asJson() call.
 */
private static final ObjectMapper MAPPER = new ObjectMapper();

/**
 * Serialize the given map as JSON onto the output stream.
 *
 * @param out  stream to write the JSON document to
 * @param data map of keys/values to serialize
 * @throws HiveException if serialization or writing fails (wraps the IOException)
 */
private void asJson(OutputStream out, Map data)
    throws HiveException
{
  try {
    MAPPER.writeValue(out, data);
  } catch (IOException e) {
    // Preserve the cause so callers can see the underlying I/O failure.
    throw new HiveException("Unable to convert to json", e);
  }
}
/**
 * Write an error message with no additional error detail.
 * Delegates to the five-argument overload with a null detail string.
 */
@Override
public void error(OutputStream out, String msg, int errorCode, String sqlState)
    throws HiveException {
  error(out, msg, errorCode, sqlState, null);
}
/**
 * Write an error message as a JSON document. The "errorDetail" and
 * "sqlState" fields are emitted only when their values are non-null.
 */
@Override
public void error(OutputStream out, String errorMessage, int errorCode, String sqlState, String errorDetail) throws HiveException {
  MapBuilder builder = MapBuilder.create().put("error", errorMessage);
  if (errorDetail != null) {
    builder.put("errorDetail", errorDetail);
  }
  builder.put("errorCode", errorCode);
  if (sqlState != null) {
    builder.put("sqlState", sqlState);
  }
  asJson(out, builder.build());
}
/**
 * Emit the set of table names as a JSON document under the "tables" key.
 */
@Override
public void showTables(DataOutputStream out, Set tables)
    throws HiveException {
  MapBuilder builder = MapBuilder.create();
  builder.put("tables", tables);
  asJson(out, builder.build());
}
/**
 * Describe a table as JSON: always emits the column list; in extended mode
 * additionally emits the partition metadata (when a partition is given) or
 * the table metadata. The formatting flags (isFormatted, isPretty,
 * isOutputPadded) and colPath/tableName/colStats are unused by the JSON
 * formatter but kept for the MetaDataFormatter interface.
 */
@Override
public void describeTable(DataOutputStream out, String colPath,
    String tableName, Table tbl, Partition part, List cols,
    boolean isFormatted, boolean isExt, boolean isPretty,
    boolean isOutputPadded, List colStats) throws HiveException {
  MapBuilder builder = MapBuilder.create()
      .put("columns", makeColsUnformatted(cols));
  if (isExt && part != null) {
    builder.put("partitionInfo", part.getTPartition());
  } else if (isExt) {
    builder.put("tableInfo", tbl.getTTable());
  }
  asJson(out, builder.build());
}
private List