water.hive.JdbcHiveMetadata Maven / Gradle / Ivy
Go to download
Show more of this group Show more artifacts with this name
Show all versions of h2o-hive Show documentation
H2O Generic MapReduce Driver for Hadoop
The newest version!
package water.hive;
import org.apache.log4j.Logger;

import water.jdbc.SQLManager;
import water.util.JSONUtils;
import water.util.Log;

import java.sql.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;

import static java.util.Collections.emptyList;
@SuppressWarnings({"rawtypes", "unchecked"})
public class JdbcHiveMetadata implements HiveMetaData {
private static final Logger LOG = Logger.getLogger(JdbcHiveMetadata.class);

// Instructs Hive to render DDL command output (DESCRIBE, SHOW, ...) as JSON,
// which the parsing code below depends on.
private static final String SQL_SET_JSON_OUTPUT = "set hive.ddl.output.format=json";
// NOTE(review): declared but not referenced in the visible part of this file.
private static final String SQL_GET_VERSION = "select version()";
// %s is substituted with the table name before execution.
private static final String SQL_DESCRIBE_TABLE = "DESCRIBE EXTENDED %s";
// %s is substituted with the table name; a partition spec is appended by the caller
// (note the trailing space is intentional).
private static final String SQL_DESCRIBE_PARTITION = "DESCRIBE EXTENDED %s PARTITION ";
// %s is substituted with the table name.
private static final String SQL_SHOW_PARTS = "SHOW PARTITIONS %s";

/** JDBC connection URL of the Hive server this instance reads metadata from. */
private final String url;

/**
 * Creates a metadata reader for the Hive server at the given JDBC URL.
 *
 * @param url JDBC URL used to open connections; must not be {@code null}
 * @throws NullPointerException if {@code url} is {@code null}
 */
public JdbcHiveMetadata(String url) {
    // Fail fast here instead of failing obscurely later inside getConnectionSafe().
    this.url = Objects.requireNonNull(url, "url");
}
/**
 * Mutable holder for the storage-related attributes of a table or partition,
 * populated while parsing Hive's {@code DESCRIBE EXTENDED} output and then
 * handed to {@link JdbcStorable}.
 */
static class StorableMetadata {
// Data location as reported by Hive (presumably an HDFS/filesystem URI — confirm against DESCRIBE output).
String location;
// Fully qualified SerDe class name used to deserialize the data.
String serializationLib;
// Fully qualified InputFormat class name of the stored data.
String inputFormat;
// SerDe configuration properties; defaults to an immutable empty map when none are parsed.
Map serDeParams = Collections.emptyMap();
}
/**
 * Immutable {@link Storable} view over the values captured in a
 * {@link StorableMetadata} instance.
 */
static class JdbcStorable implements Storable {

    private final String location;
    private final String inputFormat;
    private final String serializationLib;
    private final Map serDeParams;

    JdbcStorable(StorableMetadata data) {
        location = data.location;
        inputFormat = data.inputFormat;
        serializationLib = data.serializationLib;
        serDeParams = data.serDeParams;
    }

    @Override
    public String getLocation() {
        return location;
    }

    @Override
    public String getInputFormat() {
        return inputFormat;
    }

    @Override
    public String getSerializationLib() {
        return serializationLib;
    }

    @Override
    public Map getSerDeParams() {
        return serDeParams;
    }
}
/**
 * A single Hive table partition: its storage descriptor plus the values of
 * its partition keys.
 */
static class JdbcPartition extends JdbcStorable implements Partition {

    // Partition key values (presumably in partition-key declaration order — confirm against caller).
    private final List values;

    JdbcPartition(StorableMetadata meta, List values) {
        super(meta);
        this.values = values;
    }

    @Override
    public List getValues() {
        return values;
    }
}
/** Immutable name/type pair describing a single table column. */
static class JdbcColumn implements Column {

    private final String name;
    private final String type;

    JdbcColumn(String name, String type) {
        this.name = name;
        this.type = type;
    }

    @Override
    public String getType() {
        return type;
    }

    @Override
    public String getName() {
        return name;
    }
}
/**
 * Immutable {@link Table} implementation combining a storage descriptor with
 * the table's name, columns, partition keys, and partitions.
 */
static class JdbcTable extends JdbcStorable implements Table {

    private final String name;
    private final List columns;
    private final List partitionKeys;
    private final List partitions;

    public JdbcTable(
        String name,
        StorableMetadata meta,
        List columns,
        List partitions,
        List partitionKeys
    ) {
        super(meta);
        this.name = name;
        this.columns = columns;
        this.partitionKeys = partitionKeys;
        this.partitions = partitions;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public List getColumns() {
        return columns;
    }

    @Override
    public List getPartitionKeys() {
        return partitionKeys;
    }

    @Override
    public List getPartitions() {
        return partitions;
    }

    /** A table is partitioned exactly when it declares at least one partition key. */
    @Override
    public boolean hasPartitions() {
        return !partitionKeys.isEmpty();
    }
}
/**
 * Executes the given query and returns the first column of its first result row.
 *
 * @param conn  open JDBC connection to use
 * @param query SQL to execute
 * @return value of column 1 of the first row
 * @throws SQLException if execution fails or the query produces no rows
 */
private String executeQuery(Connection conn, String query) throws SQLException {
    try (Statement stmt = conn.createStatement();
         ResultSet rs = stmt.executeQuery(query)) {
        // The original `assert` is a no-op unless the JVM runs with -ea, which would
        // let an empty result fall through to rs.getString(1) on a before-first cursor.
        if (!rs.next()) {
            throw new SQLException("Query has no result rows.");
        }
        return rs.getString(1);
    }
}
/**
 * Formats {@code queryPattern} with the table name, executes it, and parses the
 * single-cell JSON result into a {@code Map}.
 *
 * @param conn         open JDBC connection to use
 * @param queryPattern {@link String#format} pattern with one {@code %s} for the table name
 * @param tableName    table to substitute into the pattern
 * @return parsed JSON result
 * @throws SQLException if the query fails or returns no rows
 */
private Map executeAndParseJsonResultSet(
    Connection conn, String queryPattern, String tableName
) throws SQLException {
    final String sql = String.format(queryPattern, tableName);
    LOG.info("Executing Hive metadata query " + sql);
    return JSONUtils.parse(executeQuery(conn, sql));
}
/**
 * Loads the metadata of the named Hive table over a freshly opened JDBC
 * connection, which is closed before returning.
 *
 * @param tableName name of the Hive table to describe
 * @return table metadata
 * @throws SQLException if connecting or any metadata query fails
 */
@Override
public Table getTable(String tableName) throws SQLException {
    try (Connection connection = SQLManager.getConnectionSafe(url, null, null)) {
        // Hive must first be switched to JSON output for DESCRIBE/SHOW commands.
        try (Statement statement = connection.createStatement()) {
            statement.execute(SQL_SET_JSON_OUTPUT);
        }
        return getTable(connection, tableName);
    }
}
private Table getTable(Connection conn, String name) throws SQLException {
Map tableData = executeAndParseJsonResultSet(conn, SQL_DESCRIBE_TABLE, name);
List columns = readColumns((List
© 2015 - 2024 Weber Informatics LLC | Privacy Policy