com.databricks.jdbc.core.DatabricksResultSetMetaData
package com.databricks.jdbc.core;

import static com.databricks.jdbc.client.impl.helper.MetadataResultConstants.NULL_STRING;
import static com.databricks.jdbc.client.impl.thrift.commons.DatabricksThriftHelper.getTypeFromTypeDesc;
import static com.databricks.jdbc.driver.DatabricksJdbcConstants.VOLUME_OPERATION_STATUS_COLUMN_NAME;

import com.databricks.jdbc.client.impl.thrift.generated.TColumnDesc;
import com.databricks.jdbc.client.impl.thrift.generated.TGetResultSetMetadataResp;
import com.databricks.jdbc.client.impl.thrift.generated.TTypeEntry;
import com.databricks.jdbc.client.impl.thrift.generated.TTypeQualifierValue;
import com.databricks.jdbc.client.sqlexec.ResultManifest;
import com.databricks.jdbc.commons.LogLevel;
import com.databricks.jdbc.commons.util.LoggingUtil;
import com.databricks.jdbc.commons.util.WrapperUtil;
import com.databricks.jdbc.core.types.AccessType;
import com.databricks.jdbc.core.types.Nullable;
import com.databricks.sdk.service.sql.ColumnInfo;
import com.databricks.sdk.service.sql.ColumnInfoTypeName;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Types;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class DatabricksResultSetMetaData implements ResultSetMetaData {
  private final String statementId;
  private final ImmutableList<DatabricksColumn> columns;
  private final ImmutableMap<String, Integer> columnNameIndex;
  private final long totalRows;
  private Long chunkCount;
  private static final String DEFAULT_CATALOGUE_NAME = "Spark";

  // TODO: Add handling for Arrow stream results

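  /**
   * Builds result-set metadata from a SQL Execution API {@link ResultManifest}. For volume
   * (file) operations, a single synthetic status column is exposed instead of the manifest
   * schema.
   */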
  public DatabricksResultSetMetaData(String statementId, ResultManifest resultManifest) {
    this.statementId = statementId;
    Map<String, Integer> columnNameToIndexMap = new HashMap<>();
    ImmutableList.Builder<DatabricksColumn> columnsBuilder = ImmutableList.builder();

    int currIndex = 0;
    if (resultManifest.getIsVolumeOperation() != null && resultManifest.getIsVolumeOperation()) {
      ImmutableDatabricksColumn.Builder columnBuilder = getColumnBuilder();
      columnBuilder
          .columnName(VOLUME_OPERATION_STATUS_COLUMN_NAME)
          .columnType(Types.VARCHAR)
          .columnTypeText(ColumnInfoTypeName.STRING.name())
          .typePrecision(0)
          .columnTypeClassName(DatabricksTypeUtil.getColumnTypeClassName(ColumnInfoTypeName.STRING))
          .displaySize(DatabricksTypeUtil.getDisplaySize(ColumnInfoTypeName.STRING, 0))
          .isSigned(DatabricksTypeUtil.isSigned(ColumnInfoTypeName.STRING));
      columnsBuilder.add(columnBuilder.build());
      columnNameToIndexMap.putIfAbsent(VOLUME_OPERATION_STATUS_COLUMN_NAME, ++currIndex);
    } else {
      if (resultManifest.getSchema().getColumnCount() > 0) {
        for (ColumnInfo columnInfo : resultManifest.getSchema().getColumns()) {
          ColumnInfoTypeName columnTypeName = columnInfo.getTypeName();
          int[] scaleAndPrecision = getScaleAndPrecision(columnInfo, columnTypeName);
          int precision = scaleAndPrecision[0];
          int scale = scaleAndPrecision[1];
          ImmutableDatabricksColumn.Builder columnBuilder = getColumnBuilder();
          columnBuilder
              .columnName(columnInfo.getName())
              .columnTypeClassName(DatabricksTypeUtil.getColumnTypeClassName(columnTypeName))
              .columnType(DatabricksTypeUtil.getColumnType(columnTypeName))
              .columnTypeText(columnInfo.getTypeText())
              .typePrecision(precision)
              .typeScale(scale)
              .displaySize(DatabricksTypeUtil.getDisplaySize(columnTypeName, precision))
              .isSigned(DatabricksTypeUtil.isSigned(columnTypeName));

          columnsBuilder.add(columnBuilder.build());
          // Keep index starting from 1, to be consistent with JDBC convention
          columnNameToIndexMap.putIfAbsent(columnInfo.getName(), ++currIndex);
        }
      }
    }
    this.columns = columnsBuilder.build();
    this.columnNameIndex = ImmutableMap.copyOf(columnNameToIndexMap);
    this.totalRows = resultManifest.getTotalRowCount();
    this.chunkCount = resultManifest.getTotalChunkCount();
  }

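  /**
   * Builds result-set metadata from a Thrift {@link TGetResultSetMetadataResp}, reading
   * precision and scale from the column's type qualifiers when present.
   */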
  public DatabricksResultSetMetaData(
      String statementId, TGetResultSetMetadataResp resultManifest, long rows, long chunkCount) {
    this.statementId = statementId;
    Map<String, Integer> columnNameToIndexMap = new HashMap<>();
    ImmutableList.Builder<DatabricksColumn> columnsBuilder = ImmutableList.builder();
    LoggingUtil.log(
        LogLevel.DEBUG,
        String.format(
            "Result manifest for statement {%s} has schema: {%s}",
            statementId, resultManifest.getSchema()));
    int currIndex = 0;
    if (resultManifest.getSchema() != null && resultManifest.getSchema().getColumnsSize() > 0) {
      for (TColumnDesc columnInfo : resultManifest.getSchema().getColumns()) {
        ColumnInfoTypeName columnTypeName = getTypeFromTypeDesc(columnInfo.getTypeDesc());
        int[] scaleAndPrecision = getScaleAndPrecision(columnInfo, columnTypeName);
        int precision = scaleAndPrecision[0];
        int scale = scaleAndPrecision[1];

        ImmutableDatabricksColumn.Builder columnBuilder = getColumnBuilder();
        columnBuilder
            .columnName(columnInfo.getColumnName())
            .columnTypeClassName(DatabricksTypeUtil.getColumnTypeClassName(columnTypeName))
            .columnType(DatabricksTypeUtil.getColumnType(columnTypeName))
            .columnTypeText(columnTypeName.name())
            .typePrecision(precision)
            .typeScale(scale)
            .displaySize(DatabricksTypeUtil.getDisplaySize(columnTypeName, precision))
            .isSigned(DatabricksTypeUtil.isSigned(columnTypeName));
        columnsBuilder.add(columnBuilder.build());
        columnNameToIndexMap.putIfAbsent(columnInfo.getColumnName(), ++currIndex);
      }
    }
    this.columns = columnsBuilder.build();
    this.columnNameIndex = ImmutableMap.copyOf(columnNameToIndexMap);
    this.totalRows = rows;
    this.chunkCount = chunkCount;
  }

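  /**
   * Builds result-set metadata from parallel lists of column names, type text,
   * {@link java.sql.Types} values, and precisions.
   */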
  public DatabricksResultSetMetaData(
      String statementId,
      List<String> columnNames,
      List<String> columnTypeText,
      List<Integer> columnTypes,
      List<Integer> columnTypePrecisions,
      long totalRows) {
    this.statementId = statementId;

    Map<String, Integer> columnNameToIndexMap = new HashMap<>();
    ImmutableList.Builder<DatabricksColumn> columnsBuilder = ImmutableList.builder();
    for (int i = 0; i < columnNames.size(); i++) {
      ColumnInfoTypeName columnTypeName =
          ColumnInfoTypeName.valueOf(
              DatabricksTypeUtil.getDatabricksTypeFromSQLType(columnTypes.get(i)));
      ImmutableDatabricksColumn.Builder columnBuilder = getColumnBuilder();
      columnBuilder
          .columnName(columnNames.get(i))
          .columnType(columnTypes.get(i))
          .columnTypeText(columnTypeText.get(i))
          .typePrecision(columnTypePrecisions.get(i))
          .columnTypeClassName(DatabricksTypeUtil.getColumnTypeClassName(columnTypeName))
          .displaySize(
              DatabricksTypeUtil.getDisplaySize(columnTypeName, columnTypePrecisions.get(i)))
          .isSigned(DatabricksTypeUtil.isSigned(columnTypeName));
      columnsBuilder.add(columnBuilder.build());
      // Keep index starting from 1, to be consistent with JDBC convention
      columnNameToIndexMap.putIfAbsent(columnNames.get(i), i + 1);
    }
    this.columns = columnsBuilder.build();
    this.columnNameIndex = ImmutableMap.copyOf(columnNameToIndexMap);
    this.totalRows = totalRows;
  }

  @Override
  public int getColumnCount() throws SQLException {
    return columns.size();
  }

  @Override
  public boolean isAutoIncrement(int column) throws SQLException {
    return columns.get(getEffectiveIndex(column)).isAutoIncrement();
  }

  @Override
  public boolean isCaseSensitive(int column) throws SQLException {
    return columns.get(getEffectiveIndex(column)).isCaseSensitive();
  }

  @Override
  public boolean isSearchable(int column) throws SQLException {
    return columns.get(getEffectiveIndex(column)).isSearchable();
  }

  @Override
  public boolean isCurrency(int column) throws SQLException {
    return columns.get(getEffectiveIndex(column)).isCurrency();
  }

  @Override
  public int isNullable(int column) throws SQLException {
    return columns.get(getEffectiveIndex(column)).nullable().getValue();
  }

  @Override
  public boolean isSigned(int column) throws SQLException {
    return columns.get(getEffectiveIndex(column)).isSigned();
  }

  @Override
  public int getColumnDisplaySize(int column) throws SQLException {
    return columns.get(getEffectiveIndex(column)).displaySize();
  }

  @Override
  public String getColumnLabel(int column) throws SQLException {
    return columns.get(getEffectiveIndex(column)).columnName();
  }

  @Override
  public String getColumnName(int column) throws SQLException {
    return columns.get(getEffectiveIndex(column)).columnName();
  }

  @Override
  public String getSchemaName(int column) throws SQLException {
    return columns.get(getEffectiveIndex(column)).schemaName();
  }

  @Override
  public int getPrecision(int column) throws SQLException {
    return columns.get(getEffectiveIndex(column)).typePrecision();
  }

  @Override
  public int getScale(int column) throws SQLException {
    return columns.get(getEffectiveIndex(column)).typeScale();
  }

  @Override
  public String getTableName(int column) throws SQLException {
    return columns.get(getEffectiveIndex(column)).tableName();
  }

  @Override
  public String getCatalogName(int column) throws SQLException {
    return columns.get(getEffectiveIndex(column)).catalogName();
  }

  @Override
  public int getColumnType(int column) throws SQLException {
    return columns.get(getEffectiveIndex(column)).columnType();
  }

  @Override
  public String getColumnTypeName(int column) throws SQLException {
    return columns.get(getEffectiveIndex(column)).columnTypeText();
  }

  @Override
  public boolean isReadOnly(int column) throws SQLException {
    AccessType columnAccessType = columns.get(getEffectiveIndex(column)).accessType();
    return columnAccessType.equals(AccessType.READ_ONLY)
        || columnAccessType.equals(AccessType.UNKNOWN);
  }

  @Override
  public boolean isWritable(int column) throws SQLException {
    return columns.get(getEffectiveIndex(column)).accessType().equals(AccessType.WRITE);
  }

  @Override
  public boolean isDefinitelyWritable(int column) throws SQLException {
    return columns.get(getEffectiveIndex(column)).isDefinitelyWritable();
  }

  @Override
  public String getColumnClassName(int column) throws SQLException {
    return columns.get(getEffectiveIndex(column)).columnTypeClassName();
  }

  @Override
  public <T> T unwrap(Class<T> iface) throws SQLException {
    return WrapperUtil.unwrap(iface, this);
  }

  @Override
  public boolean isWrapperFor(Class<?> iface) throws SQLException {
    return WrapperUtil.isWrapperFor(iface, this);
  }

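  /** Converts a 1-based JDBC column index to a 0-based list index, validating the range. */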
  private int getEffectiveIndex(int columnIndex) {
    if (columnIndex > 0 && columnIndex <= columns.size()) {
      return columnIndex - 1;
    } else {
      throw new IllegalStateException("Invalid column index: " + columnIndex);
    }
  }

  /**
   * Returns the 1-based index of the given column name in the metadata.
   *
   * @param columnName the column name to look up
   * @return index of the column if it exists, else -1
   */
  public int getColumnNameIndex(String columnName) {
    return columnNameIndex.getOrDefault(columnName, -1);
  }

  public long getTotalRows() {
    return totalRows;
  }

  public Long getChunkCount() {
    return chunkCount;
  }

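  /**
   * Resolves precision and scale for a SQL Execution API column, preferring the values reported
   * in the column metadata over the defaults for the type.
   */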
  public int[] getScaleAndPrecision(ColumnInfo columnInfo, ColumnInfoTypeName columnTypeName) {
    int precision = DatabricksTypeUtil.getPrecision(columnTypeName);
    int scale = DatabricksTypeUtil.getScale(columnTypeName);
    if (columnInfo.getTypePrecision() != null) {
      precision = Math.toIntExact(columnInfo.getTypePrecision());
      scale = Math.toIntExact(columnInfo.getTypeScale());
    }
    return new int[] {precision, scale};
  }

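  /**
   * Resolves precision and scale for a Thrift column, reading the "precision" and "scale" type
   * qualifiers of the primitive type entry when they are set.
   */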
  public int[] getScaleAndPrecision(TColumnDesc columnInfo, ColumnInfoTypeName columnTypeName) {
    int precision = DatabricksTypeUtil.getPrecision(columnTypeName);
    int scale = DatabricksTypeUtil.getScale(columnTypeName);
    if (columnInfo.getTypeDesc() != null && columnInfo.getTypeDesc().getTypesSize() > 0) {
      TTypeEntry tTypeEntry = columnInfo.getTypeDesc().getTypes().get(0);
      if (tTypeEntry.isSetPrimitiveEntry()
          && tTypeEntry.getPrimitiveEntry().isSetTypeQualifiers()
          && tTypeEntry.getPrimitiveEntry().getTypeQualifiers().isSetQualifiers()) {
        Map<String, TTypeQualifierValue> qualifiers =
            tTypeEntry.getPrimitiveEntry().getTypeQualifiers().getQualifiers();
        scale = qualifiers.get("scale").getI32Value();
        precision = qualifiers.get("precision").getI32Value();
      }
    }
    return new int[] {precision, scale};
  }

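  /** Returns a column builder pre-populated with the defaults shared by all columns. */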
  private ImmutableDatabricksColumn.Builder getColumnBuilder() {
    return ImmutableDatabricksColumn.builder()
        .isAutoIncrement(false)
        .isSearchable(true)
        .nullable(Nullable.NULLABLE)
        .accessType(AccessType.READ_ONLY)
        .isDefinitelyWritable(false)
        .schemaName(NULL_STRING)
        .tableName(NULL_STRING)
        .catalogName(DEFAULT_CATALOGUE_NAME)
        .isCurrency(false)
        .typeScale(0)
        .isCaseSensitive(false);
  }
}
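
Usage: a minimal sketch of how this metadata surfaces through the standard JDBC API. The
connection URL, credentials, and query below are placeholders, not values taken from this file.

import java.sql.*;

public class MetadataExample {
  public static void main(String[] args) throws SQLException {
    // Placeholder URL; consult the Databricks JDBC documentation for real connection options.
    String url = "jdbc:databricks://<host>:443;httpPath=<http-path>";
    try (Connection conn = DriverManager.getConnection(url, "token", "<access-token>");
        Statement stmt = conn.createStatement();
        ResultSet rs = stmt.executeQuery("SELECT 1 AS id, 'a' AS name")) {
      // getMetaData() returns the driver's ResultSetMetaData implementation,
      // i.e. DatabricksResultSetMetaData in this driver.
      ResultSetMetaData md = rs.getMetaData();
      for (int i = 1; i <= md.getColumnCount(); i++) { // JDBC column indexes are 1-based
        System.out.printf(
            "%s: type=%d (%s), precision=%d, scale=%d%n",
            md.getColumnName(i),
            md.getColumnType(i),
            md.getColumnTypeName(i),
            md.getPrecision(i),
            md.getScale(i));
      }
    }
  }
}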