com.databricks.jdbc.api.impl.arrow.ArrowStreamResult
package com.databricks.jdbc.api.impl.arrow;

import static com.databricks.jdbc.common.util.DatabricksThriftUtil.getTypeFromTypeDesc;

import com.databricks.jdbc.api.IDatabricksSession;
import com.databricks.jdbc.api.impl.IExecutionResult;
import com.databricks.jdbc.api.impl.converters.ArrowToJavaObjectConverter;
import com.databricks.jdbc.api.internal.IDatabricksStatementInternal;
import com.databricks.jdbc.common.CompressionCodec;
import com.databricks.jdbc.dbclient.IDatabricksHttpClient;
import com.databricks.jdbc.dbclient.impl.common.StatementId;
import com.databricks.jdbc.dbclient.impl.http.DatabricksHttpClientFactory;
import com.databricks.jdbc.exception.DatabricksParsingException;
import com.databricks.jdbc.exception.DatabricksSQLException;
import com.databricks.jdbc.model.client.thrift.generated.TColumnDesc;
import com.databricks.jdbc.model.client.thrift.generated.TFetchResultsResp;
import com.databricks.jdbc.model.client.thrift.generated.TGetResultSetMetadataResp;
import com.databricks.jdbc.model.core.ResultData;
import com.databricks.jdbc.model.core.ResultManifest;
import com.databricks.sdk.service.sql.ColumnInfo;
import com.databricks.sdk.service.sql.ColumnInfoTypeName;
import com.google.common.annotations.VisibleForTesting;
import java.util.ArrayList;
import java.util.List;

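/**
 * Arrow-based {@link IExecutionResult} implementation that iterates over rows spread across
 * Arrow result chunks supplied by a {@link ChunkProvider} (inline Thrift results or remote
 * cloud-fetch downloads).
 */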
public class ArrowStreamResult implements IExecutionResult {

  private final ChunkProvider chunkProvider;
  private long currentRowIndex = -1;
  private boolean isClosed;
  private int chunkCount = 0;
  private ArrowResultChunk.ArrowResultChunkIterator chunkIterator;
  private List<ColumnInfo> columnInfos;

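  /**
   * Creates a result for the SQL Execution API path, streaming Arrow data through a
   * {@link RemoteChunkProvider} backed by the session's shared HTTP client.
   */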
  public ArrowStreamResult(
      ResultManifest resultManifest,
      ResultData resultData,
      StatementId statementId,
      IDatabricksSession session)
      throws DatabricksParsingException {
    this(
        resultManifest,
        resultData,
        statementId,
        session,
        DatabricksHttpClientFactory.getInstance().getClient(session.getConnectionContext()));
  }

  @VisibleForTesting
  ArrowStreamResult(
      ResultManifest resultManifest,
      ResultData resultData,
      StatementId statementId,
      IDatabricksSession session,
      IDatabricksHttpClient httpClient)
      throws DatabricksParsingException {
    this.chunkProvider =
        new RemoteChunkProvider(
            statementId,
            resultManifest,
            resultData,
            session,
            httpClient,
            session.getConnectionContext().getCloudFetchThreadPoolSize());
    this.columnInfos =
        resultManifest.getSchema().getColumnCount() == 0
            ? new ArrayList<>()
            : new ArrayList<>(resultManifest.getSchema().getColumns());
  }

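  /**
   * Creates a result for the Thrift execution path. Inline Arrow batches are served by an
   * {@link InlineChunkProvider}; otherwise chunks are fetched remotely via a
   * {@link RemoteChunkProvider} using the compression codec advertised in the result metadata.
   */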
  public ArrowStreamResult(
      TFetchResultsResp resultsResp,
      boolean isInlineArrow,
      IDatabricksStatementInternal parentStatement,
      IDatabricksSession session)
      throws DatabricksSQLException {
    this(
        resultsResp,
        isInlineArrow,
        parentStatement,
        session,
        DatabricksHttpClientFactory.getInstance().getClient(session.getConnectionContext()));
  }

  @VisibleForTesting
  ArrowStreamResult(
      TFetchResultsResp resultsResp,
      boolean isInlineArrow,
      IDatabricksStatementInternal parentStatement,
      IDatabricksSession session,
      IDatabricksHttpClient httpClient)
      throws DatabricksSQLException {
    setColumnInfo(resultsResp.getResultSetMetadata());
    if (isInlineArrow) {
      this.chunkProvider = new InlineChunkProvider(resultsResp, parentStatement, session);
    } else {
      CompressionCodec compressionCodec =
          CompressionCodec.getCompressionMapping(resultsResp.getResultSetMetadata());
      this.chunkProvider =
          new RemoteChunkProvider(
              parentStatement,
              resultsResp,
              session,
              httpClient,
              session.getConnectionContext().getCloudFetchThreadPoolSize(),
              compressionCodec);
    }
  }

  /** {@inheritDoc} */
  @Override
  public Object getObject(int columnIndex) throws DatabricksSQLException {
    ColumnInfoTypeName requiredType = columnInfos.get(columnIndex).getTypeName();
    Object unconvertedObject = chunkIterator.getColumnObjectAtCurrentRow(columnIndex);
    return ArrowToJavaObjectConverter.convert(unconvertedObject, requiredType);
  }

  /** {@inheritDoc} */
  @Override
  public long getCurrentRow() {
    return currentRowIndex;
  }

  /** {@inheritDoc} */
  @Override
  public boolean next() throws DatabricksSQLException {
    if (!hasNext()) {
      return false;
    }

    currentRowIndex++;
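    // Current chunk is exhausted (or no chunk has been read yet): advance the provider to the
    // next chunk and start iterating its rows from the beginning.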
    if (chunkIterator == null || !chunkIterator.hasNextRow()) {
      chunkProvider.next();
      chunkIterator = chunkProvider.getChunk().getChunkIterator();
    }

    return chunkIterator.nextRow();
  }

  /** {@inheritDoc} */
  @Override
  public boolean hasNext() {
    if (isClosed) {
      return false;
    }

    // Check if there are any more rows available in the current chunk
    if (chunkIterator != null && chunkIterator.hasNextRow()) {
      return true;
    }

    // Otherwise, ask the chunk provider whether more chunks remain
    // (inline Arrow extractor or remote cloud-fetch downloader)
    return chunkProvider.hasNextChunk();
  }

  /** {@inheritDoc} */
  @Override
  public void close() {
    isClosed = true;
    chunkProvider.close();
  }

  @Override
  public long getRowCount() {
    return chunkProvider.getRowCount();
  }

  @Override
  public long getChunkCount() {
    return chunkProvider.getChunkCount();
  }

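  /** Builds the column metadata list from the Thrift result-set metadata, if a schema is present. */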
  private void setColumnInfo(TGetResultSetMetadataResp resultManifest) {
    columnInfos = new ArrayList<>();
    if (resultManifest.getSchema() == null) {
      return;
    }
    for (TColumnDesc columnInfo : resultManifest.getSchema().getColumns()) {
      columnInfos.add(new ColumnInfo().setTypeName(getTypeFromTypeDesc(columnInfo.getTypeDesc())));
    }
  }
}
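
For orientation, the following sketch shows how a driver-internal caller might drain an
ArrowStreamResult built from the SQL Execution API path. It is a minimal, hypothetical example:
the resultManifest, resultData, statementId, session, and columnCount parameters are assumed to
be supplied by the surrounding statement-execution code, and the helper name drainResult is
illustrative, not part of the driver.

  // Hypothetical helper, not part of the driver: drains an ArrowStreamResult row by row.
  // All parameters are assumed to be provided by the surrounding statement-execution path.
  static void drainResult(
      ResultManifest resultManifest,
      ResultData resultData,
      StatementId statementId,
      IDatabricksSession session,
      int columnCount)
      throws DatabricksParsingException, DatabricksSQLException {
    ArrowStreamResult result =
        new ArrowStreamResult(resultManifest, resultData, statementId, session);
    try {
      while (result.next()) { // advances to the next row, pulling new chunks as needed
        for (int i = 0; i < columnCount; i++) {
          Object value = result.getObject(i); // Arrow value converted to a Java object
          // ... hand the value to the JDBC ResultSet layer
        }
      }
    } finally {
      result.close(); // marks the result closed and releases the chunk provider
    }
  }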