package com.databricks.jdbc.api.impl;
import com.databricks.jdbc.api.IDatabricksConnection;
import com.databricks.jdbc.api.IDatabricksConnectionContext;
import com.databricks.jdbc.api.IDatabricksSession;
import com.databricks.jdbc.api.IDatabricksStatement;
import com.databricks.jdbc.api.IDatabricksUCVolumeClient;
import com.databricks.jdbc.api.impl.volume.DatabricksUCVolumeClient;
import com.databricks.jdbc.api.internal.IDatabricksStatementInternal;
import com.databricks.jdbc.common.DatabricksJdbcConstants;
import com.databricks.jdbc.common.util.UserAgentManager;
import com.databricks.jdbc.common.util.ValidationUtil;
import com.databricks.jdbc.dbclient.IDatabricksClient;
import com.databricks.jdbc.dbclient.impl.common.StatementId;
import com.databricks.jdbc.dbclient.impl.http.DatabricksHttpClientFactory;
import com.databricks.jdbc.exception.DatabricksSQLClientInfoException;
import com.databricks.jdbc.exception.DatabricksSQLException;
import com.databricks.jdbc.exception.DatabricksSQLFeatureNotSupportedException;
import com.databricks.jdbc.log.JdbcLogger;
import com.databricks.jdbc.log.JdbcLoggerFactory;
import com.google.common.annotations.VisibleForTesting;
import java.sql.*;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executor;
import java.util.stream.Collectors;
/**
 * Implementation for Databricks specific connection.
 *
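 * <p>Illustrative usage sketch: connections are normally obtained through {@code
 * java.sql.DriverManager} with a {@code jdbc:databricks://} URL rather than by constructing this
 * class directly; the host, HTTP path, and token below are placeholders, not real values.
 *
 * <pre>{@code
 * try (Connection conn =
 *         DriverManager.getConnection(
 *             "jdbc:databricks://<host>:443/default;transportMode=http;ssl=1;"
 *                 + "httpPath=<http-path>;AuthMech=3;UID=token;PWD=<personal-access-token>");
 *     Statement stmt = conn.createStatement();
 *     ResultSet rs = stmt.executeQuery("SELECT 1")) {
 *   while (rs.next()) {
 *     System.out.println(rs.getInt(1));
 *   }
 * }
 * }</pre>
 */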
public class DatabricksConnection implements IDatabricksConnection {
private static final JdbcLogger LOGGER = JdbcLoggerFactory.getLogger(DatabricksConnection.class);
private final IDatabricksSession session;
  private final Set<IDatabricksStatementInternal> statementSet = ConcurrentHashMap.newKeySet();
private SQLWarning warnings = null;
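  // Created on demand rather than in the constructor; volatile gives safe publication of the
  // instance across threads.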
private volatile IDatabricksUCVolumeClient ucVolumeClient = null;
private final IDatabricksConnectionContext connectionContext;
/**
 * Creates an instance of Databricks connection for the given connection context.
*
* @param connectionContext underlying connection context
*/
public DatabricksConnection(IDatabricksConnectionContext connectionContext)
throws DatabricksSQLException {
this.connectionContext = connectionContext;
this.session = new DatabricksSession(connectionContext);
}
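  /**
   * Test-only constructor that wires in a caller-supplied {@link IDatabricksClient} instead of
   * letting the session build one from the connection context.
   */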
@VisibleForTesting
public DatabricksConnection(
IDatabricksConnectionContext connectionContext, IDatabricksClient testDatabricksClient)
throws DatabricksSQLException {
this.connectionContext = connectionContext;
this.session = new DatabricksSession(connectionContext, testDatabricksClient);
UserAgentManager.setUserAgent(connectionContext);
}
@Override
public void open() throws DatabricksSQLException {
this.session.open();
}
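  /**
   * Recreates a statement handle from a previously serialized statement ID, so a caller can
   * reattach to a statement that was started earlier.
   */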
@Override
public Statement getStatement(String statementId) throws SQLException {
return new DatabricksStatement(this, StatementId.deserialize(statementId));
}
@Override
public IDatabricksSession getSession() {
return session;
}
@Override
public Statement createStatement() {
LOGGER.debug("public Statement createStatement()");
DatabricksStatement statement = new DatabricksStatement(this);
statementSet.add(statement);
return statement;
}
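  /**
   * Prepares a parameterized statement for later execution. Illustrative sketch (the table name
   * and parameter value below are placeholders):
   *
   * <pre>{@code
   * try (PreparedStatement ps =
   *         connection.prepareStatement("SELECT id FROM my_table WHERE id = ?")) {
   *   ps.setLong(1, 42L);
   *   try (ResultSet rs = ps.executeQuery()) {
   *     while (rs.next()) {
   *       System.out.println(rs.getLong(1));
   *     }
   *   }
   * }
   * }</pre>
   */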
@Override
public PreparedStatement prepareStatement(String sql) {
LOGGER.debug(
String.format("public PreparedStatement prepareStatement(String sql = {%s})", sql));
DatabricksPreparedStatement statement = new DatabricksPreparedStatement(this, sql);
statementSet.add(statement);
return statement;
}
@Override
public CallableStatement prepareCall(String sql) throws SQLException {
LOGGER.debug(String.format("public CallableStatement prepareCall= {%s})", sql));
throw new DatabricksSQLFeatureNotSupportedException("Not Supported");
}
@Override
public String nativeSQL(String sql) throws SQLException {
LOGGER.debug(String.format("public String nativeSQL(String sql{%s})", sql));
throw new DatabricksSQLFeatureNotSupportedException(
"Not implemented in DatabricksConnection - nativeSQL(String sql)");
}
@Override
public void setAutoCommit(boolean autoCommit) throws SQLException {
throw new DatabricksSQLFeatureNotSupportedException(
"Not implemented in DatabricksConnection - setAutoCommit(boolean autoCommit)");
}
@Override
public boolean getAutoCommit() throws SQLException {
LOGGER.debug("public boolean getAutoCommit()");
throwExceptionIfConnectionIsClosed();
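    // Databricks connections always operate in auto-commit mode; setAutoCommit, commit, and
    // rollback are unsupported, so this is hard-wired to true.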
return true;
}
@Override
public void commit() throws SQLException {
LOGGER.debug("public void commit()");
throw new DatabricksSQLFeatureNotSupportedException(
"Not implemented in DatabricksConnection - commit()");
}
@Override
public void rollback() throws SQLException {
LOGGER.debug("public void rollback()");
throw new DatabricksSQLFeatureNotSupportedException(
"Not implemented in DatabricksConnection - rollback()");
}
@Override
public void close() throws DatabricksSQLException {
LOGGER.debug("public void close()");
for (IDatabricksStatementInternal statement : statementSet) {
statement.close(false);
statementSet.remove(statement);
}
this.session.close();
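    // Evict the HTTP client cached for this connection's context so its resources are released.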
DatabricksHttpClientFactory.getInstance().removeClient(this.session.getConnectionContext());
}
@Override
public boolean isClosed() throws SQLException {
LOGGER.debug("public boolean isClosed()");
return session == null || !session.isOpen();
}
@Override
public DatabaseMetaData getMetaData() throws SQLException {
LOGGER.debug("public DatabaseMetaData getMetaData()");
return new DatabricksDatabaseMetaData(this);
}
@Override
public void setReadOnly(boolean readOnly) throws SQLException {
LOGGER.debug("public void setReadOnly(boolean readOnly = {})");
throw new DatabricksSQLFeatureNotSupportedException(
"Not implemented in DatabricksConnection - setReadOnly(boolean readOnly)");
}
@Override
public boolean isReadOnly() throws SQLException {
LOGGER.debug("public boolean isReadOnly()");
throwExceptionIfConnectionIsClosed();
return false;
}
@Override
public void setCatalog(String catalog) throws SQLException {
this.session.setCatalog(catalog);
    // Propagate the catalog change to the server; try-with-resources ensures the helper
    // statement is closed rather than leaked.
    try (Statement statement = this.createStatement()) {
      statement.execute("SET CATALOG " + catalog);
    }
  }
@Override
public String getCatalog() throws SQLException {
LOGGER.debug("public String getCatalog()");
return this.session.getCatalog();
}
@Override
public void setTransactionIsolation(int level) throws SQLException {
LOGGER.debug("public void setTransactionIsolation(int level = {})");
throw new DatabricksSQLFeatureNotSupportedException(
"Not implemented in DatabricksConnection - setTransactionIsolation(int level)");
}
@Override
public int getTransactionIsolation() throws SQLException {
LOGGER.debug("public int getTransactionIsolation()");
throwExceptionIfConnectionIsClosed();
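    // The isolation level is fixed for Databricks connections; setTransactionIsolation above
    // always throws.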
return Connection.TRANSACTION_READ_UNCOMMITTED;
}
@Override
public SQLWarning getWarnings() throws SQLException {
LOGGER.debug("public SQLWarning getWarnings()");
throwExceptionIfConnectionIsClosed();
return warnings;
}
@Override
public void clearWarnings() throws SQLException {
LOGGER.debug("public void clearWarnings()");
throwExceptionIfConnectionIsClosed();
warnings = null;
}
@Override
public Statement createStatement(int resultSetType, int resultSetConcurrency)
throws SQLException {
if (resultSetType != ResultSet.TYPE_FORWARD_ONLY
|| resultSetConcurrency != ResultSet.CONCUR_READ_ONLY) {
throw new DatabricksSQLFeatureNotSupportedException(
"Only ResultSet.TYPE_FORWARD_ONLY and ResultSet.CONCUR_READ_ONLY are supported");
}
return createStatement();
}
@Override
public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency)
throws SQLException {
if (resultSetType != ResultSet.TYPE_FORWARD_ONLY
|| resultSetConcurrency != ResultSet.CONCUR_READ_ONLY) {
throw new DatabricksSQLFeatureNotSupportedException(
"Only ResultSet.TYPE_FORWARD_ONLY and ResultSet.CONCUR_READ_ONLY are supported");
}
return prepareStatement(sql);
}
@Override
public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency)
throws SQLException {
throw new DatabricksSQLFeatureNotSupportedException(
"Not implemented in DatabricksConnection - prepareCall(String sql, int resultSetType, int resultSetConcurrency)");
}
@Override
  public Map<String, Class<?>> getTypeMap() throws SQLException {
    LOGGER.debug("public Map<String, Class<?>> getTypeMap()");
throw new DatabricksSQLFeatureNotSupportedException(
"Not implemented in DatabricksConnection - getTypeMap()");
}
@Override
  public void setTypeMap(Map<String, Class<?>> map) throws SQLException {
    LOGGER.debug("public void setTypeMap(Map<String, Class<?>> map)");
    throw new DatabricksSQLFeatureNotSupportedException(
        "Not implemented in DatabricksConnection - setTypeMap(Map<String, Class<?>> map)");
}
@Override
public void setHoldability(int holdability) throws SQLException {
throw new DatabricksSQLFeatureNotSupportedException(
"Not implemented in DatabricksConnection - setHoldability(int holdability)");
}
@Override
public int getHoldability() throws SQLException {
LOGGER.debug("public int getHoldability()");
throw new DatabricksSQLFeatureNotSupportedException(
"Not implemented in DatabricksConnection - getHoldability()");
}
@Override
public Savepoint setSavepoint() throws SQLException {
LOGGER.debug("public Savepoint setSavepoint()");
throw new DatabricksSQLFeatureNotSupportedException(
"Not implemented in DatabricksConnection - setSavepoint()");
}
@Override
public Savepoint setSavepoint(String name) throws SQLException {
throw new DatabricksSQLFeatureNotSupportedException(
"Not implemented in DatabricksConnection - setSavepoint(String name)");
}
@Override
public void rollback(Savepoint savepoint) throws SQLException {
LOGGER.debug("public void rollback(Savepoint savepoint)");
throw new DatabricksSQLFeatureNotSupportedException(
"Not implemented in DatabricksConnection - rollback(Savepoint savepoint)");
}
@Override
public void releaseSavepoint(Savepoint savepoint) throws SQLException {
LOGGER.debug("public void releaseSavepoint(Savepoint savepoint)");
throw new DatabricksSQLFeatureNotSupportedException(
"Not implemented in DatabricksConnection - releaseSavepoint(Savepoint savepoint)");
}
@Override
public Statement createStatement(
int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
throw new DatabricksSQLFeatureNotSupportedException(
"Not implemented in DatabricksConnection - createStatement(int resultSetType, int resultSetConcurrency, int resultSetHoldability)");
}
@Override
public PreparedStatement prepareStatement(
String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability)
throws SQLException {
throw new DatabricksSQLFeatureNotSupportedException(
"Not implemented in DatabricksConnection - prepareStatement(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability)");
}
@Override
public CallableStatement prepareCall(
String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability)
throws SQLException {
throw new DatabricksSQLFeatureNotSupportedException(
"Not implemented in DatabricksConnection - prepareCall(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability)");
}
@Override
public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException {
throw new DatabricksSQLFeatureNotSupportedException(
"Not implemented in DatabricksConnection - prepareStatement(String sql, int autoGeneratedKeys)");
}
@Override
public PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException {
throw new DatabricksSQLFeatureNotSupportedException(
"Not implemented in DatabricksConnection - prepareStatement(String sql, int[] columnIndexes)");
}
@Override
public PreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException {
throw new DatabricksSQLFeatureNotSupportedException(
"Not implemented in DatabricksConnection - prepareStatement(String sql, String[] columnNames)");
}
@Override
public Clob createClob() throws SQLException {
LOGGER.debug("public Clob createClob()");
throw new DatabricksSQLFeatureNotSupportedException(
"Not implemented in DatabricksConnection - createClob()");
}
@Override
public Blob createBlob() throws SQLException {
LOGGER.debug("public Blob createBlob()");
throw new DatabricksSQLFeatureNotSupportedException(
"Not implemented in DatabricksConnection - createBlob()");
}
@Override
public NClob createNClob() throws SQLException {
LOGGER.debug("public NClob createNClob()");
throw new DatabricksSQLFeatureNotSupportedException(
"Not implemented in DatabricksConnection - createNClob()");
}
@Override
public SQLXML createSQLXML() throws SQLException {
LOGGER.debug("public SQLXML createSQLXML()");
throw new DatabricksSQLFeatureNotSupportedException(
"Not implemented in DatabricksConnection - createSQLXML()");
}
@Override
public boolean isValid(int timeout) throws SQLException {
ValidationUtil.checkIfNonNegative(timeout, "timeout");
try (DatabricksStatement statement = new DatabricksStatement(this)) {
statement.setQueryTimeout(timeout);
// simple query to check whether connection is working
statement.execute("SELECT 1");
return true;
} catch (Exception e) {
return false;
}
}
@Override
public void setClientInfo(String name, String value) throws SQLClientInfoException {
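    // Keys matching an allowed Databricks session configuration are applied as session configs;
    // other allowed keys are stored locally as plain client info properties.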
if (DatabricksJdbcConstants.ALLOWED_SESSION_CONF_TO_DEFAULT_VALUES_MAP.keySet().stream()
.map(String::toLowerCase)
.anyMatch(s -> s.equalsIgnoreCase(name))) {
      Map<String, ClientInfoStatus> failedProperties = new HashMap<>();
setSessionConfig(name, value, failedProperties);
if (!failedProperties.isEmpty()) {
throw new DatabricksSQLClientInfoException(
getFailedPropertiesExceptionMessage(failedProperties), failedProperties);
}
} else {
if (DatabricksJdbcConstants.ALLOWED_CLIENT_INFO_PROPERTIES.stream()
.map(String::toLowerCase)
.anyMatch(s -> s.equalsIgnoreCase(name))) {
this.session.setClientInfoProperty(name.toLowerCase(), value);
} else {
throw new DatabricksSQLClientInfoException(
String.format(
"Setting client info for %s failed with %s",
name, ClientInfoStatus.REASON_UNKNOWN_PROPERTY),
Map.of(name, ClientInfoStatus.REASON_UNKNOWN_PROPERTY));
}
}
}
@Override
public void setClientInfo(Properties properties) throws SQLClientInfoException {
LOGGER.debug("public void setClientInfo(Properties properties)");
    for (Map.Entry<Object, Object> entry : properties.entrySet()) {
      // Delegate to the single-property overload, which validates each key.
      setClientInfo(entry.getKey().toString(), entry.getValue().toString());
    }
  }

  // Remaining methods of the java.sql.Connection contract are omitted from this excerpt.
}