
com.databricks.jdbc.common.DatabricksJdbcConstants

package com.databricks.jdbc.common;

import com.google.common.annotations.VisibleForTesting;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;

public final class DatabricksJdbcConstants {
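  // Regular expressions for parsing the JDBC connection URL and the HTTP paths
  // embedded in it (warehouse, endpoint, cliservice, and cluster forms).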
  public static final Pattern JDBC_URL_PATTERN =
      Pattern.compile("jdbc:databricks://([^/;]*)(?::\\d+)?/*(.*)");
  public static final Pattern HTTP_WAREHOUSE_PATH_PATTERN = Pattern.compile(".*/warehouses/(.+)");
  public static final Pattern HTTP_ENDPOINT_PATH_PATTERN = Pattern.compile(".*/endpoints/(.+)");
  public static final Pattern HTTP_CLI_PATTERN = Pattern.compile(".*cliservice(.+)");
  public static final Pattern HTTP_PATH_CLI_PATTERN = Pattern.compile("cliservice");
  public static final Pattern TEST_PATH_PATTERN = Pattern.compile("jdbc:databricks://test");
  public static final Pattern BASE_PATTERN = Pattern.compile("jdbc:databricks://[^;]+(;[^;]*)?");
  public static final Pattern HTTP_CLUSTER_PATH_PATTERN = Pattern.compile(".*/o/(.+)/(.+)");
  public static final String JDBC_SCHEMA = "jdbc:databricks://";
  public static final LogLevel DEFAULT_LOG_LEVEL = LogLevel.OFF;
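  // Delimiters, scheme prefixes, and TLS-related literals used when parsing the
  // connection URL and building request strings.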
  public static final String USER_AGENT_DELIMITER = "-";
  public static final String URL_DELIMITER = ";";
  public static final String PORT_DELIMITER = ":";
  public static final String DEFAULT_SCHEMA = "default";
  public static final String PAIR_DELIMITER = "=";
  public static final String SCHEMA_DELIMITER = "://";
  public static final String PKIX = "PKIX";
  public static final String TLS = "TLS";
  public static final String HTTP = "http";
  public static final String HTTPS = "https";
  public static final String HTTP_SCHEMA = HTTP + SCHEMA_DELIMITER;
  public static final String HTTPS_SCHEMA = HTTPS + SCHEMA_DELIMITER;
  public static final String LOGIN_TIMEOUT = "loginTimeout";
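  // Supported authentication types (external-browser OAuth, machine-to-machine OAuth,
  // personal access token) and related OAuth redirect and scope settings.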
  public static final String U2M_AUTH_TYPE = "external-browser";
  public static final String M2M_AUTH_TYPE = "oauth-m2m";
  public static final String ACCESS_TOKEN_AUTH_TYPE = "pat";
  public static final String U2M_AUTH_REDIRECT_URL = "http://localhost:8020";
  public static final String SQL_SCOPE = "sql";
  public static final String OFFLINE_ACCESS_SCOPE = "offline_access";
  public static final String FULL_STOP = ".";
  public static final String COMMA = ",";
  public static final String PIPE = "|";
  public static final String ASTERISK = "*";
  public static final String EMPTY_STRING = "";
  public static final String IDENTIFIER_QUOTE_STRING = "`";
  public static final String CATALOG = "catalog";
  public static final String PROCEDURE = "procedure";
  public static final String SCHEMA = "schema";
  public static final String TABLE = "table";
  public static final String USER_NAME = "User";
  public static final String PORT = "port";
  public static final int DEFAULT_PORT = 443;
  public static final String DEFAULT_USER_AGENT = "DatabricksJDBCDriverOSS";
  public static final String CLIENT_USER_AGENT_PREFIX = "Java";
  public static final String USER_AGENT_SEA_CLIENT = "SQLExecHttpClient-HC";
  public static final String USER_AGENT_THRIFT_CLIENT = "THttpClient-HC";
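  // Connection properties that allow-list local file paths for volume and staging
  // ingestion, plus the column name and value reported for volume operation status.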
  public static final String ALLOWED_VOLUME_INGESTION_PATHS =
      "allowlistedVolumeOperationLocalFilePaths";
  public static final String ALLOWED_STAGING_INGESTION_PATHS = "StagingAllowedLocalPaths";
  public static final String VOLUME_OPERATION_STATUS_COLUMN_NAME = "operation_status";
  public static final String VOLUME_OPERATION_STATUS_SUCCEEDED = "SUCCEEDED";
  public static final Map<String, String> ALLOWED_SESSION_CONF_TO_DEFAULT_VALUES_MAP =
      // This map comes from
      // https://docs.databricks.com/en/sql/language-manual/sql-ref-parameters.html
      Map.of(
          "ANSI_MODE", "TRUE",
          "ENABLE_PHOTON", "TRUE",
          "LEGACY_TIME_PARSER_POLICY", "EXCEPTION",
          "MAX_FILE_PARTITION_BYTES", "128m",
          "READ_ONLY_EXTERNAL_METASTORE", "FALSE",
          "STATEMENT_TIMEOUT", "172800",
          "TIMEZONE", "UTC",
          "USE_CACHED_RESULT", "TRUE");
  public static final Set<String> ALLOWED_CLIENT_INFO_PROPERTIES =
      Set.of(
          ALLOWED_VOLUME_INGESTION_PATHS,
          ALLOWED_STAGING_INGESTION_PATHS,
          DatabricksJdbcUrlParams.AUTH_ACCESS_TOKEN.getParamName());
  @VisibleForTesting public static final String IS_FAKE_SERVICE_TEST_PROP = "isFakeServiceTest";
  @VisibleForTesting public static final String FAKE_SERVICE_URI_PROP_SUFFIX = ".fakeServiceURI";
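  // OAuth client identifiers for AWS and Azure AD (AAD) environments.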
  public static final String AWS_CLIENT_ID = "databricks-sql-jdbc";
  public static final String AAD_CLIENT_ID = "96eecda7-19ea-49cc-abb5-240097d554f5";

  /** Enum for the services that can be replaced with a fake service in integration tests. */
  @VisibleForTesting
  public enum FakeServiceType {
    SQL_EXEC,
    CLOUD_FETCH,
    SQL_GATEWAY,
    CLOUD_FETCH_SQL_GATEWAY,
    CLOUD_FETCH_UC_VOLUME
  }

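  // Patterns that classify a SQL statement by its leading keyword (optionally preceded
  // by whitespace and opening parentheses), plus set-operation keywords matched anywhere
  // in the statement text.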
  public static final Pattern SELECT_PATTERN =
      Pattern.compile("^(\\s*\\()*\\s*SELECT", Pattern.CASE_INSENSITIVE);
  public static final Pattern SHOW_PATTERN =
      Pattern.compile("^(\\s*\\()*\\s*SHOW", Pattern.CASE_INSENSITIVE);
  public static final Pattern DESCRIBE_PATTERN =
      Pattern.compile("^(\\s*\\()*\\s*DESCRIBE", Pattern.CASE_INSENSITIVE);
  public static final Pattern EXPLAIN_PATTERN =
      Pattern.compile("^(\\s*\\()*\\s*EXPLAIN", Pattern.CASE_INSENSITIVE);
  public static final Pattern WITH_PATTERN =
      Pattern.compile("^(\\s*\\()*\\s*WITH", Pattern.CASE_INSENSITIVE);
  public static final Pattern SET_PATTERN =
      Pattern.compile("^(\\s*\\()*\\s*SET", Pattern.CASE_INSENSITIVE);
  public static final Pattern MAP_PATTERN =
      Pattern.compile("^(\\s*\\()*\\s*MAP", Pattern.CASE_INSENSITIVE);
  public static final Pattern FROM_PATTERN =
      Pattern.compile("^(\\s*\\()*\\s*FROM\\s*\\(", Pattern.CASE_INSENSITIVE);
  public static final Pattern VALUES_PATTERN =
      Pattern.compile("^(\\s*\\()*\\s*VALUES", Pattern.CASE_INSENSITIVE);
  public static final Pattern UNION_PATTERN =
      Pattern.compile("\\s+UNION\\s+", Pattern.CASE_INSENSITIVE);
  public static final Pattern INTERSECT_PATTERN =
      Pattern.compile("\\s+INTERSECT\\s+", Pattern.CASE_INSENSITIVE);
  public static final Pattern EXCEPT_PATTERN =
      Pattern.compile("\\s+EXCEPT\\s+", Pattern.CASE_INSENSITIVE);
  public static final Pattern DECLARE_PATTERN =
      Pattern.compile("^(\\s*\\()*\\s*DECLARE", Pattern.CASE_INSENSITIVE);
  public static final Pattern PUT_PATTERN =
      Pattern.compile("^(\\s*\\()*\\s*PUT", Pattern.CASE_INSENSITIVE);
  public static final Pattern GET_PATTERN =
      Pattern.compile("^(\\s*\\()*\\s*GET", Pattern.CASE_INSENSITIVE);
  public static final Pattern REMOVE_PATTERN =
      Pattern.compile("^(\\s*\\()*\\s*REMOVE", Pattern.CASE_INSENSITIVE);
  public static final Pattern LIST_PATTERN =
      Pattern.compile("^(\\s*\\()*\\s*LIST", Pattern.CASE_INSENSITIVE);
  public static final String DEFAULT_USERNAME =
      "token"; // This is for PAT. We do not support Basic Auth.
}
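
A minimal usage sketch, assuming the driver classes are on the classpath; the host name and query below are hypothetical examples:

import com.databricks.jdbc.common.DatabricksJdbcConstants;
import java.util.regex.Matcher;

public class JdbcUrlPatternExample {
  public static void main(String[] args) {
    // Hypothetical connection URL: JDBC_URL_PATTERN captures the host portion and the
    // trailing path/property string.
    String url =
        "jdbc:databricks://adb-1234567890123456.7.azuredatabricks.net/default;transportMode=http";
    Matcher m = DatabricksJdbcConstants.JDBC_URL_PATTERN.matcher(url);
    if (m.matches()) {
      System.out.println(m.group(1)); // adb-1234567890123456.7.azuredatabricks.net
      System.out.println(m.group(2)); // default;transportMode=http
    }

    // The statement-type patterns are anchored at the start of the text, so find()
    // detects a leading SELECT keyword even when wrapped in parentheses.
    boolean isSelect =
        DatabricksJdbcConstants.SELECT_PATTERN.matcher("  (SELECT 1)").find();
    System.out.println(isSelect); // true
  }
}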