
# reference-spark.conf (starlake-spark3_2.12, version 1.3.0)
spark {
catalogKeys = "hive.metastore.uris,spark.sql.catalog.iceberg,spark.sql.catalog.hive,spark.sql.catalog.spark_catalog,org.apache.spark.sql.delta.catalog.DeltaCatalog"
catalogKeys = ${?SL_SPARK_CATALOGS}
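# Each setting below follows the same HOCON pattern: a default value followed by
# an optional override that only applies when the named environment variable is
# set (the `${?VAR}` substitution). For example (illustrative value only):
#   export SL_SPARK_CATALOGS="spark.sql.catalog.iceberg"
# would replace the default catalog key list above at startup.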
serializer = "org.apache.spark.serializer.KryoSerializer"
serializer = ${?SL_SPARK_SERIALIZER}
kryo.registrator = "ai.starlake.config.KryoSerialization"
kryo.registrator = ${?SL_SPARK_KRYO_REGISTRATOR}
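# These entries are presumably forwarded to the SparkConf as spark.serializer and
# spark.kryo.registrator; Kryo is generally faster and more compact than the
# default Java serializer, and the registrator is expected to pre-register
# Starlake classes with Kryo.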
# sql.hive.convertMetastoreParquet = false
# yarn.principal = "invalid"
# yarn.keytab = "invalid"
# yarn.principal = ${?SPARK_YARN_PRINCIPAL}
# yarn.keytab = ${?SPARK_YARN_KEYTAB}
ui.enabled = false
ui.enabled = ${?SL_SPARK_UI_ENABLED}
debug.maxToStringFields = 100
debug.maxToStringFields = ${?SL_SPARK_DEBUG_MAX_TO_STRING_FIELDS}
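# ui.enabled = false turns off the Spark web UI, which is usually unnecessary for
# short-lived batch jobs; debug.maxToStringFields bounds how many fields Spark
# renders when it stringifies schemas and plans in log output.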
# sql.catalogImplementation="hive"
# sql.catalogImplementation="in-memory"
sql.parquet.int96RebaseModeInWrite = "CORRECTED"
sql.parquet.int96RebaseModeInWrite = ${?SL_SPARK_SQL_PARQUET_INT96_REBASE_MODE_IN_WRITE}
sql.parquet.datetimeRebaseModeInWrite = "CORRECTED"
sql.parquet.datetimeRebaseModeInWrite = ${?SL_SPARK_SQL_PARQUET_DATETIME_REBASE_MODE_IN_WRITE}
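# With CORRECTED, Parquet dates and timestamps are written as-is in the Proleptic
# Gregorian calendar (Spark 3 semantics); LEGACY would rebase them to the hybrid
# Julian/Gregorian calendar for compatibility with Spark 2.x and older readers.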
sql.streaming.checkpointLocation = "/tmp"
sql.streaming.checkpointLocation = ${?SL_SPARK_SQL_STREAMING_CHECKPOINT_LOCATION}
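# "/tmp" only makes sense for local tests; for real streaming jobs, point the
# checkpoint location at durable shared storage (for example an HDFS or cloud
# bucket path) so state survives driver restarts.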
sql.sources.partitionOverwriteMode = "DYNAMIC"
sql.sources.partitionOverwriteMode = ${?SL_SPARK_SQL_SOURCES_PARTITION_OVERWRITE_MODE}
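# DYNAMIC overwrite only replaces the partitions touched by the write instead of
# truncating the whole table first; the alternative value is STATIC.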
datasource.bigquery {
viewsEnabled = "true"
viewsEnabled = ${?SL_SPARK_BIGQUERY_VIEWS_ENABLED}
materializationProject = ${?SL_SPARK_BIGQUERY_MATERIALIZATION_PROJECT}
#materializationDataset = "materialization"
materializationDataset = ${?SL_SPARK_BIGQUERY_MATERIALIZATION_DATASET}
# materializationExpirationTimeInMinutes defaults to 24 * 60 minutes (one day)
readDataFormat = "avro"
readDataFormat = ${?SL_SPARK_BIGQUERY_READ_DATA_FORMAT}
}
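# When viewsEnabled is true, the BigQuery connector materializes views into
# temporary tables, so a materialization project/dataset is normally required;
# those temporary tables expire after materializationExpirationTimeInMinutes.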
pyFiles = ""
pyFiles = ${?SL_SPARK_PY_FILES}
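# pyFiles presumably lists the .py/.zip/.egg dependencies shipped to Python
# workers (the equivalent of spark-submit's --py-files).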
delta.logStore.class = "org.apache.spark.sql.delta.storage.LocalLogStore"
delta.logStore.class = ${?SL_SPARK_DELTA_LOGSTORE_CLASS}
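# LocalLogStore is the single-machine Delta log store, intended for local
# filesystems and tests; deployments on object stores typically need the LogStore
# implementation matching their storage (set via the override above).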
}
sparkScheduling {
maxJobs = 1
maxJobs = ${?SL_SCHEDULING_MAX_JOBS}
mode = "FIFO"
mode = ${?SL_SCHEDULING_MODE}
file = ""
file = ${?SL_SCHEDULING_FILE}
poolName = "default"
poolName = ${?SL_SCHEDULING_POOL_NAME}
}
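# A minimal sketch of the fair scheduler allocation XML that `file` could point
# to when mode = "FAIR" (the pool name is illustrative and should match poolName):
#   <allocations>
#     <pool name="default">
#       <schedulingMode>FAIR</schedulingMode>
#       <weight>1</weight>
#       <minShare>0</minShare>
#     </pool>
#   </allocations>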