// Source: org/apache/spark/deploy/history/config.scala (Apache Spark)
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.history
import java.util.concurrent.TimeUnit
import org.apache.spark.internal.config.ConfigBuilder
import org.apache.spark.network.util.ByteUnit
/**
 * Configuration entries for the Spark History Server.
 *
 * Each `val` registers a `spark.history.*` configuration key via [[ConfigBuilder]],
 * fixing its type, unit, and default value in one place so the history-server code
 * can read settings in a type-safe way instead of parsing raw strings.
 */
private[spark] object config {
// Fallback event-log location used when the user does not set
// spark.history.fs.logDirectory explicitly.
val DEFAULT_LOG_DIR = "file:/tmp/spark-events"
// Directory containing application event logs that the history server reads.
val EVENT_LOG_DIR = ConfigBuilder("spark.history.fs.logDirectory")
.stringConf
.createWithDefault(DEFAULT_LOG_DIR)
// Maximum age of event-log files before the cleaner may remove them.
// Stored internally in seconds; accepts duration strings (default "7d").
val MAX_LOG_AGE_S = ConfigBuilder("spark.history.fs.cleaner.maxAge")
.timeConf(TimeUnit.SECONDS)
.createWithDefaultString("7d")
// Optional on-disk cache for application history; when unset (the default),
// history data is kept in memory only.
val LOCAL_STORE_DIR = ConfigBuilder("spark.history.store.path")
.doc("Local directory where to cache application history information. By default this is " +
"not set, meaning all history information will be kept in memory.")
.stringConf
.createOptional
// Cap on disk space used by the local store above; byte-size string, default "10g".
val MAX_LOCAL_DISK_USAGE = ConfigBuilder("spark.history.store.maxDiskUsage")
.bytesConf(ByteUnit.BYTE)
.createWithDefaultString("10g")
// Port the history-server web UI binds to (default 18080).
val HISTORY_SERVER_UI_PORT = ConfigBuilder("spark.history.ui.port")
.doc("Web UI port to bind Spark History Server")
.intConf
.createWithDefault(18080)
// Toggle for the in-progress event-log parsing optimization; see .doc() below
// for the trade-off (apps that fail to rename their logs may stay "in progress").
val FAST_IN_PROGRESS_PARSING =
ConfigBuilder("spark.history.fs.inProgressOptimization.enabled")
.doc("Enable optimized handling of in-progress logs. This option may leave finished " +
"applications that fail to rename their event logs listed as in-progress.")
.booleanConf
.createWithDefault(true)
// Number of bytes read from the tail of a log when searching for the end event;
// 0 disables the optimization. Byte-size string, default "1m".
val END_EVENT_REPARSE_CHUNK_SIZE =
ConfigBuilder("spark.history.fs.endEventReparseChunkSize")
.doc("How many bytes to parse at the end of log files looking for the end event. " +
"This is used to speed up generation of application listings by skipping unnecessary " +
"parts of event log files. It can be disabled by setting this config to 0.")
.bytesConf(ByteUnit.BYTE)
.createWithDefaultString("1m")
}