interpreter-setting.json (org.apache.zeppelin spark-interpreter 0.11.2)
[
{
"group": "spark",
"name": "spark",
"className": "org.apache.zeppelin.spark.SparkInterpreter",
"defaultInterpreter": true,
"properties": {
"SPARK_HOME": {
"envName": "SPARK_HOME",
"propertyName": "SPARK_HOME",
"defaultValue": "",
"description": "Location of spark distribution",
"type": "string"
},
"spark.master": {
"envName": "SPARK_MASTER",
"propertyName": "spark.master",
"defaultValue": "local[*]",
"description": "Spark master uri. local | yarn-client | yarn-cluster | spark master address of standalone mode, ex) spark://master_host:7077",
"type": "string"
},
"spark.submit.deployMode": {
"envName": "",
"propertyName": "spark.submit.deployMode",
"defaultValue": "",
"description": "The deploy mode of Spark driver program, either \"client\" or \"cluster\", Which means to launch driver program locally (\"client\") or remotely (\"cluster\") on one of the nodes inside the cluster.",
"type": "string"
},
"spark.app.name": {
"envName": "",
"propertyName": "spark.app.name",
"defaultValue": "",
"description": "The name of spark application.",
"type": "string"
},
"spark.driver.cores": {
"envName": "",
"propertyName": "spark.driver.cores",
"defaultValue": "1",
"description": "Number of cores to use for the driver process, only in cluster mode.",
"type": "number"
},
"spark.driver.memory": {
"envName": "",
"propertyName": "spark.driver.memory",
"defaultValue": "1g",
"description": "Amount of memory to use for the driver process, i.e. where SparkContext is initialized, in the same format as JVM memory strings with a size unit suffix (\"k\", \"m\", \"g\" or \"t\") (e.g. 512m, 2g).",
"type": "string"
},
"spark.executor.cores": {
"envName": null,
"propertyName": "spark.executor.cores",
"defaultValue": "1",
"description": "The number of cores to use on each executor",
"type": "number"
},
"spark.executor.memory": {
"envName": null,
"propertyName": "spark.executor.memory",
"defaultValue": "1g",
"description": "Executor memory per worker instance. ex) 512m, 32g",
"type": "string"
},
"spark.executor.instances": {
"envName": null,
"propertyName": "spark.executor.instances",
"defaultValue": "2",
"description": "The number of executors for static allocation.",
"type": "number"
},
"spark.files": {
"envName": null,
"propertyName": "spark.files",
"defaultValue": "",
"description": "Comma-separated list of files to be placed in the working directory of each executor. Globs are allowed.",
"type": "string"
},
"spark.jars": {
"envName": null,
"propertyName": "spark.jars",
"defaultValue": "",
"description": "Comma-separated list of jars to include on the driver and executor classpaths. Globs are allowed.",
"type": "string"
},
"spark.jars.packages": {
"envName": null,
"propertyName": "spark.jars.packages",
"defaultValue": "",
"description": "Comma-separated list of Maven coordinates of jars to include on the driver and executor classpaths. The coordinates should be groupId:artifactId:version. If spark.jars.ivySettings is given artifacts will be resolved according to the configuration in the file, otherwise artifacts will be searched for in the local maven repo, then maven central and finally any additional remote repositories given by the command-line option --repositories.",
"type": "string"
},
"zeppelin.spark.useHiveContext": {
"envName": null,
"propertyName": "zeppelin.spark.useHiveContext",
"defaultValue": true,
"description": "Use HiveContext instead of SQLContext if it is true. Enable hive for SparkSession.",
"type": "checkbox"
},
"zeppelin.spark.run.asLoginUser": {
"envName": null,
"propertyName": "zeppelin.spark.run.asLoginUser",
"defaultValue": true,
"description": "Whether run spark job as the zeppelin login user, it is only applied when running spark job in hadoop yarn cluster and shiro is enabled",
"type": "checkbox"
},
"zeppelin.spark.printREPLOutput": {
"envName": null,
"propertyName": "zeppelin.spark.printREPLOutput",
"defaultValue": true,
"description": "Print scala REPL output",
"type": "checkbox"
},
"zeppelin.spark.maxResult": {
"envName": null,
"propertyName": "zeppelin.spark.maxResult",
"defaultValue": "1000",
"description": "Max number of Spark SQL result to display.",
"type": "number"
},
"zeppelin.spark.enableSupportedVersionCheck": {
"envName": null,
"propertyName": "zeppelin.spark.enableSupportedVersionCheck",
"defaultValue": true,
"description": "Whether checking supported spark version. Developer only setting, not for production use",
"type": "checkbox"
},
"zeppelin.spark.uiWebUrl": {
"envName": null,
"propertyName": "zeppelin.spark.uiWebUrl",
"defaultValue": "",
"description": "Override Spark UI default URL. In Kubernetes mode, value can be Jinja template string with 3 template variables 'PORT', 'SERVICE_NAME' and 'SERVICE_DOMAIN'. (ex: http://{{PORT}}-{{SERVICE_NAME}}.{{SERVICE_DOMAIN}})",
"type": "string"
},
"zeppelin.spark.ui.hidden": {
"envName": null,
"propertyName": "zeppelin.spark.ui.hidden",
"defaultValue": false,
"description": "Whether hide spark ui in zeppelin ui",
"type": "checkbox"
},
"spark.webui.yarn.useProxy": {
"envName": null,
"propertyName": "",
"defaultValue": false,
"description": "whether use yarn proxy url as spark weburl, e.g. http://localhost:8088/proxy/application_1583396598068_0004",
"type": "checkbox"
},
"zeppelin.spark.scala.color": {
"envName": null,
"propertyName": "zeppelin.spark.scala.color",
"defaultValue": true,
"description": "Whether enable color output of spark scala interpreter",
"type": "checkbox"
},
"zeppelin.spark.deprecatedMsg.show": {
"envName": null,
"propertyName": "zeppelin.spark.deprecatedMsg.show",
"defaultValue": true,
"description": "Whether show the spark deprecated message, spark 2.2 and before are deprecated. Zeppelin will display warning message by default",
"type": "checkbox"
}
},
"editor": {
"language": "scala",
"editOnDblClick": false,
"completionKey": "TAB",
"completionSupport": true
}
},
{
"group": "spark",
"name": "sql",
"className": "org.apache.zeppelin.spark.SparkSqlInterpreter",
"properties": {
"zeppelin.spark.concurrentSQL": {
"envName": null,
"propertyName": "zeppelin.spark.concurrentSQL",
"defaultValue": true,
"description": "Execute multiple SQL concurrently if set true.",
"type": "checkbox"
},
"zeppelin.spark.concurrentSQL.max": {
"envName": null,
"propertyName": "zeppelin.spark.concurrentSQL.max",
"defaultValue": "10",
"description": "Max number of SQL concurrently executed",
"type": "number"
},
"zeppelin.spark.sql.stacktrace": {
"envName": null,
"propertyName": "zeppelin.spark.sql.stacktrace",
"defaultValue": true,
"description": "Show full exception stacktrace for SQL queries if set to true.",
"type": "checkbox"
},
"zeppelin.spark.sql.interpolation": {
"envName": null,
"propertyName": "zeppelin.spark.sql.interpolation",
"defaultValue": false,
"description": "Enable ZeppelinContext variable interpolation into spark sql",
"type": "checkbox"
}
},
"editor": {
"language": "sql",
"editOnDblClick": false,
"completionKey": "TAB",
"completionSupport": true
}
},
{
"group": "spark",
"name": "pyspark",
"className": "org.apache.zeppelin.spark.PySparkInterpreter",
"properties": {
"PYSPARK_PYTHON": {
"envName": "PYSPARK_PYTHON",
"propertyName": "PYSPARK_PYTHON",
"defaultValue": "python",
"description": "Python binary executable to use for PySpark in both driver and workers (default is python2.7 if available, otherwise python). Property `spark.pyspark.python` take precedence if it is set",
"type": "string"
},
"PYSPARK_DRIVER_PYTHON": {
"envName": "PYSPARK_DRIVER_PYTHON",
"propertyName": "PYSPARK_DRIVER_PYTHON",
"defaultValue": "python",
"description": "Python binary executable to use for PySpark in driver only (default is `PYSPARK_PYTHON`). Property `spark.pyspark.driver.python` take precedence if it is set",
"type": "string"
},
"zeppelin.pyspark.useIPython": {
"envName": null,
"propertyName": "zeppelin.pyspark.useIPython",
"defaultValue": true,
"description": "Whether use IPython when it is available",
"type": "checkbox"
}
},
"editor": {
"language": "python",
"editOnDblClick": false,
"completionKey": "TAB",
"completionSupport": true
}
},
{
"group": "spark",
"name": "ipyspark",
"className": "org.apache.zeppelin.spark.IPySparkInterpreter",
"properties": {},
"editor": {
"language": "python",
"editOnDblClick": false,
"completionSupport": true,
"completionKey": "TAB"
}
},
{
"group": "spark",
"name": "r",
"className": "org.apache.zeppelin.spark.SparkRInterpreter",
"properties": {
"zeppelin.R.knitr": {
"envName": null,
"propertyName": "zeppelin.R.knitr",
"defaultValue": true,
"description": "Whether use knitr or not",
"type": "checkbox"
},
"zeppelin.R.cmd": {
"envName": null,
"propertyName": "zeppelin.R.cmd",
"defaultValue": "R",
"description": "R binary executable path",
"type": "string"
},
"zeppelin.R.image.width": {
"envName": null,
"propertyName": "zeppelin.R.image.width",
"defaultValue": "100%",
"description": "Image width of R plotting",
"type": "number"
},
"zeppelin.R.render.options": {
"envName": null,
"propertyName": "zeppelin.R.render.options",
"defaultValue": "out.format = 'html', comment = NA, echo = FALSE, results = 'asis', message = F, warning = F, fig.retina = 2",
"description": "",
"type": "textarea"
},
"zeppelin.R.shiny.portRange": {
"envName": "",
"propertyName": "zeppelin.R.shiny.portRange",
"defaultValue": ":",
"description": "Shiny app would launch a web app at some port, this property is to specify the portRange via format ':', e.g. '5000:5001'. By default it is ':' which means any port",
"type": "string"
}
},
"editor": {
"language": "r",
"editOnDblClick": false,
"completionSupport": false,
"completionKey": "TAB"
}
},
{
"group": "spark",
"name": "ir",
"className": "org.apache.zeppelin.spark.SparkIRInterpreter",
"properties": {
},
"editor": {
"language": "r",
"editOnDblClick": false,
"completionSupport": true,
"completionKey": "TAB"
}
},
{
"group": "spark",
"name": "shiny",
"className": "org.apache.zeppelin.spark.SparkShinyInterpreter",
"properties": {
},
"editor": {
"language": "r",
"editOnDblClick": false,
"completionSupport": true,
"completionKey": "TAB"
}
}
]
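
All entries above share one schema: a group/name/className triple naming the interpreter implementation, a properties map whose values carry envName, propertyName, defaultValue, description and type, and an editor block configuring the notebook editor. As a quick sanity check of that structure, here is a minimal Python sketch (the local path interpreter-setting.json is an assumption) that loads the file and lists each interpreter with its default properties:

import json

# Assumed local copy of the JSON above; adjust the path as needed.
with open("interpreter-setting.json", encoding="utf-8") as f:
    interpreters = json.load(f)

for entry in interpreters:
    # Every entry names its group, short name, and implementing class.
    print(f"{entry['group']}/{entry['name']}: {entry['className']}")
    # Each property maps to {envName, propertyName, defaultValue, description, type}.
    for prop, meta in entry.get("properties", {}).items():
        print(f"  {prop} (type={meta['type']}, default={meta['defaultValue']!r})")

The type field (string, number, checkbox, textarea) presumably tells Zeppelin's interpreter-settings UI which input widget to render for each property; the defaultValue entries are only defaults, and values set on the interpreter settings page, or via the environment variable named by envName where one is given, take precedence.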