// org.apache.spark.ui.UIUtils.scala (from the shaded Apache Spark 2.x.x build for Presto)
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ui
import java.net.URLDecoder
import java.text.SimpleDateFormat
import java.util.{Date, Locale, TimeZone}
import javax.servlet.http.HttpServletRequest
import javax.ws.rs.core.{MediaType, Response}
import scala.util.control.NonFatal
import scala.xml._
import scala.xml.transform.{RewriteRule, RuleTransformer}
import org.apache.commons.lang3.StringEscapeUtils
import org.apache.spark.internal.Logging
import org.apache.spark.ui.scope.RDDOperationGraph
/** Utility functions for generating XML pages with spark content. */
private[spark] object UIUtils extends Logging {
val TABLE_CLASS_NOT_STRIPED = "table table-bordered table-condensed"
val TABLE_CLASS_STRIPED = TABLE_CLASS_NOT_STRIPED + " table-striped"
val TABLE_CLASS_STRIPED_SORTABLE = TABLE_CLASS_STRIPED + " sortable"
private val NEWLINE_AND_SINGLE_QUOTE_REGEX = raw"(?i)(\r\n|\n|\r|%0D%0A|%0A|%0D|'|%27)".r
// SimpleDateFormat is not thread-safe. Don't expose it to avoid improper use.
private val dateFormat = new ThreadLocal[SimpleDateFormat]() {
override def initialValue(): SimpleDateFormat =
new SimpleDateFormat("yyyy/MM/dd HH:mm:ss", Locale.US)
}
def formatDate(date: Date): String = dateFormat.get.format(date)
def formatDate(timestamp: Long): String = dateFormat.get.format(new Date(timestamp))
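// formatDate example (assuming the JVM default time zone is UTC):
//   formatDate(0L) == "1970/01/01 00:00:00"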
def formatDuration(milliseconds: Long): String = {
if (milliseconds < 100) {
return "%d ms".format(milliseconds)
}
val seconds = milliseconds.toDouble / 1000
if (seconds < 1) {
return "%.1f s".format(seconds)
}
if (seconds < 60) {
return "%.0f s".format(seconds)
}
val minutes = seconds / 60
if (minutes < 10) {
return "%.1f min".format(minutes)
} else if (minutes < 60) {
return "%.0f min".format(minutes)
}
val hours = minutes / 60
"%.1f h".format(hours)
}
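// Illustrative outputs, derived from the thresholds above:
//   formatDuration(42L)      == "42 ms"
//   formatDuration(90000L)   == "1.5 min"
//   formatDuration(7200000L) == "2.0 h"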
/** Generate a verbose human-readable string representing a duration such as "5 seconds 35 ms" */
def formatDurationVerbose(ms: Long): String = {
try {
val second = 1000L
val minute = 60 * second
val hour = 60 * minute
val day = 24 * hour
val week = 7 * day
val year = 365 * day
def toString(num: Long, unit: String): String = {
if (num == 0) {
""
} else if (num == 1) {
s"$num $unit"
} else {
s"$num ${unit}s"
}
}
val millisecondsString = if (ms >= second && ms % second == 0) "" else s"${ms % second} ms"
val secondString = toString((ms % minute) / second, "second")
val minuteString = toString((ms % hour) / minute, "minute")
val hourString = toString((ms % day) / hour, "hour")
val dayString = toString((ms % week) / day, "day")
val weekString = toString((ms % year) / week, "week")
val yearString = toString(ms / year, "year")
Seq(
second -> millisecondsString,
minute -> s"$secondString $millisecondsString",
hour -> s"$minuteString $secondString",
day -> s"$hourString $minuteString $secondString",
week -> s"$dayString $hourString $minuteString",
year -> s"$weekString $dayString $hourString"
).foreach { case (durationLimit, durationString) =>
if (ms < durationLimit) {
// if time is less than the limit (up to a year)
return durationString
}
}
// if time is more than a year
return s"$yearString $weekString $dayString"
} catch {
case e: Exception =>
logError("Error converting time to string", e)
// if there is some error, return blank string
return ""
}
}
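// Illustrative outputs, derived from the unit breakdown above:
//   formatDurationVerbose(5035L)   == "5 seconds 35 ms"
//   formatDurationVerbose(135000L) == "2 minutes 15 seconds"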
/** Generate a human-readable string representing a number (e.g. 100 K) */
def formatNumber(records: Double): String = {
val trillion = 1e12
val billion = 1e9
val million = 1e6
val thousand = 1e3
val (value, unit) = {
if (records >= 2*trillion) {
(records / trillion, " T")
} else if (records >= 2*billion) {
(records / billion, " B")
} else if (records >= 2*million) {
(records / million, " M")
} else if (records >= 2*thousand) {
(records / thousand, " K")
} else {
(records, "")
}
}
if (unit.isEmpty) {
"%d".formatLocal(Locale.US, value.toInt)
} else {
"%.1f%s".formatLocal(Locale.US, value, unit)
}
}
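// Illustrative outputs; note the 2x threshold, so values below twice a unit stay in the
// smaller unit:
//   formatNumber(1500)  == "1500"
//   formatNumber(1.9e6) == "1900.0 K"
//   formatNumber(5.2e6) == "5.2 M"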
// Applications running behind the YARN proxy are given a base URI, which has to be prepended to all links.
def uiRoot(request: HttpServletRequest): String = {
// Knox uses the X-Forwarded-Context header to tell the application its base path.
val knoxBasePath = Option(request.getHeader("X-Forwarded-Context"))
// SPARK-11484 - Use the proxyBase set by the AM, if not found then use env.
sys.props.get("spark.ui.proxyBase")
.orElse(sys.env.get("APPLICATION_WEB_PROXY_BASE"))
.orElse(knoxBasePath)
.getOrElse("")
}
def prependBaseUri(
request: HttpServletRequest,
basePath: String = "",
resource: String = ""): String = {
uiRoot(request) + basePath + resource
}
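// Example (hypothetical values): with spark.ui.proxyBase set to "/proxy/app-123",
//   prependBaseUri(request, "/jobs/")                == "/proxy/app-123/jobs/"
//   prependBaseUri(request, "", "/static/webui.css") == "/proxy/app-123/static/webui.css"
// With no proxy configured and no X-Forwarded-Context header, uiRoot returns "" and the
// paths come back unchanged.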
def commonHeaderNodes(request: HttpServletRequest): Seq[Node] = {
  // Emits the <meta>, stylesheet <link> and <script> tags shared by every Spark UI page
  // (Bootstrap, webui.css, sorttable, jQuery and the tooltip/table helpers), with each URL
  // resolved through prependBaseUri(request, ...).
}
def vizHeaderNodes(request: HttpServletRequest): Seq[Node] = {
  // Emits the stylesheet and <script> tags used by the DAG visualization
  // (spark-dag-viz.css plus the d3/dagre-d3 based spark-dag-viz.js).
}
def dataTablesHeaderNodes(request: HttpServletRequest): Seq[Node] = {
  // Emits the stylesheet and <script> tags used by pages that render jQuery DataTables.
}
/** Returns a spark page with correctly formatted headers */
def headerSparkPage(
request: HttpServletRequest,
title: String,
content: => Seq[Node],
activeTab: SparkUITab,
refreshInterval: Option[Int] = None,
helpText: Option[String] = None,
showVisualization: Boolean = false,
useDataTables: Boolean = false): Seq[Node] = {
val appName = activeTab.appName
val shortAppName = if (appName.length < 36) appName else appName.take(32) + "..."
val header = activeTab.headerTabs.map { tab =>
  <li class={if (tab == activeTab) "active" else ""}>
    <a href={prependBaseUri(request, activeTab.basePath, "/" + tab.prefix + "/")}>{tab.name}</a>
  </li>
}
val helpButton: Seq[Node] = helpText.map(tooltip(_, "bottom")).getOrElse(Seq.empty)
// The markup below is a condensed sketch of the Spark UI page chrome; the full layout also
// renders the Spark logo, version banner and Bootstrap navbar styling.
<html>
  <head>
    {commonHeaderNodes(request)}
    {if (showVisualization) vizHeaderNodes(request) else Seq.empty}
    {if (useDataTables) dataTablesHeaderNodes(request) else Seq.empty}
    <title>{appName} - {title}</title>
  </head>
  <body>
    <div class="navbar navbar-static-top">
      <p class="navbar-text pull-right"><strong title={appName}>{shortAppName}</strong></p>
      <ul class="nav">{header}</ul>
    </div>
    <div class="container-fluid">
      <h3>{title} {helpButton}</h3>
      {content}
    </div>
  </body>
</html>
}
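// Usage sketch (hypothetical page class): a WebUIPage typically builds its content nodes and
// hands them to headerSparkPage together with its parent tab, e.g.
//   UIUtils.headerSparkPage(request, "Details for Job %d".format(jobId), content, parent,
//     showVisualization = true)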
/** Returns a page with the spark css/js and a simple format. Used for scheduler UI. */
def basicSparkPage(
request: HttpServletRequest,
content: => Seq[Node],
title: String,
useDataTables: Boolean = false): Seq[Node] = {
// Condensed sketch of the basic page layout (the full markup also renders the Spark logo).
<html>
  <head>
    {commonHeaderNodes(request)}
    {if (useDataTables) dataTablesHeaderNodes(request) else Seq.empty}
    <title>{title}</title>
  </head>
  <body>
    <div class="container-fluid">
      <h3>
        <span class="version">{org.apache.spark.SPARK_VERSION}</span>
        {title}
      </h3>
      {content}
    </div>
  </body>
</html>
}
/** Returns an HTML table constructed by generating a row for each object in a sequence. */
def listingTable[T](
headers: Seq[String],
generateDataRow: T => Seq[Node],
data: Iterable[T],
fixedWidth: Boolean = false,
id: Option[String] = None,
headerClasses: Seq[String] = Seq.empty,
stripeRowsWithCss: Boolean = true,
sortable: Boolean = true): Seq[Node] = {
val listingTableClass = {
val _tableClass = if (stripeRowsWithCss) TABLE_CLASS_STRIPED else TABLE_CLASS_NOT_STRIPED
if (sortable) {
_tableClass + " sortable"
} else {
_tableClass
}
}
val colWidth = 100.toDouble / headers.size
val colWidthAttr = if (fixedWidth) colWidth + "%" else ""
def getClass(index: Int): String = {
if (index < headerClasses.size) {
headerClasses(index)
} else {
""
}
}
val newlinesInHeader = headers.exists(_.contains("\n"))
def getHeaderContent(header: String): Seq[Node] = {
  if (newlinesInHeader) {
    <ul class="unstyled">
      { header.split("\n").map { case t => <li> {t} </li> } }
    </ul>
  } else {
    Text(header)
  }
}
val headerRow: Seq[Node] = {
  headers.view.zipWithIndex.map { x =>
    <th width={colWidthAttr} class={getClass(x._2)}>{getHeaderContent(x._1)}</th>
  }
}
<table class={listingTableClass} id={id.map(Text.apply)}>
  <thead>{headerRow}</thead>
  <tbody>
    {data.map(r => generateDataRow(r))}
  </tbody>
</table>
}
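// Usage sketch (hypothetical row type): render a two-column table from (name, value) pairs.
//   def propertyRow(kv: (String, String)): Seq[Node] = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
//   UIUtils.listingTable(Seq("Name", "Value"), propertyRow, properties, fixedWidth = true)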
def makeProgressBar(
started: Int,
completed: Int,
failed: Int,
skipped: Int,
reasonToNumKilled: Map[String, Int],
total: Int): Seq[Node] = {
val completeWidth = "width: %s%%".format((completed.toDouble/total)*100)
// started + completed can be > total when there are speculative tasks
val boundedStarted = math.min(started, total - completed)
val startWidth = "width: %s%%".format((boundedStarted.toDouble/total)*100)
<div class="progress">
  <span style="text-align:center; position:absolute; width:100%; left:0;">
    {completed}/{total}
    { if (failed == 0 && skipped == 0 && started > 0) s"($started running)" }
    { if (failed > 0) s"($failed failed)" }
    { if (skipped > 0) s"($skipped skipped)" }
    { reasonToNumKilled.toSeq.sortBy(-_._2).map {
      case (reason, count) => s"($count killed: $reason)"
    } }
  </span>
  <div class="bar bar-completed" style={completeWidth}></div>
  <div class="bar bar-running" style={startWidth}></div>
</div>
}
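// For example, makeProgressBar(started = 2, completed = 3, failed = 1, skipped = 0,
// reasonToNumKilled = Map.empty, total = 10) labels the bar "3/10 (1 failed)" and draws the
// completed and running segments at 30% and 20% width respectively.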
/** Return a "DAG visualization" DOM element that expands into a visualization for a stage. */
def showDagVizForStage(stageId: Int, graph: Option[RDDOperationGraph]): Seq[Node] = {
showDagViz(graph.toSeq, forJob = false)
}
/** Return a "DAG visualization" DOM element that expands into a visualization for a job. */
def showDagVizForJob(jobId: Int, graphs: Seq[RDDOperationGraph]): Seq[Node] = {
showDagViz(graphs, forJob = true)
}
/**
* Return a "DAG visualization" DOM element that expands into a visualization on the UI.
*
* This populates metadata necessary for generating the visualization on the front-end in
* a format that is expected by spark-dag-viz.js. Any changes in the format here must be
* reflected there.
*/
private def showDagViz(graphs: Seq[RDDOperationGraph], forJob: Boolean): Seq[Node] = {
  // Builds the expandable "DAG Visualization" element: a toggle link, the #dag-viz-graph
  // container, and a hidden metadata block holding, for each RDDOperationGraph, the data
  // (DOT layout, incoming/outgoing edges, cached RDDs) that spark-dag-viz.js consumes.
}
def tooltip(text: String, position: String): Seq[Node] = {
  <sup>
    (<a data-toggle="tooltip" data-placement={position} title={text}>?</a>)
  </sup>
}
/**
* Returns HTML rendering of a job or stage description. It will try to parse the string as HTML
* and make sure that it only contains anchors with root-relative links. Otherwise,
* the whole string will be rendered as simple escaped text.
*
* Note: In terms of security, only anchor tags with root relative links are supported. So any
* attempts to embed links outside Spark UI, or other tags like {@code