Repository: spark
Updated Branches:
  refs/heads/master 7c6937a88 -> 23256be0d
[SPARK-14576][WEB UI] Spark console should display Web UI url

## What changes were proposed in this pull request?
This is a proposal to print the Spark Driver UI link when spark-shell is launched.

## How was this patch tested?
Launched spark-shell in local mode and cluster mode. Spark-shell console output included following line:
"Spark context Web UI available at <Spark web url>"

Author: Ergin Seyfe <[email protected]>

Closes #12341 from seyfe/spark_console_display_webui_link.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/23256be0
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/23256be0
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/23256be0

Branch: refs/heads/master
Commit: 23256be0d0846d4eb188a4d1cae6e3f261248153
Parents: 7c6937a
Author: Ergin Seyfe <[email protected]>
Authored: Thu Apr 28 16:16:28 2016 +0100
Committer: Sean Owen <[email protected]>
Committed: Thu Apr 28 16:16:28 2016 +0100

----------------------------------------------------------------------
 .../main/scala/org/apache/spark/SparkContext.scala    |  2 ++
 core/src/main/scala/org/apache/spark/ui/WebUI.scala   | 14 ++++++++------
 .../scala/org/apache/spark/repl/SparkILoopInit.scala  |  1 +
 .../main/scala/org/apache/spark/repl/SparkILoop.scala |  1 +
 4 files changed, 12 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/23256be0/core/src/main/scala/org/apache/spark/SparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 865989a..ed4408c 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -280,6 +280,8 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
 
   private[spark] def ui: Option[SparkUI] = _ui
 
+  def uiWebUrl: Option[String] = _ui.map(_.webUrl)
+
   /**
    * A default Hadoop Configuration for the Hadoop code (e.g. file systems) that we reuse.
    *

http://git-wip-us.apache.org/repos/asf/spark/blob/23256be0/core/src/main/scala/org/apache/spark/ui/WebUI.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/ui/WebUI.scala b/core/src/main/scala/org/apache/spark/ui/WebUI.scala
index 2b0bc32..2c40e72 100644
--- a/core/src/main/scala/org/apache/spark/ui/WebUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/WebUI.scala
@@ -133,26 +133,28 @@ private[spark] abstract class WebUI(
 
   /** Bind to the HTTP server behind this web interface. */
   def bind() {
-    assert(!serverInfo.isDefined, "Attempted to bind %s more than once!".format(className))
+    assert(!serverInfo.isDefined, s"Attempted to bind $className more than once!")
     try {
-      var host = Option(conf.getenv("SPARK_LOCAL_IP")).getOrElse("0.0.0.0")
+      val host = Option(conf.getenv("SPARK_LOCAL_IP")).getOrElse("0.0.0.0")
       serverInfo = Some(startJettyServer(host, port, sslOptions, handlers, conf, name))
-      logInfo("Bound %s to %s, and started at http://%s:%d".format(className, host,
-        publicHostName, boundPort))
+      logInfo(s"Bound $className to $host, and started at $webUrl")
     } catch {
       case e: Exception =>
-        logError("Failed to bind %s".format(className), e)
+        logError(s"Failed to bind $className", e)
         System.exit(1)
     }
   }
 
+  /** Return the url of web interface. Only valid after bind(). */
+  def webUrl: String = s"http://$publicHostName:$boundPort"
+
   /** Return the actual port to which this server is bound. Only valid after bind(). */
   def boundPort: Int = serverInfo.map(_.boundPort).getOrElse(-1)
 
   /** Stop the server behind this web interface. Only valid after bind(). */
   def stop() {
     assert(serverInfo.isDefined,
-      "Attempted to stop %s before binding to a server!".format(className))
+      s"Attempted to stop $className before binding to a server!")
     serverInfo.get.stop()
   }
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/23256be0/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
----------------------------------------------------------------------
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
index 4ce776e..f1febb9 100644
--- a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
@@ -125,6 +125,7 @@ private[repl] trait SparkILoopInit {
     command("""
          @transient val sc = {
            val _sc = org.apache.spark.repl.Main.interp.createSparkContext()
+           _sc.uiWebUrl.foreach(webUrl => println(s"Spark context Web UI available at ${webUrl}"))
            println("Spark context available as 'sc' " +
              s"(master = ${_sc.master}, app id = ${_sc.applicationId}).")
            _sc

http://git-wip-us.apache.org/repos/asf/spark/blob/23256be0/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
----------------------------------------------------------------------
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index d029659..d74b796 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -38,6 +38,7 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
     processLine("""
         @transient val sc = {
           val _sc = org.apache.spark.repl.Main.createSparkContext()
+          _sc.uiWebUrl.foreach(webUrl => println(s"Spark context Web UI available at ${webUrl}"))
          println("Spark context available as 'sc' " +
           s"(master = ${_sc.master}, app id = ${_sc.applicationId}).")
          _sc
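
A minimal usage sketch (editorial addition, not part of this commit): with this patch applied, any driver program can read the bound Web UI address through the new SparkContext.uiWebUrl accessor, not just spark-shell. The object name WebUrlExample, the app name, and the local[*] master below are placeholders for illustration.

    import org.apache.spark.{SparkConf, SparkContext}

    // Hypothetical example driver, assuming a Spark build that includes this patch.
    object WebUrlExample {
      def main(args: Array[String]): Unit = {
        val conf = new SparkConf()
          .setAppName("web-url-example")   // placeholder app name
          .setMaster("local[*]")           // placeholder master
        val sc = new SparkContext(conf)

        // uiWebUrl is Some("http://<host>:<port>") once the SparkUI has bound to a port,
        // and None when the UI is disabled (e.g. spark.ui.enabled=false).
        sc.uiWebUrl match {
          case Some(url) => println(s"Spark context Web UI available at $url")
          case None      => println("Spark Web UI is disabled for this context")
        }

        sc.stop()
      }
    }

This mirrors the spark-shell change above: the REPL init code uses _sc.uiWebUrl.foreach(...), so nothing is printed when no UI is running.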
