From ec992e182231da7313d85d10b3d5fd5975c44c8b Mon Sep 17 00:00:00 2001
From: Patrick Wendell
Date: Sat, 1 Mar 2014 17:15:38 -0800
Subject: [PATCH] Revert "[SPARK-1150] fix repo location in create script"

This reverts commit 9aa095711858ce8670e51488f66a3d7c1a821c30.
---
 conf/spark-env.sh.template                            |  2 +-
 .../apache/spark/deploy/worker/WorkerArguments.scala  | 10 ++--------
 docs/spark-standalone.md                              |  4 ++--
 3 files changed, 5 insertions(+), 11 deletions(-)

diff --git a/conf/spark-env.sh.template b/conf/spark-env.sh.template
index 619fc27d5..6432a5660 100755
--- a/conf/spark-env.sh.template
+++ b/conf/spark-env.sh.template
@@ -15,7 +15,7 @@
 # - SPARK_MASTER_IP, to bind the master to a different IP address or hostname
 # - SPARK_MASTER_PORT / SPARK_MASTER_WEBUI_PORT, to use non-default ports
 # - SPARK_WORKER_CORES, to set the number of cores to use on this machine
-# - SPARK_WORKER_MEM, to set how much memory to use (e.g. 1000m, 2g)
+# - SPARK_WORKER_MEMORY, to set how much memory to use (e.g. 1000m, 2g)
 # - SPARK_WORKER_PORT / SPARK_WORKER_WEBUI_PORT
 # - SPARK_WORKER_INSTANCES, to set the number of worker processes per node
 # - SPARK_WORKER_DIR, to set the working directory of worker processes
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
index 52c441963..d35d5be73 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
@@ -18,15 +18,13 @@
 package org.apache.spark.deploy.worker
 
 import java.lang.management.ManagementFactory
 
-import org.apache.spark.Logging
 import org.apache.spark.util.{IntParam, MemoryParam, Utils}
 
 /**
  * Command-line parser for the master.
  */
-private[spark] class WorkerArguments(args: Array[String]) extends Logging {
-  initLogging()
+private[spark] class WorkerArguments(args: Array[String]) {
   var host = Utils.localHostName()
   var port = 0
   var webUiPort = 8081
@@ -42,13 +40,9 @@ private[spark] class WorkerArguments(args: Array[String]) extends Logging {
   if (System.getenv("SPARK_WORKER_CORES") != null) {
     cores = System.getenv("SPARK_WORKER_CORES").toInt
   }
-  if (System.getenv("SPARK_WORKER_MEM") != null) {
-    memory = Utils.memoryStringToMb(System.getenv("SPARK_WORKER_MEM"))
-  } else if (System.getenv("SPARK_WORKER_MEMORY") != null) {
-    logWarning("SPARK_WORKER_MEMORY is deprecated. Please use SPARK_WORKER_MEM instead")
+  if (System.getenv("SPARK_WORKER_MEMORY") != null) {
     memory = Utils.memoryStringToMb(System.getenv("SPARK_WORKER_MEMORY"))
   }
-
   if (System.getenv("SPARK_WORKER_WEBUI_PORT") != null) {
     webUiPort = System.getenv("SPARK_WORKER_WEBUI_PORT").toInt
   }
diff --git a/docs/spark-standalone.md b/docs/spark-standalone.md
index a2dec86be..51fb3a4f7 100644
--- a/docs/spark-standalone.md
+++ b/docs/spark-standalone.md
@@ -104,8 +104,8 @@ You can optionally configure the cluster further by setting environment variable
     <td>Total number of cores to allow Spark applications to use on the machine (default: all available cores).</td>
   </tr>
   <tr>
-    <td><code>SPARK_WORKER_MEM</code></td>
-    <td>Total amount of memory to allow Spark applications to use on the machine, e.g. 1000m, 2g (default: total memory minus 1 GB); note that each application's individual memory is configured using its spark.executor.memory property. The old variable SPARK_WORKER_MEMORY has been deprecated.</td>
+    <td><code>SPARK_WORKER_MEMORY</code></td>
+    <td>Total amount of memory to allow Spark applications to use on the machine, e.g. 1000m, 2g (default: total memory minus 1 GB); note that each application's individual memory is configured using its spark.executor.memory property.</td>
   </tr>
   <tr>
     <td><code>SPARK_WORKER_WEBUI_PORT</code></td>
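
After this revert, standalone workers are configured through SPARK_WORKER_MEMORY again; SPARK_WORKER_MEM is no longer read and no deprecation warning is logged. A minimal conf/spark-env.sh sketch under that assumption (the core and memory values below are purely illustrative, not taken from this patch):

    # conf/spark-env.sh -- sourced by the standalone launch scripts before starting the worker
    export SPARK_WORKER_CORES=8          # illustrative: cores this worker offers
    export SPARK_WORKER_MEMORY=16g       # illustrative: total memory to offer (e.g. 1000m, 2g)
    export SPARK_WORKER_WEBUI_PORT=8081  # worker web UI port (8081 is the default above)

WorkerArguments.scala reads these via System.getenv, so they only take effect if exported in the worker's environment before the daemon is started.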