Rename testExecutorEnvs --> executorEnvs.
The name was changed to testExecutorEnvs, but in fact the map is used for things other than tests. So I've changed it back.

Author: Patrick Wendell <[email protected]>

Closes apache#747 from pwendell/executor-env and squashes the following commits:

36a60a5 [Patrick Wendell] Rename testExecutorEnvs --> executorEnvs.
pwendell committed May 13, 2014
1 parent: 8586bf5 · commit: 3ce526b
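
The rename matters because executorEnvs carries production settings, not just test ones: the SparkContext hunk below fills it from conf.getExecutorEnv, which returns variables an application sets through SparkConf.setExecutorEnv. A minimal sketch of that non-test path (the variable name MY_SERVICE_URL and the app name are made up for illustration):

import org.apache.spark.{SparkConf, SparkContext}

// Variables set here are stored under spark.executorEnv.* and copied into
// SparkContext.executorEnvs via conf.getExecutorEnv (see the diff below).
val conf = new SparkConf()
  .setMaster("local[2]")
  .setAppName("executor-env-demo")                        // hypothetical app name
  .setExecutorEnv("MY_SERVICE_URL", "http://example.com") // hypothetical variable

val sc = new SparkContext(conf)
// The cluster backends then export MY_SERVICE_URL to every executor,
// alongside SPARK_EXECUTOR_MEMORY and SPARK_USER.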
Showing 4 changed files with 8 additions and 9 deletions.
core/src/main/scala/org/apache/spark/SparkContext.scala (11 changes: 5 additions & 6 deletions)

@@ -276,27 +276,26 @@ class SparkContext(config: SparkConf) extends Logging {
     .getOrElse(512)
 
   // Environment variables to pass to our executors.
-  // NOTE: This should only be used for test related settings.
-  private[spark] val testExecutorEnvs = HashMap[String, String]()
+  private[spark] val executorEnvs = HashMap[String, String]()
 
   // Convert java options to env vars as a work around
   // since we can't set env vars directly in sbt.
   for { (envKey, propKey) <- Seq(("SPARK_TESTING", "spark.testing"))
     value <- Option(System.getenv(envKey)).orElse(Option(System.getProperty(propKey)))} {
-    testExecutorEnvs(envKey) = value
+    executorEnvs(envKey) = value
   }
   // The Mesos scheduler backend relies on this environment variable to set executor memory.
   // TODO: Set this only in the Mesos scheduler.
-  testExecutorEnvs("SPARK_EXECUTOR_MEMORY") = executorMemory + "m"
-  testExecutorEnvs ++= conf.getExecutorEnv
+  executorEnvs("SPARK_EXECUTOR_MEMORY") = executorMemory + "m"
+  executorEnvs ++= conf.getExecutorEnv
 
   // Set SPARK_USER for user who is running SparkContext.
   val sparkUser = Option {
     Option(System.getProperty("user.name")).getOrElse(System.getenv("SPARK_USER"))
   }.getOrElse {
     SparkContext.SPARK_UNKNOWN_USER
   }
-  testExecutorEnvs("SPARK_USER") = sparkUser
+  executorEnvs("SPARK_USER") = sparkUser
 
   // Create and start the scheduler
   private[spark] var taskScheduler = SparkContext.createTaskScheduler(this, master)
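
One subtlety in the hunk above: System.getenv returns null when SPARK_USER is unset, so the lookup result is re-wrapped in Option before the final fallback. A self-contained sketch of the same resolution order, with a placeholder standing in for the private SparkContext.SPARK_UNKNOWN_USER constant:

// Resolution order: the user.name system property, then the SPARK_USER
// environment variable, then a fallback constant. The outer Option makes
// the possibly-null getenv result safe.
val SPARK_UNKNOWN_USER = "<unknown>" // placeholder for the private constant

val sparkUser = Option {
  Option(System.getProperty("user.name")).getOrElse(System.getenv("SPARK_USER"))
}.getOrElse(SPARK_UNKNOWN_USER)

println(s"Running as: $sparkUser")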
SparkDeploySchedulerBackend.scala (2 changes: 1 addition & 1 deletion)

@@ -54,7 +54,7 @@ private[spark] class SparkDeploySchedulerBackend(
     }
 
     val command = Command(
-      "org.apache.spark.executor.CoarseGrainedExecutorBackend", args, sc.testExecutorEnvs,
+      "org.apache.spark.executor.CoarseGrainedExecutorBackend", args, sc.executorEnvs,
       classPathEntries, libraryPathEntries, extraJavaOpts)
     val sparkHome = sc.getSparkHome()
     val appDesc = new ApplicationDescription(sc.appName, maxCores, sc.executorMemory, command,
CoarseMesosSchedulerBackend.scala (2 changes: 1 addition & 1 deletion)

@@ -122,7 +122,7 @@ private[spark] class CoarseMesosSchedulerBackend(
     val extraLibraryPath = conf.getOption(libraryPathOption).map(p => s"-Djava.library.path=$p")
     val extraOpts = Seq(extraJavaOpts, extraLibraryPath).flatten.mkString(" ")
 
-    sc.testExecutorEnvs.foreach { case (key, value) =>
+    sc.executorEnvs.foreach { case (key, value) =>
       environment.addVariables(Environment.Variable.newBuilder()
         .setName(key)
         .setValue(value)
MesosSchedulerBackend.scala (2 changes: 1 addition & 1 deletion)

@@ -90,7 +90,7 @@ private[spark] class MesosSchedulerBackend(
       "Spark home is not set; set it through the spark.home system " +
       "property, the SPARK_HOME environment variable or the SparkContext constructor"))
     val environment = Environment.newBuilder()
-    sc.testExecutorEnvs.foreach { case (key, value) =>
+    sc.executorEnvs.foreach { case (key, value) =>
       environment.addVariables(Environment.Variable.newBuilder()
         .setName(key)
         .setValue(value)
