Add new SBT target for dependency assembly
shivaram committed Oct 9, 2013
1 parent a106ed8 · commit 484166d
Showing 2 changed files with 13 additions and 1 deletion.
6 changes: 6 additions & 0 deletions bin/compute-classpath.sh
@@ -39,6 +39,12 @@ else
 fi
 CLASSPATH="$CLASSPATH:$ASSEMBLY_JAR"
 
+CLASSPATH="$CLASSPATH:$FWDIR/core/target/scala-$SCALA_VERSION/classes"
+CLASSPATH="$CLASSPATH:$FWDIR/repl/target/scala-$SCALA_VERSION/classes"
+CLASSPATH="$CLASSPATH:$FWDIR/mllib/target/scala-$SCALA_VERSION/classes"
+CLASSPATH="$CLASSPATH:$FWDIR/bagel/target/scala-$SCALA_VERSION/classes"
+CLASSPATH="$CLASSPATH:$FWDIR/streaming/target/scala-$SCALA_VERSION/classes"
+
 # Add test classes if we're running from SBT or Maven with SPARK_TESTING set to 1
 if [[ $SPARK_TESTING == 1 ]]; then
   CLASSPATH="$CLASSPATH:$FWDIR/core/target/scala-$SCALA_VERSION/test-classes"
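
The five new CLASSPATH entries put each sub-project's compiled-classes directory on the classpath alongside the assembly jar, so classes recompiled in a single sub-project are picked up without rebuilding the full Spark assembly. A minimal Scala sketch of the string the script assembles — sparkHome, scalaVersion, and the deps jar name here are illustrative assumptions, not values taken from the script:

    // Illustrative only: mirrors the classpath the shell script builds,
    // one "classes" directory per sub-project, appended after the assembly jar.
    val sparkHome    = sys.env.getOrElse("SPARK_HOME", ".")
    val scalaVersion = "2.9.3"  // assumption: stands in for $SCALA_VERSION
    val assemblyJar  = sparkHome + "/assembly/target/spark-assembly-deps.jar"  // placeholder name
    val projects     = Seq("core", "repl", "mllib", "bagel", "streaming")
    val classpath    = (assemblyJar +: projects.map(p =>
      sparkHome + "/" + p + "/target/scala-" + scalaVersion + "/classes")).mkString(":")

Because the dependency-only assembly introduced in SparkBuild.scala below contains no Spark classes, the jar appearing earlier on the classpath cannot shadow the freshly compiled class directories.
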
8 changes: 7 additions & 1 deletion project/SparkBuild.scala
@@ -56,6 +56,8 @@ object SparkBuild extends Build {
   lazy val assemblyProj = Project("assembly", file("assembly"), settings = assemblyProjSettings)
     .dependsOn(core, bagel, mllib, repl, streaming) dependsOn(maybeYarn: _*)
 
+  lazy val spark = TaskKey[Unit]("spark", "Build assembly of dependencies and spark packages")
+
   // A configuration to set an alternative publishLocalConfiguration
   lazy val MavenCompile = config("m2r") extend(Compile)
   lazy val publishLocalBoth = TaskKey[Unit]("publish-local", "publish local for m2 and ivy")
@@ -73,6 +75,8 @@
   lazy val allProjects = Seq[ProjectReference](
     core, repl, examples, bagel, streaming, mllib, tools, assemblyProj) ++ maybeYarnRef
 
+  lazy val packageProjects = Seq[ProjectReference](core, repl, bagel, streaming, mllib, tools) ++ maybeYarnRef
+
   def sharedSettings = Defaults.defaultSettings ++ Seq(
     organization := "org.apache.spark",
     version := "0.8.0-SNAPSHOT",
@@ -288,7 +292,9 @@
 
   def assemblyProjSettings = sharedSettings ++ Seq(
     name := "spark-assembly",
-    jarName in assembly <<= version map { v => "spark-assembly-" + v + "-hadoop" + hadoopVersion + ".jar" }
+    spark in Compile <<= (packageProjects.map(packageBin in Compile in _) ++ Seq(packageDependency in Compile)).dependOn,
+    jarName in assembly <<= version map { v => "spark-assembly-" + v + "-hadoop" + hadoopVersion + ".jar" },
+    jarName in packageDependency <<= version map { v => "spark-assembly-" + v + "-hadoop" + hadoopVersion + "-deps.jar" }
   ) ++ assemblySettings ++ extraAssemblySettings
 
   def extraAssemblySettings() = Seq(
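
Read together, the three hunks define a spark task key and a packageProjects list, then wire them up in assemblyProjSettings: spark in Compile is bound (with sbt 0.12's <<= operator) to run packageBin in every project in packageProjects together with sbt-assembly's packageDependency, whose output jar gets a distinct "-deps" suffix so it cannot be confused with the full assembly. A condensed, self-contained sketch of the same pattern, with hypothetical sub-projects a and b standing in for Spark's modules and an invented build-all key name, following the same 0.12-era sbt and sbt-assembly API the diff uses:

    import sbt._
    import Keys._
    import sbtassembly.Plugin._
    import AssemblyKeys._  // packageDependency, jarName (sbt-assembly 0.9-era API)

    object ExampleBuild extends Build {
      // Hypothetical sub-projects standing in for core, repl, mllib, ...
      lazy val a = Project("a", file("a"))
      lazy val b = Project("b", file("b"))

      lazy val packageProjects = Seq[ProjectReference](a, b)

      // One umbrella task: package every sub-project plus the deps-only jar.
      lazy val buildAll = TaskKey[Unit]("build-all", "Package sub-projects and the dependency assembly")

      lazy val root = Project("root", file("."), settings =
        Defaults.defaultSettings ++ assemblySettings ++ Seq(
          buildAll in Compile <<= (packageProjects.map(packageBin in Compile in _) ++
            Seq(packageDependency in Compile)).dependOn,
          jarName in packageDependency <<= version map { v => "example-" + v + "-deps.jar" }
        )
      ).dependsOn(a, b)
    }

With SparkBuild.scala's version, the new target would presumably be invoked from the sbt shell as spark (scoped to the assembly project), producing spark-assembly-<version>-hadoop<hadoopVersion>-deps.jar alongside the per-project package jars; the exact invocation is an assumption, since the commit itself does not show one.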
