Style fixes and addressed review comments at apache#221
ScrapCodes committed Dec 10, 2013
1 parent c1201f4 commit 17db6a9
Showing 22 changed files with 114 additions and 107 deletions.
14 changes: 7 additions & 7 deletions assembly/pom.xml
@@ -41,27 +41,27 @@
<dependencies>
<dependency>
<groupId>org.apache.spark</groupId>
- <artifactId>spark-core_${scala-short.version}</artifactId>
+ <artifactId>spark-core_2.10</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
- <artifactId>spark-bagel_${scala-short.version}</artifactId>
+ <artifactId>spark-bagel_2.10</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
- <artifactId>spark-mllib_${scala-short.version}</artifactId>
+ <artifactId>spark-mllib_2.10</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
- <artifactId>spark-repl_${scala-short.version}</artifactId>
+ <artifactId>spark-repl_2.10</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
- <artifactId>spark-streaming_${scala-short.version}</artifactId>
+ <artifactId>spark-streaming_2.10</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
@@ -79,7 +79,7 @@
<artifactId>maven-shade-plugin</artifactId>
<configuration>
<shadedArtifactAttached>false</shadedArtifactAttached>
- <outputFile>${project.build.directory}/scala-${scala-short.version}/${project.artifactId}-${project.version}-hadoop${hadoop.version}.jar</outputFile>
+ <outputFile>${project.build.directory}/scala-2.10/${project.artifactId}-${project.version}-hadoop${hadoop.version}.jar</outputFile>
<artifactSet>
<includes>
<include>*:*</include>
@@ -128,7 +128,7 @@
<dependencies>
<dependency>
<groupId>org.apache.spark</groupId>
- <artifactId>spark-yarn_${scala-short.version}</artifactId>
+ <artifactId>spark-yarn_2.10</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
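Note: every pom touched in this commit follows the same pattern: the scala-short.version Maven property is replaced by the literal 2.10 Scala binary-version suffix, both in dependency artifactIds and in the scala-2.10 output paths. For comparison only, here is a hypothetical build.sbt sketch (not part of the commit) of how sbt derives that same suffix automatically; Maven has no such convention, so the poms now spell it out by hand:

// Hypothetical build.sbt sketch, for comparison with the pom changes above.
// sbt appends the Scala binary version to artifact names and output paths,
// producing the same spark-core_2.10 / target/scala-2.10 layout by default.
scalaVersion := "2.10.3" // binary version 2.10
crossPaths := true       // default: cross-versioned artifact names and target paths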
10 changes: 5 additions & 5 deletions bagel/pom.xml
@@ -34,7 +34,7 @@
<dependencies>
<dependency>
<groupId>org.apache.spark</groupId>
- <artifactId>spark-core_${scala-short.version}</artifactId>
+ <artifactId>spark-core_2.10</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
@@ -43,18 +43,18 @@
</dependency>
<dependency>
<groupId>org.scalatest</groupId>
- <artifactId>scalatest_${scala-short.version}</artifactId>
+ <artifactId>scalatest_2.10</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.scalacheck</groupId>
- <artifactId>scalacheck_${scala-short.version}</artifactId>
+ <artifactId>scalacheck_2.10</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
- <outputDirectory>target/scala-${scala-short.version}/classes</outputDirectory>
- <testOutputDirectory>target/scala-${scala-short.version}/test-classes</testOutputDirectory>
+ <outputDirectory>target/scala-2.10/classes</outputDirectory>
+ <testOutputDirectory>target/scala-2.10/test-classes</testOutputDirectory>
<plugins>
<plugin>
<groupId>org.scalatest</groupId>
18 changes: 9 additions & 9 deletions core/pom.xml
@@ -86,7 +86,7 @@
</dependency>
<dependency>
<groupId>com.twitter</groupId>
- <artifactId>chill_${scala-short.version}</artifactId>
+ <artifactId>chill_2.10</artifactId>
<version>0.3.1</version>
</dependency>
<dependency>
@@ -96,19 +96,19 @@
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
- <artifactId>akka-remote_${scala-short.version}</artifactId>
+ <artifactId>akka-remote_2.10</artifactId>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
- <artifactId>akka-slf4j_${scala-short.version}</artifactId>
+ <artifactId>akka-slf4j_2.10</artifactId>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
</dependency>
<dependency>
<groupId>net.liftweb</groupId>
- <artifactId>lift-json_${scala-short.version}</artifactId>
+ <artifactId>lift-json_2.10</artifactId>
</dependency>
<dependency>
<groupId>it.unimi.dsi</groupId>
@@ -120,7 +120,7 @@
</dependency>
<dependency>
<groupId>com.github.scala-incubator.io</groupId>
- <artifactId>scala-io-file_${scala-short.version}</artifactId>
+ <artifactId>scala-io-file_2.10</artifactId>
</dependency>
<dependency>
<groupId>org.apache.mesos</groupId>
@@ -166,12 +166,12 @@
</dependency>
<dependency>
<groupId>org.scalatest</groupId>
- <artifactId>scalatest_${scala-short.version}</artifactId>
+ <artifactId>scalatest_2.10</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.scalacheck</groupId>
- <artifactId>scalacheck_${scala-short.version}</artifactId>
+ <artifactId>scalacheck_2.10</artifactId>
<scope>test</scope>
</dependency>
<dependency>
@@ -191,8 +191,8 @@
</dependency>
</dependencies>
<build>
- <outputDirectory>target/scala-${scala-short.version}/classes</outputDirectory>
- <testOutputDirectory>target/scala-${scala-short.version}/test-classes</testOutputDirectory>
+ <outputDirectory>target/scala-2.10/classes</outputDirectory>
+ <testOutputDirectory>target/scala-2.10/test-classes</testOutputDirectory>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
9 changes: 4 additions & 5 deletions core/src/main/scala/org/apache/spark/MapOutputTracker.scala
@@ -72,12 +72,11 @@ private[spark] class MapOutputTracker extends Logging {
// throw a SparkException if this fails.
private def askTracker(message: Any): Any = {
try {
- val future = if (trackerActor.isLeft ) {
- trackerActor.left.get.ask(message)(timeout)
- } else {
- trackerActor.right.get.ask(message)(timeout)
+ val future = trackerActor match {
+ case Left(a: ActorRef) => a.ask(message)(timeout)
+ case Right(b: ActorSelection) => b.ask(message)(timeout)
}
- return Await.result(future, timeout)
+ Await.result(future, timeout)
} catch {
case e: Exception =>
throw new SparkException("Error communicating with MapOutputTracker", e)
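The change above replaces isLeft/left.get branching with a pattern match on Either[ActorRef, ActorSelection] and drops the redundant return. A minimal self-contained sketch of the resulting shape (the method name and signature are illustrative, not Spark's exact code):

import scala.concurrent.Await

import akka.actor.{ActorRef, ActorSelection}
import akka.pattern.ask
import akka.util.Timeout

// Sketch: the match binds whichever side of the Either is present, and the
// last expression's value is returned without an explicit `return`.
def askEither(target: Either[ActorRef, ActorSelection], message: Any)
    (implicit timeout: Timeout): Any = {
  val future = target match {
    case Left(ref)  => ref.ask(message)(timeout)
    case Right(sel) => sel.ask(message)(timeout)
  }
  Await.result(future, timeout.duration)
}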
6 changes: 3 additions & 3 deletions core/src/main/scala/org/apache/spark/Partitioner.scala
@@ -17,11 +17,11 @@

package org.apache.spark

- import org.apache.spark.util.Utils
- import org.apache.spark.rdd.RDD

import scala.reflect.ClassTag

+ import org.apache.spark.rdd.RDD
+ import org.apache.spark.util.Utils

/**
* An object that defines how the elements in a key-value pair RDD are partitioned by key.
* Maps each key to a partition ID, from 0 to `numPartitions - 1`.
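Several of the Scala files in this commit get the same import regrouping seen here: java imports first, then scala, then third-party and org.apache.spark imports, with blank lines between groups and alphabetical order within them. An illustrative sketch of the target ordering (a hypothetical file, not from the commit):

// Hypothetical example of the import grouping these changes converge on:
// java.*, blank line, scala.*, blank line, everything else alphabetized.
import java.io.File

import scala.reflect.ClassTag

import org.apache.spark.rdd.RDD
import org.apache.spark.util.Utils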
core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala
@@ -17,23 +17,19 @@

package org.apache.spark.deploy.worker.ui

- import akka.actor.ActorRef
- import akka.util.Timeout
+ import java.io.File

import scala.concurrent.duration._

- import java.io.{FileInputStream, File}

+ import akka.util.Timeout
import javax.servlet.http.HttpServletRequest

+ import org.eclipse.jetty.server.{Handler, Server}

+ import org.apache.spark.Logging
import org.apache.spark.deploy.worker.Worker
- import org.apache.spark.{Logging}
- import org.apache.spark.ui.JettyUtils
+ import org.apache.spark.ui.{JettyUtils, UIUtils}
import org.apache.spark.ui.JettyUtils._
- import org.apache.spark.ui.UIUtils
import org.apache.spark.util.Utils
- import org.eclipse.jetty.server.{Handler, Server}

/**
* Web UI server for the standalone worker.
Expand Down
11 changes: 4 additions & 7 deletions core/src/main/scala/org/apache/spark/rdd/CheckpointRDD.scala
@@ -17,16 +17,13 @@

package org.apache.spark.rdd

+ import java.io.IOException

import scala.reflect.ClassTag

+ import org.apache.hadoop.fs.Path
import org.apache.spark._
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.hadoop.mapred.{FileInputFormat, SequenceFileInputFormat, JobConf, Reporter}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.io.{NullWritable, BytesWritable}
import org.apache.hadoop.util.ReflectionUtils
- import org.apache.hadoop.fs.Path
- import java.io.{File, IOException, EOFException}
import java.text.NumberFormat
private[spark] class CheckpointRDDPartition(val index: Int) extends Partition {}

1 change: 1 addition & 0 deletions core/src/main/scala/org/apache/spark/rdd/JdbcRDD.scala
@@ -20,6 +20,7 @@ package org.apache.spark.rdd
import java.sql.{Connection, ResultSet}

import scala.reflect.ClassTag
+
import org.apache.spark.{Logging, Partition, SparkContext, TaskContext}
import org.apache.spark.util.NextIterator

3 changes: 2 additions & 1 deletion core/src/main/scala/org/apache/spark/rdd/MappedRDD.scala
@@ -17,9 +17,10 @@

package org.apache.spark.rdd

- import org.apache.spark.{Partition, TaskContext}
import scala.reflect.ClassTag

+ import org.apache.spark.{Partition, TaskContext}
+
private[spark]
class MappedRDD[U: ClassTag, T: ClassTag](prev: RDD[T], f: T => U)
extends RDD[U](prev) {
core/src/main/scala/org/apache/spark/rdd/OrderedRDDFunctions.scala
@@ -17,9 +17,10 @@

package org.apache.spark.rdd

- import org.apache.spark.{RangePartitioner, Logging}
import scala.reflect.ClassTag

+ import org.apache.spark.{Logging, RangePartitioner}

/**
* Extra functions available on RDDs of (key, value) pairs where the key is sortable through
* an implicit conversion. Import `org.apache.spark.SparkContext._` at the top of your program to
3 changes: 2 additions & 1 deletion core/src/main/scala/org/apache/spark/rdd/ShuffledRDD.scala
@@ -17,9 +17,10 @@

package org.apache.spark.rdd

- import org.apache.spark.{Dependency, Partitioner, SparkEnv, ShuffleDependency, Partition, TaskContext}
import scala.reflect.ClassTag

+ import org.apache.spark.{Dependency, Partition, Partitioner, ShuffleDependency,
+   SparkEnv, TaskContext}

private[spark] class ShuffledRDDPartition(val idx: Int) extends Partition {
override val index = idx
core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala
@@ -157,10 +157,9 @@ private[spark] class BlockManagerMaster(var driverActor : Either[ActorRef, ActorSelection])
while (attempts < AKKA_RETRY_ATTEMPTS) {
attempts += 1
try {
- val future = if (driverActor.isLeft ) {
- driverActor.left.get.ask(message)(timeout)
- } else {
- driverActor.right.get.ask(message)(timeout)
+ val future = driverActor match {
+ case Left(a: ActorRef) => a.ask(message)(timeout)
+ case Right(b: ActorSelection) => b.ask(message)(timeout)
}
val result = Await.result(future, timeout)
if (result == null) {
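BlockManagerMaster wraps the same Either match in a bounded retry loop, as the surrounding context shows. A condensed self-contained sketch of that logic (the attempt limit, exception type, and names are illustrative, not Spark's exact code):

import scala.concurrent.Await

import akka.actor.{ActorRef, ActorSelection}
import akka.pattern.ask
import akka.util.Timeout

// Sketch of ask-with-retries: a null result or an exception counts as a
// failed attempt, mirroring the loop visible in the diff above.
def askWithRetry(driver: Either[ActorRef, ActorSelection], message: Any,
    maxAttempts: Int = 3)(implicit timeout: Timeout): Any = {
  var attempts = 0
  while (attempts < maxAttempts) {
    attempts += 1
    try {
      val future = driver match {
        case Left(ref)  => ref.ask(message)(timeout)
        case Right(sel) => sel.ask(message)(timeout)
      }
      val result = Await.result(future, timeout.duration)
      if (result != null) {
        return result
      }
    } catch {
      case _: Exception => // treat as a failed attempt and retry
    }
  }
  throw new Exception("Failed after " + maxAttempts + " attempts")
}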
core/src/main/scala/org/apache/spark/util/IndestructibleActorSystem.scala
@@ -1,5 +1,18 @@
- /**
- * Copyright (C) 2009-2013 Typesafe Inc. <http://www.typesafe.com>
+ /*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
*/

// Must be in akka.actor package as ActorSystemImpl is protected[akka].
20 changes: 10 additions & 10 deletions examples/pom.xml
@@ -49,25 +49,25 @@
<dependencies>
<dependency>
<groupId>org.apache.spark</groupId>
- <artifactId>spark-core_${scala-short.version}</artifactId>
+ <artifactId>spark-core_2.10</artifactId>
<version>${project.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
- <artifactId>spark-streaming_${scala-short.version}</artifactId>
+ <artifactId>spark-streaming_2.10</artifactId>
<version>${project.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
- <artifactId>spark-mllib_${scala-short.version}</artifactId>
+ <artifactId>spark-mllib_2.10</artifactId>
<version>${project.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
- <artifactId>spark-bagel_${scala-short.version}</artifactId>
+ <artifactId>spark-bagel_2.10</artifactId>
<version>${project.version}</version>
<scope>provided</scope>
</dependency>
@@ -88,7 +88,7 @@
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
- <artifactId>kafka_2.9.2</artifactId>
+ <artifactId>kafka_2.10</artifactId>
<version>0.8.0-beta1</version>
<exclusions>
<exclusion>
@@ -107,17 +107,17 @@
</dependency>
<dependency>
<groupId>com.twitter</groupId>
- <artifactId>algebird-core_${scala-short.version}</artifactId>
+ <artifactId>algebird-core_2.10</artifactId>
<version>0.1.11</version>
</dependency>
<dependency>
<groupId>org.scalatest</groupId>
- <artifactId>scalatest_${scala-short.version}</artifactId>
+ <artifactId>scalatest_2.10</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.scalacheck</groupId>
- <artifactId>scalacheck_${scala-short.version}</artifactId>
+ <artifactId>scalacheck_2.10</artifactId>
<scope>test</scope>
</dependency>
<dependency>
@@ -166,8 +166,8 @@
</dependencies>

<build>
- <outputDirectory>target/scala-${scala-short.version}/classes</outputDirectory>
- <testOutputDirectory>target/scala-${scala-short.version}/test-classes</testOutputDirectory>
+ <outputDirectory>target/scala-2.10/classes</outputDirectory>
+ <testOutputDirectory>target/scala-2.10/test-classes</testOutputDirectory>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>