
Commit 37586e5

Davies Liu authored and committed on Aug 15, 2015
[HOTFIX] fix duplicated braces
Author: Davies Liu <[email protected]>

Closes apache#8219 from davies/fix_typo.
1 parent e5fd604 commit 37586e5

13 files changed, +15 -15 lines
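Every changed line fixes the same typo: a duplicated closing brace after a Scala string interpolation, such as ${e.getMessage}}. The s interpolator only consumes the braces of ${...} itself, so the extra } compiles fine but is emitted as a literal character in the log or error message. A minimal standalone sketch of the behavior (not from the Spark sources; the names are illustrative):

object DuplicatedBracesDemo {
  def main(args: Array[String]): Unit = {
    val attemptsLeft = 3
    // Buggy: the interpolator stops at the brace matching "${",
    // so the second "}" is printed literally.
    println(s"will retry ${attemptsLeft}} more times") // prints: will retry 3} more times
    // Fixed: exactly one closing brace per interpolated expression.
    println(s"will retry ${attemptsLeft} more times")  // prints: will retry 3 more times
  }
}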
 

core/src/main/scala/org/apache/spark/storage/BlockManager.scala (+1 -1)

@@ -222,7 +222,7 @@ private[spark] class BlockManager(
         return
       } catch {
         case e: Exception if i < MAX_ATTEMPTS =>
-          logError(s"Failed to connect to external shuffle server, will retry ${MAX_ATTEMPTS - i}}"
+          logError(s"Failed to connect to external shuffle server, will retry ${MAX_ATTEMPTS - i}"
             + s" more times after waiting $SLEEP_TIME_SECS seconds...", e)
           Thread.sleep(SLEEP_TIME_SECS * 1000)
       }

core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala (+3 -3)

@@ -103,7 +103,7 @@ class BlockManagerMaster(
     val future = driverEndpoint.askWithRetry[Future[Seq[Int]]](RemoveRdd(rddId))
     future.onFailure {
       case e: Exception =>
-        logWarning(s"Failed to remove RDD $rddId - ${e.getMessage}}", e)
+        logWarning(s"Failed to remove RDD $rddId - ${e.getMessage}", e)
     }(ThreadUtils.sameThread)
     if (blocking) {
       timeout.awaitResult(future)
@@ -115,7 +115,7 @@ class BlockManagerMaster(
     val future = driverEndpoint.askWithRetry[Future[Seq[Boolean]]](RemoveShuffle(shuffleId))
     future.onFailure {
       case e: Exception =>
-        logWarning(s"Failed to remove shuffle $shuffleId - ${e.getMessage}}", e)
+        logWarning(s"Failed to remove shuffle $shuffleId - ${e.getMessage}", e)
     }(ThreadUtils.sameThread)
     if (blocking) {
       timeout.awaitResult(future)
@@ -129,7 +129,7 @@ class BlockManagerMaster(
     future.onFailure {
       case e: Exception =>
         logWarning(s"Failed to remove broadcast $broadcastId" +
-          s" with removeFromMaster = $removeFromMaster - ${e.getMessage}}", e)
+          s" with removeFromMaster = $removeFromMaster - ${e.getMessage}", e)
     }(ThreadUtils.sameThread)
     if (blocking) {
       timeout.awaitResult(future)

core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala (+1 -1)

@@ -181,7 +181,7 @@ private[spark] object ClosureCleaner extends Logging {
       return
     }

-    logDebug(s"+++ Cleaning closure $func (${func.getClass.getName}}) +++")
+    logDebug(s"+++ Cleaning closure $func (${func.getClass.getName}) +++")

     // A list of classes that represents closures enclosed in the given one
     val innerClasses = getInnerClosureClasses(func)

core/src/main/scala/org/apache/spark/util/Utils.scala (+1 -1)

@@ -1366,7 +1366,7 @@ private[spark] object Utils extends Logging {
           file.getAbsolutePath, effectiveStartIndex, effectiveEndIndex))
       }
       sum += fileToLength(file)
-      logDebug(s"After processing file $file, string built is ${stringBuffer.toString}}")
+      logDebug(s"After processing file $file, string built is ${stringBuffer.toString}")
     }
     stringBuffer.toString
   }

examples/src/main/scala/org/apache/spark/examples/ml/MovieLensALS.scala (+1 -1)

@@ -76,7 +76,7 @@ object MovieLensALS {
         .text("path to a MovieLens dataset of movies")
         .action((x, c) => c.copy(movies = x))
       opt[Int]("rank")
-        .text(s"rank, default: ${defaultParams.rank}}")
+        .text(s"rank, default: ${defaultParams.rank}")
         .action((x, c) => c.copy(rank = x))
       opt[Int]("maxIter")
         .text(s"max number of iterations, default: ${defaultParams.maxIter}")

examples/src/main/scala/org/apache/spark/examples/mllib/DecisionTreeRunner.scala (+1 -1)

@@ -100,7 +100,7 @@ object DecisionTreeRunner {
         .action((x, c) => c.copy(numTrees = x))
       opt[String]("featureSubsetStrategy")
         .text(s"feature subset sampling strategy" +
-          s" (${RandomForest.supportedFeatureSubsetStrategies.mkString(", ")}}), " +
+          s" (${RandomForest.supportedFeatureSubsetStrategies.mkString(", ")}), " +
           s"default: ${defaultParams.featureSubsetStrategy}")
         .action((x, c) => c.copy(featureSubsetStrategy = x))
       opt[Double]("fracTest")

examples/src/main/scala/org/apache/spark/examples/mllib/MovieLensALS.scala (+1 -1)

@@ -55,7 +55,7 @@ object MovieLensALS {
     val parser = new OptionParser[Params]("MovieLensALS") {
       head("MovieLensALS: an example app for ALS on MovieLens data.")
       opt[Int]("rank")
-        .text(s"rank, default: ${defaultParams.rank}}")
+        .text(s"rank, default: ${defaultParams.rank}")
         .action((x, c) => c.copy(rank = x))
       opt[Int]("numIterations")
         .text(s"number of iterations, default: ${defaultParams.numIterations}")

mllib/src/main/scala/org/apache/spark/mllib/linalg/BLAS.scala (+1 -1)

@@ -469,7 +469,7 @@ private[spark] object BLAS extends Serializable with Logging {
     require(A.numCols == x.size,
       s"The columns of A don't match the number of elements of x. A: ${A.numCols}, x: ${x.size}")
     require(A.numRows == y.size,
-      s"The rows of A don't match the number of elements of y. A: ${A.numRows}, y:${y.size}}")
+      s"The rows of A don't match the number of elements of y. A: ${A.numRows}, y:${y.size}")
     if (alpha == 0.0) {
       logDebug("gemv: alpha is equal to 0. Returning y.")
     } else {

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala (+1 -1)

@@ -164,7 +164,7 @@ object HiveTypeCoercion {
       // Leave the same if the dataTypes match.
       case Some(newType) if a.dataType == newType.dataType => a
       case Some(newType) =>
-        logDebug(s"Promoting $a to $newType in ${q.simpleString}}")
+        logDebug(s"Promoting $a to $newType in ${q.simpleString}")
         newType
     }
   }

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala (+1 -1)

@@ -170,7 +170,7 @@ object JdbcUtils extends Logging {
       case BinaryType => "BLOB"
       case TimestampType => "TIMESTAMP"
       case DateType => "DATE"
-      case t: DecimalType => s"DECIMAL(${t.precision}},${t.scale}})"
+      case t: DecimalType => s"DECIMAL(${t.precision},${t.scale})"
       case _ => throw new IllegalArgumentException(s"Don't know how to save $field to JDBC")
     })
     val nullable = if (field.nullable) "" else "NOT NULL"

sql/core/src/main/scala/org/apache/spark/sql/execution/ui/SQLListener.scala (+1 -1)

@@ -162,7 +162,7 @@ private[sql] class SQLListener(sqlContext: SQLContext) extends SparkListener wit
       // A task of an old stage attempt. Because a new stage is submitted, we can ignore it.
     } else if (stageAttemptID > stageMetrics.stageAttemptId) {
       logWarning(s"A task should not have a higher stageAttemptID ($stageAttemptID) then " +
-        s"what we have seen (${stageMetrics.stageAttemptId}})")
+        s"what we have seen (${stageMetrics.stageAttemptId})")
     } else {
       // TODO We don't know the attemptId. Currently, what we can do is overriding the
       // accumulator updates. However, if there are two same task are running, such as

streaming/src/main/scala/org/apache/spark/streaming/rdd/WriteAheadLogBackedBlockRDD.scala (+1 -1)

@@ -84,7 +84,7 @@ class WriteAheadLogBackedBlockRDD[T: ClassTag](
   require(
     blockIds.length == walRecordHandles.length,
     s"Number of block Ids (${blockIds.length}) must be " +
-      s" same as number of WAL record handles (${walRecordHandles.length}})")
+      s" same as number of WAL record handles (${walRecordHandles.length})")

   require(
     isBlockIdValid.isEmpty || isBlockIdValid.length == blockIds.length,

streaming/src/main/scala/org/apache/spark/streaming/scheduler/InputInfoTracker.scala (+1 -1)

@@ -66,7 +66,7 @@ private[streaming] class InputInfoTracker(ssc: StreamingContext) extends Logging
       new mutable.HashMap[Int, StreamInputInfo]())

     if (inputInfos.contains(inputInfo.inputStreamId)) {
-      throw new IllegalStateException(s"Input stream ${inputInfo.inputStreamId}} for batch" +
+      throw new IllegalStateException(s"Input stream ${inputInfo.inputStreamId} for batch" +
         s"$batchTime is already added into InputInfoTracker, this is a illegal state")
     }
     inputInfos += ((inputInfo.inputStreamId, inputInfo))
