SPARK-2519. Eliminate pattern-matching on Tuple2 in performance-critical aggregation code

Author: Sandy Ryza <[email protected]>

Closes apache#1435 from sryza/sandy-spark-2519 and squashes the following commits:

640706a [Sandy Ryza] SPARK-2519. Eliminate pattern-matching on Tuple2 in performance-critical aggregation code
sryza authored and rxin committed Jul 16, 2014
1 parent 1c5739f commit fc7edc9
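
The change replaces tuple destructuring (val (k, v) = iter.next()) with direct _1/_2 field access in hot aggregation loops. Below is a minimal, self-contained sketch of the same before/after idea; it is not part of the commit, uses a plain scala.collection.mutable.HashMap as a stand-in for Spark's ExternalAppendOnlyMap, and the object and method names are made up for illustration.

import scala.collection.mutable

// Illustrative sketch only; not part of this commit. It mimics the shape of the
// aggregation loops in the diff, with a mutable.HashMap standing in for Spark's
// ExternalAppendOnlyMap and made-up names for the two styles.
object TupleAccessSketch {

  // Before: destructuring each element introduces a pattern match on Tuple2.
  def sumByKeyWithDestructuring(iter: Iterator[(String, Int)]): mutable.HashMap[String, Int] = {
    val combiners = new mutable.HashMap[String, Int]()
    while (iter.hasNext) {
      val (k, v) = iter.next()                 // pattern match per element
      combiners.update(k, combiners.getOrElse(k, 0) + v)
    }
    combiners
  }

  // After: read the tuple's fields directly, as the commit does with pair._1 / pair._2.
  def sumByKeyWithFieldAccess(iter: Iterator[(String, Int)]): mutable.HashMap[String, Int] = {
    val combiners = new mutable.HashMap[String, Int]()
    while (iter.hasNext) {
      val pair = iter.next()                   // no pattern match, just field reads
      combiners.update(pair._1, combiners.getOrElse(pair._1, 0) + pair._2)
    }
    combiners
  }

  def main(args: Array[String]): Unit = {
    val data = Seq("a" -> 1, "b" -> 2, "a" -> 3)
    println(sumByKeyWithDestructuring(data.iterator))   // a -> 4, b -> 2
    println(sumByKeyWithFieldAccess(data.iterator))     // a -> 4, b -> 2
  }
}
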
Showing 2 changed files with 11 additions and 9 deletions.
8 changes: 4 additions & 4 deletions core/src/main/scala/org/apache/spark/Aggregator.scala
@@ -56,8 +56,8 @@ case class Aggregator[K, V, C] (
     } else {
       val combiners = new ExternalAppendOnlyMap[K, V, C](createCombiner, mergeValue, mergeCombiners)
       while (iter.hasNext) {
-        val (k, v) = iter.next()
-        combiners.insert(k, v)
+        val pair = iter.next()
+        combiners.insert(pair._1, pair._2)
       }
       // TODO: Make this non optional in a future release
       Option(context).foreach(c => c.taskMetrics.memoryBytesSpilled = combiners.memoryBytesSpilled)
@@ -85,8 +85,8 @@ case class Aggregator[K, V, C] (
     } else {
       val combiners = new ExternalAppendOnlyMap[K, C, C](identity, mergeCombiners, mergeCombiners)
       while (iter.hasNext) {
-        val (k, c) = iter.next()
-        combiners.insert(k, c)
+        val pair = iter.next()
+        combiners.insert(pair._1, pair._2)
       }
       // TODO: Make this non optional in a future release
       Option(context).foreach(c => c.taskMetrics.memoryBytesSpilled = combiners.memoryBytesSpilled)
12 changes: 7 additions & 5 deletions core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala
@@ -268,10 +268,10 @@ class ExternalAppendOnlyMap[K, V, C](
     private def mergeIfKeyExists(key: K, baseCombiner: C, buffer: StreamBuffer): C = {
       var i = 0
       while (i < buffer.pairs.length) {
-        val (k, c) = buffer.pairs(i)
-        if (k == key) {
+        val pair = buffer.pairs(i)
+        if (pair._1 == key) {
           buffer.pairs.remove(i)
-          return mergeCombiners(baseCombiner, c)
+          return mergeCombiners(baseCombiner, pair._2)
         }
         i += 1
       }
@@ -293,9 +293,11 @@ class ExternalAppendOnlyMap[K, V, C](
       }
       // Select a key from the StreamBuffer that holds the lowest key hash
       val minBuffer = mergeHeap.dequeue()
-      val (minPairs, minHash) = (minBuffer.pairs, minBuffer.minKeyHash)
+      val minPairs = minBuffer.pairs
+      val minHash = minBuffer.minKeyHash
       val minPair = minPairs.remove(0)
-      var (minKey, minCombiner) = minPair
+      val minKey = minPair._1
+      var minCombiner = minPair._2
       assert(getKeyHashCode(minPair) == minHash)
 
       // For all other streams that may have this key (i.e. have the same minimum key hash),
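
For context on why the destructured form is the costly one in these loops: with the Scala 2.x compilers in use at the time, a definition such as val (k, c) = pair is not two field reads; it roughly desugars into a pattern match that also builds an intermediate tuple before the fields are extracted, and that work repeats on every iteration. The sketch below is an illustrative approximation of that expansion, not actual compiler output (compare with scalac -Xprint:typer on a given version); the pair._1 / pair._2 style used by the commit compiles to plain accessor calls.

// Illustrative approximation only; exact compiler expansion varies by version.
object DesugaringSketch {
  val pair: (String, Int) = ("a", 1)

  // Destructuring form, roughly as the compiler rewrites it:
  private val tmp: (String, Int) = pair match {
    case (k, c) => (k, c)          // pattern match plus an intermediate tuple
  }
  val k: String = tmp._1
  val c: Int = tmp._2

  // Style used by the commit: two direct field reads, no match, no extra tuple.
  val k2: String = pair._1
  val c2: Int = pair._2
}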
