Skip to content

Commit

Permalink
[SPARK-32398][TESTS][CORE][STREAMING][SQL][ML] Update to scalatest 3.2.0 for Scala 2.13.3+
Browse files Browse the repository at this point in the history

### What changes were proposed in this pull request?

Updates to scalatest 3.2.0. Though the diff looks large, 99% of the changes are mechanical updates to the new package locations of the scalatest classes.

### Why are the changes needed?

3.2.0+ has a fix that is required for Scala 2.13.3+ compatibility.

### Does this PR introduce _any_ user-facing change?

No, only affects tests.

### How was this patch tested?

Existing tests.

Closes apache#29196 from srowen/SPARK-32398.

Authored-by: Sean Owen <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
  • Loading branch information
srowen authored and dongjoon-hyun committed Jul 23, 2020
1 parent e7fb67c commit be2eca2
Show file tree
Hide file tree
Showing 116 changed files with 270 additions and 145 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -19,9 +19,9 @@ package org.apache.spark.util.sketch

import scala.util.Random

import org.scalatest.FunSuite // scalastyle:ignore funsuite
import org.scalatest.funsuite.AnyFunSuite // scalastyle:ignore funsuite

class BitArraySuite extends FunSuite { // scalastyle:ignore funsuite
class BitArraySuite extends AnyFunSuite { // scalastyle:ignore funsuite

test("error case when create BitArray") {
intercept[IllegalArgumentException](new BitArray(0))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,9 +22,9 @@ import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
import scala.reflect.ClassTag
import scala.util.Random

import org.scalatest.FunSuite // scalastyle:ignore funsuite
import org.scalatest.funsuite.AnyFunSuite // scalastyle:ignore funsuite

class BloomFilterSuite extends FunSuite { // scalastyle:ignore funsuite
class BloomFilterSuite extends AnyFunSuite { // scalastyle:ignore funsuite
private final val EPSILON = 0.01

// Serializes and deserializes a given `BloomFilter`, then checks whether the deserialized
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,9 +22,9 @@ import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
import scala.reflect.ClassTag
import scala.util.Random

import org.scalatest.FunSuite // scalastyle:ignore funsuite
import org.scalatest.funsuite.AnyFunSuite // scalastyle:ignore funsuite

class CountMinSketchSuite extends FunSuite { // scalastyle:ignore funsuite
class CountMinSketchSuite extends AnyFunSuite { // scalastyle:ignore funsuite
private val epsOfTotalCount = 0.01

private val confidence = 0.9
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,14 +21,15 @@ import org.apache.commons.text.similarity.LevenshteinDistance
import org.scalacheck.{Arbitrary, Gen}
import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks
// scalastyle:off
import org.scalatest.{FunSuite, Matchers}
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.must.Matchers

import org.apache.spark.unsafe.types.UTF8String.{fromString => toUTF8}

/**
* This TestSuite utilize ScalaCheck to generate randomized inputs for UTF8String testing.
*/
class UTF8StringPropertyCheckSuite extends FunSuite with ScalaCheckDrivenPropertyChecks with Matchers {
class UTF8StringPropertyCheckSuite extends AnyFunSuite with ScalaCheckDrivenPropertyChecks with Matchers {
// scalastyle:on

test("toString") {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -727,7 +727,7 @@ class JavaSparkContext(val sc: SparkContext) extends Closeable {
* @note This does not necessarily mean the caching or computation was successful.
*/
def getPersistentRDDs: JMap[java.lang.Integer, JavaRDD[_]] = {
sc.getPersistentRDDs.mapValues(s => JavaRDD.fromRDD(s))
sc.getPersistentRDDs.mapValues(s => JavaRDD.fromRDD(s)).toMap
.asJava.asInstanceOf[JMap[java.lang.Integer, JavaRDD[_]]]
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,8 @@ import scala.collection.mutable.ArrayBuffer
import scala.ref.WeakReference
import scala.util.control.NonFatal

import org.scalatest.Matchers
import org.scalatest.exceptions.TestFailedException
import org.scalatest.matchers.must.Matchers

import org.apache.spark.scheduler._
import org.apache.spark.serializer.JavaSerializer
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,8 @@
package org.apache.spark

import org.scalatest.Assertions._
import org.scalatest.Matchers
import org.scalatest.concurrent.{Signaler, ThreadSignaler, TimeLimits}
import org.scalatest.matchers.must.Matchers
import org.scalatest.time.{Millis, Span}

import org.apache.spark.internal.config
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ package org.apache.spark

import org.scalatest.BeforeAndAfterAll
import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.should.Matchers._
import org.scalatest.time.SpanSugar._

import org.apache.spark.internal.config
Expand Down
4 changes: 3 additions & 1 deletion core/src/test/scala/org/apache/spark/FutureActionSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,9 @@ package org.apache.spark

import scala.concurrent.duration.Duration

import org.scalatest.{BeforeAndAfter, Matchers}
import org.scalatest.BeforeAndAfter
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._

import org.apache.spark.util.ThreadUtils

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ import scala.concurrent.Future
import scala.concurrent.duration._

import org.scalatest.BeforeAndAfter
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers

import org.apache.spark.internal.config._
import org.apache.spark.internal.config.Deploy._
Expand Down
3 changes: 2 additions & 1 deletion core/src/test/scala/org/apache/spark/ShuffleSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,8 @@ package org.apache.spark
import java.util.{Locale, Properties}
import java.util.concurrent.{Callable, CyclicBarrier, Executors, ExecutorService}

import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._

import org.apache.spark.ShuffleSuite.NonJavaSerializableClass
import org.apache.spark.internal.config
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ import scala.collection.JavaConverters._
import org.apache.commons.io.FileUtils
import org.apache.commons.io.filefilter.TrueFileFilter
import org.scalatest.BeforeAndAfterAll
import org.scalatest.matchers.should.Matchers._

import org.apache.spark.internal.config
import org.apache.spark.rdd.ShuffledRDD
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,8 +31,8 @@ import org.apache.hadoop.io.{BytesWritable, LongWritable, Text}
import org.apache.hadoop.mapred.TextInputFormat
import org.apache.hadoop.mapreduce.lib.input.{TextInputFormat => NewTextInputFormat}
import org.json4s.{DefaultFormats, Extraction}
import org.scalatest.Matchers._
import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.must.Matchers._

import org.apache.spark.TestUtils._
import org.apache.spark.internal.config._
Expand Down
5 changes: 3 additions & 2 deletions core/src/test/scala/org/apache/spark/SparkFunSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,8 @@ import org.apache.log4j.spi.LoggingEvent

import scala.annotation.tailrec
import org.apache.log4j.{Appender, AppenderSkeleton, Level, Logger}
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, BeforeAndAfterEach, FunSuite, Outcome}
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, BeforeAndAfterEach, Outcome}
import org.scalatest.funsuite.AnyFunSuite
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config.Tests.IS_TESTING
import org.apache.spark.util.{AccumulatorContext, Utils}
Expand Down Expand Up @@ -57,7 +58,7 @@ import scala.collection.mutable.ArrayBuffer
* }
*/
abstract class SparkFunSuite
extends FunSuite
extends AnyFunSuite
with BeforeAndAfterAll
with BeforeAndAfterEach
with ThreadAudit
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,9 @@ package org.apache.spark
import scala.concurrent.duration._
import scala.language.implicitConversions

import org.scalatest.Matchers
import org.scalatest.concurrent.Eventually._
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._

import org.apache.spark.JobExecutionStatus._

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,8 @@ import java.io.{File, PrintWriter}

import scala.io.Source

import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._

import org.apache.spark.{SharedSparkContext, SparkConf, SparkFunSuite}
import org.apache.spark.internal.config.Kryo._
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,8 @@

package org.apache.spark.deploy

import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._

import org.apache.spark.SparkFunSuite

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,8 +29,10 @@ import com.google.common.io.ByteStreams
import org.apache.commons.io.FileUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileStatus, FSDataInputStream, Path}
import org.scalatest.{BeforeAndAfterEach, Matchers}
import org.scalatest.BeforeAndAfterEach
import org.scalatest.concurrent.{Signaler, ThreadSignaler, TimeLimits}
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.scalatest.time.Span
import org.scalatest.time.SpanSugar._

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,8 @@ import org.eclipse.jetty.servlet.ServletContextHandler
import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito._
import org.mockito.invocation.InvocationOnMock
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.scalatestplus.mockito.MockitoSugar

import org.apache.spark.SparkFunSuite
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,8 +34,9 @@ import org.apache.hadoop.security.AccessControlException
import org.json4s.jackson.JsonMethods._
import org.mockito.ArgumentMatchers.{any, argThat}
import org.mockito.Mockito.{doThrow, mock, spy, verify, when}
import org.scalatest.Matchers
import org.scalatest.concurrent.Eventually._
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._

import org.apache.spark.{JobExecutionStatus, SecurityManager, SPARK_VERSION, SparkConf, SparkFunSuite}
import org.apache.spark.deploy.SparkHadoopUtil
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,8 +35,10 @@ import org.json4s.jackson.JsonMethods._
import org.mockito.Mockito._
import org.openqa.selenium.WebDriver
import org.openqa.selenium.htmlunit.HtmlUnitDriver
import org.scalatest.{BeforeAndAfter, Matchers}
import org.scalatest.BeforeAndAfter
import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.scalatestplus.mockito.MockitoSugar
import org.scalatestplus.selenium.WebBrowser

Expand Down Expand Up @@ -309,14 +311,18 @@ class HistoryServerSuite extends SparkFunSuite with BeforeAndAfter with Matchers

val urlsThroughKnox = responseThroughKnox \\ "@href" map (_.toString)
val siteRelativeLinksThroughKnox = urlsThroughKnox filter (_.startsWith("/"))
all (siteRelativeLinksThroughKnox) should startWith (knoxBaseUrl)
for (link <- siteRelativeLinksThroughKnox) {
link should startWith (knoxBaseUrl)
}

val directRequest = mock[HttpServletRequest]
val directResponse = page.render(directRequest)

val directUrls = directResponse \\ "@href" map (_.toString)
val directSiteRelativeLinks = directUrls filter (_.startsWith("/"))
all (directSiteRelativeLinks) should not startWith (knoxBaseUrl)
for (link <- directSiteRelativeLinks) {
link should not startWith (knoxBaseUrl)
}
}

test("static relative links are prefixed with uiRoot (spark.ui.proxyBase)") {
Expand All @@ -331,7 +337,9 @@ class HistoryServerSuite extends SparkFunSuite with BeforeAndAfter with Matchers
// then
val urls = response \\ "@href" map (_.toString)
val siteRelativeLinks = urls filter (_.startsWith("/"))
all (siteRelativeLinks) should startWith (uiRoot)
for (link <- siteRelativeLinks) {
link should startWith (uiRoot)
}
}

test("/version api endpoint") {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,9 @@ import javax.servlet.http.HttpServletRequest
import org.eclipse.jetty.proxy.ProxyServlet
import org.eclipse.jetty.servlet.{ServletContextHandler, ServletHolder}
import org.openqa.selenium.WebDriver
import org.scalatest._
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.scalatestplus.selenium.WebBrowser

import org.apache.spark._
Expand Down Expand Up @@ -146,7 +148,9 @@ abstract class RealBrowserUIHistoryServerSuite(val driverProp: String)
// there are at least some URL links that were generated via javascript,
// and they all contain the spark.ui.proxyBase (uiRoot)
links.length should be > 4
all(links) should startWith(url + uiRoot)
for (link <- links) {
link should startWith(url + uiRoot)
}
} finally {
contextHandler.stop()
quit()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,8 +31,10 @@ import scala.reflect.ClassTag
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.mockito.Mockito.{mock, when}
import org.scalatest.{BeforeAndAfter, Matchers, PrivateMethodTester}
import org.scalatest.{BeforeAndAfter, PrivateMethodTester}
import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import other.supplier.{CustomPersistenceEngine, CustomRecoveryModeFactory}

import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,8 @@ package org.apache.spark.deploy.security

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._

import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.internal.config.{STAGING_DIR, SUBMIT_DEPLOY_MODE}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,9 @@

package org.apache.spark.deploy.worker

import org.scalatest.{Matchers, PrivateMethodTester}
import org.scalatest.PrivateMethodTester
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._

import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.deploy.Command
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,8 +29,10 @@ import org.mockito.Answers.RETURNS_SMART_NULLS
import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito._
import org.mockito.invocation.InvocationOnMock
import org.scalatest.{BeforeAndAfter, Matchers}
import org.scalatest.BeforeAndAfter
import org.scalatest.concurrent.Eventually.{eventually, interval, timeout}
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._

import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.TestUtils.{createTempJsonFile, createTempScriptWithExpectedOutput}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,9 @@ import java.util.concurrent.TimeUnit

import scala.concurrent.duration._

import org.scalatest.Matchers
import org.scalatest.concurrent.Eventually._
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._

import org.apache.spark._
import org.apache.spark.internal.config.UI.UI_ENABLED
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,8 @@ import scala.util.{Failure, Success, Try}

import com.google.common.io.CharStreams
import org.mockito.Mockito._
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.scalatestplus.mockito.MockitoSugar

import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,9 @@ import scala.util.Random

import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito.{mock, times, verify, when}
import org.scalatest._
import org.scalatest.BeforeAndAfterEach
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._

import org.apache.spark.{ExecutorDeadException, SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.network.BlockDataManager
Expand Down
3 changes: 2 additions & 1 deletion core/src/test/scala/org/apache/spark/rdd/SortingSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,8 @@

package org.apache.spark.rdd

import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._

import org.apache.spark.{SharedSparkContext, SparkFunSuite}
import org.apache.spark.internal.Logging
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2281,7 +2281,7 @@ class DAGSchedulerSuite extends SparkFunSuite with LocalSparkContext with TimeLi
assert(stackTraceString.contains("org.apache.spark.rdd.RDD.count"))

// should include the FunSuite setup:
assert(stackTraceString.contains("org.scalatest.FunSuite"))
assert(stackTraceString.contains("org.scalatest.funsuite.AnyFunSuite"))
}

test("catch errors in event loop") {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,8 @@ import scala.collection.JavaConverters._
import scala.collection.mutable

import org.mockito.Mockito
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._

import org.apache.spark._
import org.apache.spark.executor.TaskMetrics
Expand Down
Loading

0 comments on commit be2eca2

Please sign in to comment.