[SPARK-38203][SQL] Fix SQLInsertTestSuite and SchemaPruningSuite under ANSI mode

### What changes were proposed in this pull request?

Fix the test failures of the following test suites under ANSI mode (a minimal sketch of the behavior change involved follows the list):

- HiveSQLInsertTestSuite
- FileSourceSQLInsertTestSuite
- ParquetV1SchemaPruningSuite
- ParquetV2SchemaPruningSuite
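
For the two insert suites, the relevant difference is that under ANSI mode casting a non-numeric string to a numeric type throws instead of producing NULL (the schema pruning suites hit a separate ANSI feature, the strict index operator, discussed with the second file below). A minimal sketch of the cast difference, for illustration only and not part of the patch (the session setup lines are assumed):

import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[1]").appName("ansi-cast-demo").getOrCreate()

spark.conf.set("spark.sql.ansi.enabled", "false")
spark.sql("SELECT CAST('ansi' AS INT)").show()   // non-ANSI behavior: the result is NULL

spark.conf.set("spark.sql.ansi.enabled", "true")
// Under ANSI mode the same cast fails at runtime with a NumberFormatException-style
// error (in the 3.3 time frame, "invalid input syntax for type numeric: ansi").
spark.sql("SELECT CAST('ansi' AS INT)").show()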

### Why are the changes needed?

This is needed to set up a new GitHub Actions (GA) job that runs the tests with ANSI mode enabled before the 3.3.0 release.

### Does this PR introduce _any_ user-facing change?

No

### How was this patch tested?

Manually enabled ANSI mode and ran the affected test suites.

Closes apache#35511 from gengliangwang/fixParquetPruneAnsi.

Authored-by: Gengliang Wang <[email protected]>
Signed-off-by: Gengliang Wang <[email protected]>
gengliangwang committed Feb 15, 2022
1 parent ca9bbba commit e2eb6d8
Showing 2 changed files with 21 additions and 9 deletions.
SQLInsertTestSuite.scala
@@ -286,20 +286,28 @@ trait SQLInsertTestSuite extends QueryTest with SQLTestUtils {
     } else {
       SQLConf.StoreAssignmentPolicy.values
     }
+
+    def shouldThrowException(policy: SQLConf.StoreAssignmentPolicy.Value): Boolean = policy match {
+      case SQLConf.StoreAssignmentPolicy.ANSI | SQLConf.StoreAssignmentPolicy.STRICT =>
+        true
+      case SQLConf.StoreAssignmentPolicy.LEGACY =>
+        SQLConf.get.ansiEnabled
+    }
+
     testingPolicies.foreach { policy =>
       withSQLConf(
-        SQLConf.STORE_ASSIGNMENT_POLICY.key -> policy.toString) {
+        SQLConf.STORE_ASSIGNMENT_POLICY.key -> policy.toString,
+        SQLConf.ANSI_ENABLED.key -> "false") {
         withTable("t") {
           sql("create table t(a int, b string) using parquet partitioned by (a)")
-          policy match {
-            case SQLConf.StoreAssignmentPolicy.ANSI | SQLConf.StoreAssignmentPolicy.STRICT =>
-              val errorMsg = intercept[NumberFormatException] {
-                sql("insert into t partition(a='ansi') values('ansi')")
-              }.getMessage
-              assert(errorMsg.contains("invalid input syntax for type numeric: ansi"))
-            case SQLConf.StoreAssignmentPolicy.LEGACY =>
+          if (shouldThrowException(policy)) {
+            val errorMsg = intercept[NumberFormatException] {
               sql("insert into t partition(a='ansi') values('ansi')")
-              checkAnswer(sql("select * from t"), Row("ansi", null) :: Nil)
+            }.getMessage
+            assert(errorMsg.contains("invalid input syntax for type numeric: ansi"))
+          } else {
+            sql("insert into t partition(a='ansi') values('ansi')")
+            checkAnswer(sql("select * from t"), Row("ansi", null) :: Nil)
           }
         }
       }
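
The new shouldThrowException helper captures when the INSERT above is expected to fail: the ANSI and STRICT store-assignment policies always reject the unparsable static partition value, and the LEGACY policy now rejects it too when ANSI mode is on, since the partition value 'ansi' still has to be cast to the INT partition column. A rough spark-shell illustration of the matrix the test exercises, an assumption-laden sketch rather than code from the PR (it presumes a SparkSession named spark and a scratch table t):

// LEGACY policy with ANSI off: the bad partition value degrades to NULL
// (partition column a comes last, so the output column order is (b, a)).
spark.conf.set("spark.sql.storeAssignmentPolicy", "LEGACY")
spark.conf.set("spark.sql.ansi.enabled", "false")
spark.sql("CREATE TABLE t(a INT, b STRING) USING parquet PARTITIONED BY (a)")
spark.sql("INSERT INTO t PARTITION(a='ansi') VALUES('ansi')")
spark.sql("SELECT * FROM t").show()   // one row: ("ansi", null)

// With ANSI on, the same INSERT throws even under LEGACY, which is exactly the case
// the new SQLConf.get.ansiEnabled branch accounts for.
spark.conf.set("spark.sql.ansi.enabled", "true")
spark.sql("INSERT INTO t PARTITION(a='ansi') VALUES('ansi')")   // NumberFormatException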
SchemaPruningSuite.scala
@@ -21,6 +21,7 @@ import java.io.File
 
 import org.scalactic.Equality
 
+import org.apache.spark.SparkConf
 import org.apache.spark.sql.{DataFrame, QueryTest, Row}
 import org.apache.spark.sql.catalyst.SchemaPruningTest
 import org.apache.spark.sql.catalyst.expressions.Concat
@@ -57,6 +58,9 @@ abstract class SchemaPruningSuite
     contactId: Int,
     employer: Employer)
 
+  override protected def sparkConf: SparkConf =
+    super.sparkConf.set(SQLConf.ANSI_STRICT_INDEX_OPERATOR.key, "false")
+
   val janeDoe = FullName("Jane", "X.", "Doe")
   val johnDoe = FullName("John", "Y.", "Doe")
   val susanSmith = FullName("Susan", "Z.", "Smith")
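
For SchemaPruningSuite the relevant ANSI feature is the strict index operator: with ANSI mode on, indexing into an array (or map) with [] raises an error for a missing element instead of returning NULL, which would break the suite's expected answers, presumably because some pruning queries index into arrays (e.g. friends[0]) for rows where the array is empty. The override above simply disables that behavior for the suite. A small illustration, not taken from the patch, assuming SQLConf.ANSI_STRICT_INDEX_OPERATOR corresponds to spark.sql.ansi.strictIndexOperator (present in the 3.3 time frame) and a SparkSession named spark:

spark.conf.set("spark.sql.ansi.enabled", "true")

// What the suite now runs with: out-of-range element access yields NULL.
spark.conf.set("spark.sql.ansi.strictIndexOperator", "false")
spark.sql("SELECT array(1, 2)[5]").show()   // NULL

// ANSI default: the same access raises an invalid-array-index error.
spark.conf.set("spark.sql.ansi.strictIndexOperator", "true")
spark.sql("SELECT array(1, 2)[5]").show()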
