Skip to content

Commit e46d7d1

Browse files
itholic authored and MaxGekk committed
[SPARK-47158][SQL] Assign proper name and sqlState to _LEGACY_ERROR_TEMP_(2134|2231)
### What changes were proposed in this pull request? This PR proposes to assign proper name and `sqlState` to `_LEGACY_ERROR_TEMP_(2134|2231)` - `_LEGACY_ERROR_TEMP_2134` -> `CANNOT_PARSE_STRING_AS_DATATYPE` - `_LEGACY_ERROR_TEMP_2231` -> `UNSUPPORTED_CALL.FIELD_INDEX` ### Why are the changes needed? To improve error usability ### Does this PR introduce _any_ user-facing change? No API changes, but the user-facing error message will be improved ### How was this patch tested? Added UTs. ### Was this patch authored or co-authored using generative AI tooling? No. Closes apache#45244 from itholic/TOP_LEGACY_ERRORS. Authored-by: Haejoon Lee <[email protected]> Signed-off-by: Max Gekk <[email protected]>
1 parent 1135f6b commit e46d7d1

File tree

11 files changed

+109
-27
lines changed

11 files changed

+109
-27
lines changed

common/utils/src/main/resources/error/error-classes.json

+12-10
Original file line numberDiff line numberDiff line change
@@ -3735,6 +3735,18 @@
37353735
"message" : [
37363736
"Cannot call the method \"<methodName>\" of the class \"<className>\"."
37373737
],
3738+
"subClass" : {
3739+
"FIELD_INDEX" : {
3740+
"message" : [
3741+
"The row shall have a schema to get an index of the field <fieldName>."
3742+
]
3743+
},
3744+
"WITHOUT_SUGGESTION" : {
3745+
"message" : [
3746+
""
3747+
]
3748+
}
3749+
},
37383750
"sqlState" : "0A000"
37393751
},
37403752
"UNSUPPORTED_CHAR_OR_VARCHAR_AS_STRING" : {
@@ -6172,11 +6184,6 @@
61726184
"Exception when registering StreamingQueryListener."
61736185
]
61746186
},
6175-
"_LEGACY_ERROR_TEMP_2134" : {
6176-
"message" : [
6177-
"Cannot parse field value <value> for pattern <pattern> as target spark data type [<dataType>]."
6178-
]
6179-
},
61806187
"_LEGACY_ERROR_TEMP_2138" : {
61816188
"message" : [
61826189
"Cannot have circular references in bean class, but got the circular reference of class <clazz>."
@@ -6517,11 +6524,6 @@
65176524
"Primitive types are not supported."
65186525
]
65196526
},
6520-
"_LEGACY_ERROR_TEMP_2231" : {
6521-
"message" : [
6522-
"fieldIndex on a Row without schema is undefined."
6523-
]
6524-
},
65256527
"_LEGACY_ERROR_TEMP_2232" : {
65266528
"message" : [
65276529
"Value at index <index> is null."

common/utils/src/main/scala/org/apache/spark/SparkException.scala

+2-1
Original file line numberDiff line numberDiff line change
@@ -262,7 +262,8 @@ private[spark] object SparkUnsupportedOperationException {
262262
} else {
263263
Map("className" -> "?", "methodName" -> "?")
264264
}
265-
new SparkUnsupportedOperationException("UNSUPPORTED_CALL", messageParameters)
265+
new SparkUnsupportedOperationException(
266+
"UNSUPPORTED_CALL.WITHOUT_SUGGESTION", messageParameters)
266267
}
267268
}
268269

Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
---
2+
layout: global
3+
title: UNSUPPORTED_CALL error class
4+
displayTitle: UNSUPPORTED_CALL error class
5+
license: |
6+
Licensed to the Apache Software Foundation (ASF) under one or more
7+
contributor license agreements. See the NOTICE file distributed with
8+
this work for additional information regarding copyright ownership.
9+
The ASF licenses this file to You under the Apache License, Version 2.0
10+
(the "License"); you may not use this file except in compliance with
11+
the License. You may obtain a copy of the License at
12+
13+
http://www.apache.org/licenses/LICENSE-2.0
14+
15+
Unless required by applicable law or agreed to in writing, software
16+
distributed under the License is distributed on an "AS IS" BASIS,
17+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
18+
See the License for the specific language governing permissions and
19+
limitations under the License.
20+
---
21+
22+
<!--
23+
DO NOT EDIT THIS FILE.
24+
It was generated automatically by `org.apache.spark.SparkThrowableSuite`.
25+
-->
26+
27+
[SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported)
28+
29+
Cannot call the method "`<methodName>`" of the class "`<className>`".
30+
31+
This error class has the following derived error classes:
32+
33+
## FIELD_INDEX
34+
35+
The row shall have a schema to get an index of the field `<fieldName>`.
36+
37+
## WITHOUT_SUGGESTION
38+
39+
40+
41+

docs/sql-error-conditions.md

+3-1
Original file line numberDiff line numberDiff line change
@@ -2435,12 +2435,14 @@ For more details see [UNSUPPORTED_ADD_FILE](sql-error-conditions-unsupported-add
24352435

24362436
Unsupported arrow type `<typeName>`.
24372437

2438-
### UNSUPPORTED_CALL
2438+
### [UNSUPPORTED_CALL](sql-error-conditions-unsupported-call-error-class.html)
24392439

24402440
[SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported)
24412441

24422442
Cannot call the method "`<methodName>`" of the class "`<className>`".
24432443

2444+
For more details see [UNSUPPORTED_CALL](sql-error-conditions-unsupported-call-error-class.html)
2445+
24442446
### UNSUPPORTED_CHAR_OR_VARCHAR_AS_STRING
24452447

24462448
[SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported)

sql/api/src/main/scala/org/apache/spark/sql/Row.scala

+1-1
Original file line numberDiff line numberDiff line change
@@ -380,7 +380,7 @@ trait Row extends Serializable {
380380
* @throws IllegalArgumentException when a field `name` does not exist.
381381
*/
382382
def fieldIndex(name: String): Int = {
383-
throw DataTypeErrors.fieldIndexOnRowWithoutSchemaError()
383+
throw DataTypeErrors.fieldIndexOnRowWithoutSchemaError(fieldName = name)
384384
}
385385

386386
/**

sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrors.scala

+8-2
Original file line numberDiff line numberDiff line change
@@ -261,8 +261,14 @@ private[sql] object DataTypeErrors extends DataTypeErrorsBase {
261261
messageParameters = Map("raw" -> s"'$raw'"))
262262
}
263263

264-
def fieldIndexOnRowWithoutSchemaError(): SparkUnsupportedOperationException = {
265-
new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_2231")
264+
def fieldIndexOnRowWithoutSchemaError(fieldName: String): SparkUnsupportedOperationException = {
265+
new SparkUnsupportedOperationException(
266+
errorClass = "UNSUPPORTED_CALL.FIELD_INDEX",
267+
messageParameters = Map(
268+
"methodName" -> "fieldIndex",
269+
"className" -> "Row",
270+
"fieldName" -> toSQLId(fieldName))
271+
)
266272
}
267273

268274
def valueIsNullError(index: Int): Throwable = {

sql/api/src/main/scala/org/apache/spark/sql/errors/ExecutionErrors.scala

+4-7
Original file line numberDiff line numberDiff line change
@@ -127,13 +127,10 @@ private[sql] trait ExecutionErrors extends DataTypeErrorsBase {
127127
}
128128

129129
def cannotParseStringAsDataTypeError(pattern: String, value: String, dataType: DataType)
130-
: SparkRuntimeException = {
131-
new SparkRuntimeException(
132-
errorClass = "_LEGACY_ERROR_TEMP_2134",
133-
messageParameters = Map(
134-
"value" -> toSQLValue(value),
135-
"pattern" -> toSQLValue(pattern),
136-
"dataType" -> dataType.toString))
130+
: Throwable = {
131+
SparkException.internalError(
132+
s"Cannot parse field value ${toSQLValue(value)} for pattern ${toSQLValue(pattern)} " +
133+
s"as the target spark data type ${toSQLType(dataType)}.")
137134
}
138135

139136
def unsupportedArrowTypeError(typeName: ArrowType): SparkUnsupportedOperationException = {

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ShuffleSpecSuite.scala

+1-1
Original file line numberDiff line numberDiff line change
@@ -474,7 +474,7 @@ class ShuffleSpecSuite extends SparkFunSuite with SQLHelper {
474474
exception = intercept[SparkUnsupportedOperationException] {
475475
RangeShuffleSpec(10, distribution).createPartitioning(distribution.clustering)
476476
},
477-
errorClass = "UNSUPPORTED_CALL",
477+
errorClass = "UNSUPPORTED_CALL.WITHOUT_SUGGESTION",
478478
parameters = Map(
479479
"methodName" -> "createPartitioning$",
480480
"className" -> "org.apache.spark.sql.catalyst.plans.physical.ShuffleSpec"))

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/TimestampFormatterSuite.scala

+23-2
Original file line numberDiff line numberDiff line change
@@ -17,11 +17,13 @@
1717

1818
package org.apache.spark.sql.catalyst.util
1919

20-
import java.time.{DateTimeException, LocalDateTime}
20+
import java.time.{DateTimeException, LocalDateTime, ZoneId}
21+
import java.util.Locale
2122

22-
import org.apache.spark.SparkUpgradeException
23+
import org.apache.spark.{SparkException, SparkUpgradeException}
2324
import org.apache.spark.sql.catalyst.util.DateTimeTestUtils._
2425
import org.apache.spark.sql.catalyst.util.DateTimeUtils._
26+
import org.apache.spark.sql.catalyst.util.LegacyDateFormats.LENIENT_SIMPLE_DATE_FORMAT
2527
import org.apache.spark.sql.internal.{LegacyBehaviorPolicy, SQLConf}
2628
import org.apache.spark.unsafe.types.UTF8String
2729

@@ -502,4 +504,23 @@ class TimestampFormatterSuite extends DatetimeFormatterSuite {
502504
assert(formatter.parseOptional("9999-12-31 23:59:59.999").isEmpty)
503505
assert(formatter.parseWithoutTimeZoneOptional("9999-12-31 23:59:59.999", true).isEmpty)
504506
}
507+
508+
test("fail to parse string as TimestampNTZ with invalid format") {
509+
val zoneId = ZoneId.systemDefault()
510+
val locale = Locale.getDefault()
511+
val formatter = new DefaultTimestampFormatter(
512+
zoneId, locale, LENIENT_SIMPLE_DATE_FORMAT, isParsing = true)
513+
514+
val invalidTimestampStr = "2021-13-01T25:61:61"
515+
516+
checkError(
517+
exception = intercept[SparkException] {
518+
formatter.parseWithoutTimeZone(invalidTimestampStr, allowTimeZone = false)
519+
},
520+
errorClass = "INTERNAL_ERROR",
521+
parameters = Map(
522+
"message" -> ("Cannot parse field value '2021-13-01T25:61:61' for pattern " +
523+
"'yyyy-MM-dd HH:mm:ss' as the target spark data type \"TIMESTAMP_NTZ\"."))
524+
)
525+
}
505526
}

sql/core/src/test/scala/org/apache/spark/sql/RowSuite.scala

+13-1
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717

1818
package org.apache.spark.sql
1919

20-
import org.apache.spark.SparkFunSuite
20+
import org.apache.spark.{SparkFunSuite, SparkUnsupportedOperationException}
2121
import org.apache.spark.sql.catalyst.expressions.{GenericInternalRow, SpecificInternalRow}
2222
import org.apache.spark.sql.test.SharedSparkSession
2323
import org.apache.spark.sql.types._
@@ -111,4 +111,16 @@ class RowSuite extends SparkFunSuite with SharedSparkSession {
111111
assert(Row(Seq()).getSeq(0) === List())
112112
assert(Row(null).getSeq(0) === null)
113113
}
114+
115+
test("access fieldIndex on Row without schema") {
116+
val rowWithoutSchema = Row(1, "foo", 3.14)
117+
118+
checkError(
119+
exception = intercept[SparkUnsupportedOperationException] {
120+
rowWithoutSchema.fieldIndex("foo")
121+
},
122+
errorClass = "UNSUPPORTED_CALL.FIELD_INDEX",
123+
parameters = Map("methodName" -> "fieldIndex", "className" -> "Row", "fieldName" -> "`foo`")
124+
)
125+
}
114126
}

sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala

+1-1
Original file line numberDiff line numberDiff line change
@@ -958,7 +958,7 @@ class QueryCompilationErrorsSuite
958958
exception = intercept[SparkUnsupportedOperationException] {
959959
new UnsafeRow(1).update(0, 1)
960960
},
961-
errorClass = "UNSUPPORTED_CALL",
961+
errorClass = "UNSUPPORTED_CALL.WITHOUT_SUGGESTION",
962962
parameters = Map(
963963
"methodName" -> "update",
964964
"className" -> "org.apache.spark.sql.catalyst.expressions.UnsafeRow"))

0 commit comments

Comments
 (0)