Skip to content

Commit 7d0b921

Browse files
yuexing and LuciferYang
authored and committed
[SPARK-42841][SQL] Assign a name to the error class _LEGACY_ERROR_TEMP_2005
### What changes were proposed in this pull request? see https://issues.apache.org/jira/browse/SPARK-42841, replace _LEGACY_ERROR_TEMP_2005 with a meaningful name. Some points: - To make sure full UT coverage, method with "datatype" is refactored to call another one with "string". - sqlStat '0A000' means NOT_SUPPORTED or CANNOT. ### Why are the changes needed? see https://issues.apache.org/jira/browse/SPARK-42841 ### Does this PR introduce _any_ user-facing change? No ### How was this patch tested? UT ### Was this patch authored or co-authored using generative AI tooling? No Closes #51110 from yuexing/SPARK-42841-2005. Lead-authored-by: xingyue <[email protected]> Co-authored-by: Yue <[email protected]> Signed-off-by: yangjie01 <[email protected]>
1 parent 37c85a6 commit 7d0b921

File tree

3 files changed

+24
-9
lines changed

3 files changed

+24
-9
lines changed

common/utils/src/main/resources/error/error-conditions.json

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -927,6 +927,12 @@
927927
],
928928
"sqlState" : "KD011"
929929
},
930+
"DATATYPE_CANNOT_ORDER" : {
931+
"message" : [
932+
"Type <dataType> does not support ordered operations."
933+
],
934+
"sqlState" : "0A000"
935+
},
930936
"DATATYPE_MISMATCH" : {
931937
"message" : [
932938
"Cannot resolve <sqlExpr> due to data type mismatch:"
@@ -7870,11 +7876,6 @@
78707876
"Unsuccessful try to zip maps with <size> unique keys due to exceeding the array size limit <maxRoundedArrayLength>."
78717877
]
78727878
},
7873-
"_LEGACY_ERROR_TEMP_2005" : {
7874-
"message" : [
7875-
"Type <dataType> does not support ordered operations."
7876-
]
7877-
},
78787879
"_LEGACY_ERROR_TEMP_2017" : {
78797880
"message" : [
78807881
"not resolved."

sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -363,15 +363,13 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionE
363363

364364
def orderedOperationUnsupportedByDataTypeError(
365365
dataType: DataType): SparkIllegalArgumentException = {
366-
new SparkIllegalArgumentException(
367-
errorClass = "_LEGACY_ERROR_TEMP_2005",
368-
messageParameters = Map("dataType" -> dataType.toString()))
366+
return orderedOperationUnsupportedByDataTypeError(dataType.toString())
369367
}
370368

371369
def orderedOperationUnsupportedByDataTypeError(
372370
dataType: String): SparkIllegalArgumentException = {
373371
new SparkIllegalArgumentException(
374-
errorClass = "_LEGACY_ERROR_TEMP_2005",
372+
errorClass = "DATATYPE_CANNOT_ORDER",
375373
messageParameters = Map("dataType" -> dataType))
376374
}
377375

sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1275,6 +1275,22 @@ class QueryExecutionErrorsSuite
12751275
sql("ALTER TABLE t SET LOCATION '/mister/spark'")
12761276
}
12771277
}
1278+
1279+
test("SPARK-42841: SQL query with unsupported data types for ordering") {
1280+
import org.apache.spark.sql.catalyst.types.PhysicalDataType
1281+
import org.apache.spark.sql.types.CalendarIntervalType
1282+
1283+
// Test PhysicalDataType.ordering() with CalendarIntervalType
1284+
// It's hard to make a sql test that passes Argument verification but fails
1285+
// Order verification. So we directly test the error.
1286+
checkError(
1287+
exception = intercept[SparkIllegalArgumentException] {
1288+
PhysicalDataType.ordering(CalendarIntervalType)
1289+
},
1290+
condition = "DATATYPE_CANNOT_ORDER",
1291+
parameters = Map("dataType" -> "PhysicalCalendarIntervalType"))
1292+
}
1293+
12781294
}
12791295

12801296
class FakeFileSystemSetPermission extends LocalFileSystem {

0 commit comments

Comments
 (0)