
Commit f6cdd9a ("error")
Parent: 8469afd
File tree: 3 files changed, +38 -0 lines

common/utils/src/main/resources/error/error-conditions.json (+5 lines)

@@ -2945,6 +2945,11 @@
         "The lambda function has duplicate arguments <args>. Please, consider to rename the argument names or set <caseSensitiveConfig> to \"true\"."
       ]
     },
+    "PARAMETER_DOES_NOT_ACCEPT_LAMBDA_FUNCTION" : {
+      "message" : [
+        "You passed a lambda function to a parameter that does not accept it. Please check if lambda function argument is in the correct position."
+      ]
+    },
     "NON_HIGHER_ORDER_FUNCTION" : {
       "message" : [
         "A lambda function should only be used in a higher order function. However, its class is <class>, which is not a higher order function."

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala (+10 lines)

@@ -345,6 +345,16 @@ trait CheckAnalysis extends LookupCatalog with QueryErrorsBase with PlanToString

       case operator: LogicalPlan =>
         operator transformExpressionsDown {
+          case hof: HigherOrderFunction if hof.arguments.exists {
+            case LambdaFunction(_, _, _) => true
+            case _ => false
+          } =>
+            throw new AnalysisException(
+              errorClass =
+                "INVALID_LAMBDA_FUNCTION_CALL.PARAMETER_DOES_NOT_ACCEPT_LAMBDA_FUNCTION",
+              messageParameters = Map.empty,
+              origin = hof.origin
+            )
           // Check argument data types of higher-order functions downwards first.
           // If the arguments of the higher-order functions are resolved but the type check fails,
           // the argument functions will not get resolved, but we should report the argument type
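
In Spark's HigherOrderFunction trait the lambda inputs are modeled by `functions`, while `arguments` holds the plain value inputs, so a `LambdaFunction` showing up inside `arguments` means the call placed the lambda in the wrong position; the added case fails fast with the new condition and points at the offending expression via `hof.origin`. Below is a toy sketch of the same idea, using hypothetical simplified types rather than Spark's real expression classes:

    // Toy model (hypothetical types, not Spark's): a lambda found among the
    // plain value arguments of a higher-order function call is rejected up front.
    sealed trait Expr
    case class Literal(value: Any) extends Expr
    case class Lambda(body: String) extends Expr
    case class Hof(arguments: Seq[Expr], functions: Seq[Expr])

    def checkLambdaPlacement(hof: Hof): Unit =
      if (hof.arguments.exists(_.isInstanceOf[Lambda])) {
        throw new IllegalArgumentException(
          "PARAMETER_DOES_NOT_ACCEPT_LAMBDA_FUNCTION: lambda passed where a value was expected")
      }

    // transform(x -> x + 1, array(1, 2, 3)) puts the lambda in the value slot:
    // checkLambdaPlacement(Hof(Seq(Lambda("x + 1")), Seq(Literal(Seq(1, 2, 3)))))  // would throw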

sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala (+23 lines)

@@ -1069,6 +1069,29 @@ class QueryCompilationErrorsSuite
       context = ExpectedContext(fragment = "in (select map(1,2))", start = 16, stop = 35)
     )
   }
+
+  test("SPARK-51580: Throw proper user facing error message when lambda function is out of " +
+    "place in HigherOrderFunction") {
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql("select transform(x -> x + 1, array(1,2,3))")
+      },
+      condition = "INVALID_LAMBDA_FUNCTION_CALL.PARAMETER_DOES_NOT_ACCEPT_LAMBDA_FUNCTION",
+      parameters = Map(),
+      context =
+        ExpectedContext(fragment = "transform(x -> x + 1, array(1,2,3))", start = 7, stop = 41)
+    )
+
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql("select aggregate(array(1,2,3), x -> x + 1, 0)")
+      },
+      condition = "INVALID_LAMBDA_FUNCTION_CALL.PARAMETER_DOES_NOT_ACCEPT_LAMBDA_FUNCTION",
+      parameters = Map(),
+      context =
+        ExpectedContext(fragment = "aggregate(array(1,2,3), x -> x + 1, 0)", start = 7, stop = 44)
+    )
+  }
 }

 class MyCastToString extends SparkUserDefinedFunction(
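
For reference, the corrected calls look as follows: in Spark SQL, `transform` takes the array first and the lambda second, and `aggregate` takes the array, the initial value, then the merge lambda, so the failing queries in the new test analyze fine once the arguments are reordered. The SparkSession value `spark` below is an assumption for illustration:

    // Same queries with the arguments in the documented order (assumes `spark`).
    spark.sql("SELECT transform(array(1, 2, 3), x -> x + 1)").show()
    spark.sql("SELECT aggregate(array(1, 2, 3), 0, (acc, x) -> acc + x)").show()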
