From b82d5fa434fcb443d78c6c1372d743a6b216d7c1 Mon Sep 17 00:00:00 2001
From: Mihailo Timotic
Date: Fri, 21 Mar 2025 09:43:10 +0100
Subject: [PATCH] [SPARK-51580][SQL] Throw proper user facing error message
 when lambda function is out of place in HigherOrderFunction

---
 .../resources/error/error-conditions.json     |  5 ++++
 .../sql/catalyst/analysis/CheckAnalysis.scala | 10 ++++++++
 .../errors/QueryCompilationErrorsSuite.scala  | 23 +++++++++++++++++++
 3 files changed, 38 insertions(+)

diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index 90d270676dcd3..adb5b8eedf466 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -2954,6 +2954,11 @@
         "message" : [
           "A higher order function expects <expectedNumArgs> arguments, but got <actualNumArgs>."
         ]
+      },
+      "PARAMETER_DOES_NOT_ACCEPT_LAMBDA_FUNCTION" : {
+        "message" : [
+          "You passed a lambda function to a parameter that does not accept it. Please check if lambda function argument is in the correct position."
+        ]
       }
     },
     "sqlState" : "42K0D"
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
index c8678d994ba80..6203e7faec119 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
@@ -345,6 +345,16 @@ trait CheckAnalysis extends LookupCatalog with QueryErrorsBase with PlanToString
       case operator: LogicalPlan =>
         operator transformExpressionsDown {
+          case hof: HigherOrderFunction if hof.arguments.exists {
+            case LambdaFunction(_, _, _) => true
+            case _ => false
+          } =>
+            throw new AnalysisException(
+              errorClass =
+                "INVALID_LAMBDA_FUNCTION_CALL.PARAMETER_DOES_NOT_ACCEPT_LAMBDA_FUNCTION",
+              messageParameters = Map.empty,
+              origin = hof.origin
+            )
           // Check argument data types of higher-order functions downwards first.
           // If the arguments of the higher-order functions are resolved but the type check fails,
           // the argument functions will not get resolved, but we should report the argument type
           // check failure reason first.
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
index 8f258f819d3bd..be83d2c653235 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
@@ -1069,6 +1069,29 @@ class QueryCompilationErrorsSuite
       context = ExpectedContext(fragment = "in (select map(1,2))", start = 16, stop = 35)
     )
   }
+
+  test("SPARK-51580: Throw proper user facing error message when lambda function is out of " +
+    "place in HigherOrderFunction") {
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql("select transform(x -> x + 1, array(1,2,3))")
+      },
+      condition = "INVALID_LAMBDA_FUNCTION_CALL.PARAMETER_DOES_NOT_ACCEPT_LAMBDA_FUNCTION",
+      parameters = Map(),
+      context =
+        ExpectedContext(fragment = "transform(x -> x + 1, array(1,2,3))", start = 7, stop = 41)
+    )
+
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql("select aggregate(array(1,2,3), x -> x + 1, 0)")
+      },
+      condition = "INVALID_LAMBDA_FUNCTION_CALL.PARAMETER_DOES_NOT_ACCEPT_LAMBDA_FUNCTION",
+      parameters = Map(),
+      context =
+        ExpectedContext(fragment = "aggregate(array(1,2,3), x -> x + 1, 0)", start = 7, stop = 44)
+    )
+  }
 }
 
 class MyCastToString extends SparkUserDefinedFunction(
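
Note (illustration only, not part of the diff above): a minimal spark-shell sketch of the user-facing behaviour the new CheckAnalysis case is meant to produce, assuming this patch is applied and the usual predefined spark session is in scope. The query and the condition name are taken from the new test; the exact rendered message is an assumption based on the JSON entry added here.

    import org.apache.spark.sql.AnalysisException

    try {
      // The lambda sits where transform expects its input array, so analysis is
      // expected to fail eagerly with the new condition rather than with a confusing
      // unresolved-lambda error.
      spark.sql("select transform(x -> x + 1, array(1, 2, 3))")
    } catch {
      case e: AnalysisException =>
        // Expected to start with
        // [INVALID_LAMBDA_FUNCTION_CALL.PARAMETER_DOES_NOT_ACCEPT_LAMBDA_FUNCTION]
        println(e.getMessage)
    }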