cloud-fan commented on code in PR #52173: URL: https://github.com/apache/spark/pull/52173#discussion_r2340502403
########## sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/parameters.scala: ########## @@ -203,6 +229,64 @@ object BindParameters extends Rule[LogicalPlan] with QueryErrorsBase { args(posToIndex(pos)) } + case GeneralParameterizedQuery(child, args, paramNames) + if !child.containsPattern(UNRESOLVED_WITH) && + args.forall(_.resolved) => + + // Check all arguments for validity (args are already evaluated expressions/literals) + val allArgs = args.zipWithIndex.map { case (arg, idx) => + val name = if (idx < paramNames.length && paramNames(idx) != null) { + paramNames(idx) + } else { + s"_$idx" + } + (name, arg) + } + checkArgs(allArgs) + + // Collect parameter types used in the query to enforce invariants + val namedParams = scala.collection.mutable.Set.empty[String] + val positionalParams = scala.collection.mutable.Set.empty[Int] + bind(child) { + case NamedParameter(name) => namedParams.add(name); NamedParameter(name) + case p @ PosParameter(pos) => positionalParams.add(pos); p + } + + // Validate: no mixing of positional and named parameters + if (namedParams.nonEmpty && positionalParams.nonEmpty) { + throw QueryCompilationErrors.invalidQueryMixedQueryParameters() + } + + // Validate: if query uses named parameters, all USING expressions must have names + if (namedParams.nonEmpty && positionalParams.isEmpty) { + val unnamedExpressions = paramNames.zipWithIndex.collect { Review Comment: `paramNames.isEmpty` means we don't allow named parameters. Why do we need to handle empty and null param names? -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. 
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For queries about this service, please contact Infrastructure at: users@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org