gengliangwang commented on a change in pull request #34747:
URL: https://github.com/apache/spark/pull/34747#discussion_r758907096



##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
##########
@@ -198,21 +205,39 @@ class Analyzer(override val catalogManager: 
CatalogManager)
   }
 
   def executeAndCheck(plan: LogicalPlan, tracker: QueryPlanningTracker): 
LogicalPlan = {
-    if (plan.analyzed) return plan
     AnalysisHelper.markInAnalyzer {
       val analyzed = executeAndTrack(plan, tracker)
       try {
         checkAnalysis(analyzed)
         analyzed
       } catch {
         case e: AnalysisException =>
-          val ae = e.copy(plan = Option(analyzed))
+          val ae = e.copy(plan = Option(analyzed),
+            message = e.message + extraHintForAnsiTypeCoercion(plan))
           ae.setStackTrace(e.getStackTrace)
           throw ae
       }
     }
   }
 
+  private def extraHintForAnsiTypeCoercion(plan: LogicalPlan): String = {
+    if (!conf.ansiEnabled) {
+      ""
+    } else {
+      val nonAnsiPlan = AnalysisContext.withDefaultTypeCoercionAnalysisContext 
{
+        executeSameContext(plan)

Review comment:
       It won't be a performance issue: by the time the code reaches this 
point, the query has already failed.
   But from a user-experience standpoint, I am thinking about just adding `To fix the error, 
you might need to add explicit type casts.` and not showing the hint `set 
spark.sql.ansi.enabled to false`.




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]



---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to