wangyum commented on a change in pull request #30975:
URL: https://github.com/apache/spark/pull/30975#discussion_r553108010



##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/expressions.scala
##########
@@ -634,36 +635,68 @@ object LikeSimplification extends Rule[LogicalPlan] {
   private val contains = "%([^_%]+)%".r
   private val equalTo = "([^_%]*)".r
 
+  private def simplifyLike(
+      input: Expression, pattern: String, escapeChar: Char = '\\'): Option[Expression] = {
+    if (pattern.contains(escapeChar)) {
+      // There are three different situations when the pattern contains escapeChar:
+      // 1. the pattern contains an invalid escape sequence, e.g. 'm\aca'
+      // 2. the pattern contains an escaped wildcard character, e.g. 'ma\%ca'
+      // 3. the pattern contains an escaped escape character, e.g. 'ma\\ca'
+      // Although some patterns could still be optimized if we handled the escape first,
+      // for simplicity we just skip this rule whenever the pattern contains any escapeChar.
+      None
+    } else {
+      pattern match {
+        case startsWith(prefix) =>
+          Some(StartsWith(input, Literal(prefix)))
+        case endsWith(postfix) =>
+          Some(EndsWith(input, Literal(postfix)))
+        // The 'a%a' pattern is basically the same as 'a%' && '%a'.
+        // However, the additional `Length` condition is required to prevent 'a'
+        // from matching 'a%a'.
+        case startsAndEndsWith(prefix, postfix) =>
+          Some(And(GreaterThanOrEqual(Length(input), Literal(prefix.length + postfix.length)),
+            And(StartsWith(input, Literal(prefix)), EndsWith(input, Literal(postfix)))))
+        case contains(infix) =>
+          Some(Contains(input, Literal(infix)))
+        case equalTo(str) =>
+          Some(EqualTo(input, Literal(str)))
+        case _ => None
+      }
+    }
+  }
+
+  private def simplifyMultiLike(
+      child: Expression, patterns: Seq[UTF8String], multi: MultiLikeBase): Expression = {
+    val (remainPatternMap, replacementMap) =
+      patterns.map { p => p -> simplifyLike(child, p.toString) }.partition(_._2.isEmpty)
+    val remainPatterns = remainPatternMap.map(_._1)
+    val replacements = replacementMap.map(_._2.get)
+    if (replacements.isEmpty) {
+      multi
+    } else {
+      multi match {
+        case l: LikeAll => And(replacements.reduceLeft(And), l.copy(patterns = remainPatterns))
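
For intuition about the classification above: each regex must match the entire pattern, and `_`/`%` are excluded from the captured groups, so any pattern with a wildcard in an unexpected position falls through to `None`. A minimal standalone sketch, using the same regexes as `LikeSimplification` (the `classify` helper and its string labels are hypothetical, for illustration only, not Spark APIs):

```
object LikePatternSketch {
  // The two regexes shown in the context lines above, plus the other three
  // defined alongside them in LikeSimplification.
  private val startsWith = "([^_%]+)%".r
  private val endsWith = "%([^_%]+)".r
  private val startsAndEndsWith = "([^_%]+)%([^_%]+)".r
  private val contains = "%([^_%]+)%".r
  private val equalTo = "([^_%]*)".r

  // Scala's Regex extractor anchors to the whole string, so a case only
  // fires when the entire pattern has that shape.
  def classify(pattern: String): String = pattern match {
    case startsWith(prefix) => s"StartsWith('$prefix')"
    case endsWith(postfix) => s"EndsWith('$postfix')"
    case startsAndEndsWith(prefix, postfix) =>
      s"Length >= ${prefix.length + postfix.length} AND " +
        s"StartsWith('$prefix') AND EndsWith('$postfix')"
    case contains(infix) => s"Contains('$infix')"
    case equalTo(str) => s"EqualTo('$str')"
    case _ => "not simplified"
  }

  def main(args: Array[String]): Unit = {
    // 'a_c' keeps its '_' wildcard and so cannot be simplified.
    Seq("abc%", "%abc", "a%c", "%abc%", "abc", "a_c").foreach { p =>
      println(s"$p -> ${classify(p)}")
    }
  }
}
```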

Review comment:
       It may cause `StackOverflowError` when there are many patterns, since `replacements.reduceLeft(And)` builds a left-nested `And` chain one level deep per pattern:
   ```
   scala> spark.sql("drop table SPARK_33938")
   res6: org.apache.spark.sql.DataFrame = []
   
   scala> spark.sql("create table SPARK_33938(id string) using parquet")
   res7: org.apache.spark.sql.DataFrame = []
   
   scala> val values = Range(1, 10000)
   values: scala.collection.immutable.Range = Range 1 until 10000
   
   scala> spark.sql(s"select * from SPARK_33938 where id like all (${values.map(s => s"'$s'").mkString(", ")})").show
   java.lang.StackOverflowError
     at java.lang.ThreadLocal.set(ThreadLocal.java:201)
     at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.set(TreeNode.scala:62)
     at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:72)
     at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:317)
     at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDown$3(TreeNode.scala:322)
     at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:407)
     at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:243)
     at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:405)
     at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:358)
     at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:322)
     at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDown$3(TreeNode.scala:322)
     at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:407)
     at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:243)
     at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:405)
     at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:358)
   ```
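
   The root cause is the tree shape rather than the pattern count itself: `reduceLeft(And)` yields a chain whose depth equals the number of simplified patterns, and the trace above shows `transformDown`/`mapChildren` burning several stack frames per level. A minimal sketch outside Spark (toy `Expr`/`Leaf`/`And` types; `balancedAnd` is one hypothetical mitigation, not code from this PR), showing that pairwise reduction keeps the depth logarithmic:

   ```
   // Toy expression tree standing in for Catalyst expressions.
   sealed trait Expr
   case class Leaf(n: Int) extends Expr
   case class And(left: Expr, right: Expr) extends Expr

   object DeepAndSketch {
     // Recursive traversal, analogous in shape to TreeNode.transformDown:
     // it needs stack frames proportional to the tree depth, so a chain of
     // ~10000 And nodes can overflow a default-sized JVM thread stack.
     def depth(e: Expr): Int = e match {
       case Leaf(_)   => 1
       case And(l, r) => 1 + math.max(depth(l), depth(r))
     }

     // Hypothetical mitigation: combine pairwise so depth grows as log2(n).
     // Precondition: es is non-empty.
     def balancedAnd(es: Seq[Expr]): Expr = es match {
       case Seq(single) => single
       case _ =>
         balancedAnd(es.grouped(2).map {
           case Seq(a, b) => And(a, b)
           case Seq(a)    => a
         }.toSeq)
     }

     def main(args: Array[String]): Unit = {
       val leaves: Seq[Expr] = (1 to 10000).map(Leaf(_))
       println(depth(leaves.reduceLeft(And))) // 10000: one level per pattern
       println(depth(balancedAnd(leaves)))    // ~15: ceil(log2(10000)) + 1
     }
   }
   ```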



