GitHub user marmbrus commented on a diff in the pull request:
https://github.com/apache/spark/pull/224#discussion_r11007251
--- Diff:
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala
---
@@ -19,11 +19,113 @@ package org.apache.spark.sql
package catalyst
package expressions
+import java.util.regex.Pattern
+
+import org.apache.spark.sql.catalyst.types.DataType
+import org.apache.spark.sql.catalyst.types.StringType
import org.apache.spark.sql.catalyst.types.BooleanType
+import org.apache.spark.sql.catalyst.trees.TreeNode
+import org.apache.spark.sql.catalyst.errors.`package`.TreeNodeException
+
+
+/**
+ * Thrown when an invalid RegEx string is found.
+ */
+class InvalidRegExException[TreeType <: TreeNode[_]](tree: TreeType,
reason: String) extends
+ errors.TreeNodeException(tree, s"$reason", null)
+
+trait StringRegexExpression {
+ self: BinaryExpression =>
+
+ type EvaluatedType = Any
+
+ def escape(v: String): String
+ def nullable: Boolean = true
+ def dataType: DataType = BooleanType
+
+ // try cache the pattern for Literal
+ private lazy val cache: Pattern = right match {
+ case x @ Literal(value: String, StringType) => compile(value)
+ case _ => null
+ }
+
+ protected def compile(str: Any): Pattern = str match {
+ // TODO or let it be null if couldn't compile the regex?
+ case x: String if(x != null) => Pattern.compile(escape(x))
+ case x: String => null
+ case _ => throw new InvalidRegExException(this, "$str can not be
compiled to regex pattern")
--- End diff --
Ah, I was also thinking of the case where the regex itself is invalid, but
it looks like Hive is going to fail here too.
```scala
[marmbrus@michaels-mbp spark (javaSchemaRDD)]$ sbt hive/console
[info] Starting scala interpreter...
[info]
import org.apache.spark.sql.catalyst.analysis._
import org.apache.spark.sql.catalyst.dsl._
import org.apache.spark.sql.catalyst.errors._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules._
import org.apache.spark.sql.catalyst.types._
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.execution
import org.apache.spark.sql.hive._
import org.apache.spark.sql.hive.TestHive._
import org.apache.spark.sql.parquet.ParquetTestData
Welcome to Scala version 2.10.3 (Java HotSpot(TM) 64-Bit Server VM, Java
1.7.0_45).
Type in expressions to have them evaluated.
Type :help for more information.
scala> TestHive.runSqlHive("SELECT 'a' RLIKE '**' FROM src LIMIT 1")
======================
HIVE FAILURE OUTPUT
======================
set
javax.jdo.option.ConnectionURL=jdbc:derby:;databaseName=/private/var/folders/36/cjkbrr953xg2p_krwrmn8h_r0000gn/T/sparkHiveMetastore5920310799452446901;create=true
set
hive.metastore.warehouse.dir=/private/var/folders/36/cjkbrr953xg2p_krwrmn8h_r0000gn/T/sparkHiveWarehouse505596429372573669
OK
Copying data from file:/Users/marmbrus/workspace/hive/data/files/kv1.txt
Copying file: file:/Users/marmbrus/workspace/hive/data/files/kv1.txt
Loading data to table default.src
Table default.src stats: [num_partitions: 0, num_files: 1, num_rows: 0,
total_size: 5812, raw_data_size: 0]
OK
FAILED: ParseException line 1:0 cannot recognize input near 'test' '<EOF>'
'<EOF>'
FAILED: SemanticException [Error 10014]: Line 1:7 Wrong arguments ''**'':
org.apache.hadoop.hive.ql.metadata.HiveException: Unable to execute method
public org.apache.hadoop.io.BooleanWritable
org.apache.hadoop.hive.ql.udf.UDFRegExp.evaluate(org.apache.hadoop.io.Text,org.apache.hadoop.io.Text)
on object org.apache.hadoop.hive.ql.udf.UDFRegExp@37348663 of class
org.apache.hadoop.hive.ql.udf.UDFRegExp with arguments
{a:org.apache.hadoop.io.Text, **:org.apache.hadoop.io.Text} of size 2
======================
END HIVE FAILURE OUTPUT
======================
```
---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes to enable it, or if the feature is enabled but not working,
please contact infrastructure at [email protected] or file a JIRA
ticket with INFRA.
---