aokolnychyi commented on a change in pull request #2428:
URL: https://github.com/apache/iceberg/pull/2428#discussion_r612102052
##########
File path: build.gradle
##########
@@ -972,7 +972,6 @@ project(":iceberg-spark3-extensions") {
dependencies {
compileOnly project(':iceberg-spark3')
-
Review comment:
nit: unneeded change
##########
File path: build.gradle
##########
@@ -984,7 +983,9 @@ project(":iceberg-spark3-extensions") {
testCompile project(path: ':iceberg-spark', configuration: 'testArtifacts')
testCompile project(path: ':iceberg-spark3', configuration: 'testArtifacts')
+
Review comment:
nit: extra empty line
##########
File path: spark3-extensions/src/main/scala/org/apache/spark/sql/catalyst/parser/extensions/IcebergSparkSqlExtensionsParser.scala
##########
@@ -198,3 +228,71 @@ case object IcebergSqlExtensionsPostProcessor extends IcebergSqlExtensionsBaseLi
parent.addChild(new TerminalNodeImpl(f(newToken)))
}
}
+
+
+/* Partially copied from Apache Spark's Parser to avoid dependency on Spark Internals */
+case object ParseErrorListener extends BaseErrorListener {
Review comment:
nit: should we call it `IcebergParseErrorListener` to be consistent with other copied classes?
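
For illustration, a rename-only sketch (the body stays exactly as in the diff; only the object name changes to match the other copied `Iceberg*` classes):

```scala
case object IcebergParseErrorListener extends BaseErrorListener {
  // same syntaxError override as shown in this diff; nothing else changes,
  // the object is only renamed for consistency with the other copied classes
}
```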
##########
File path: spark3-extensions/src/main/scala/org/apache/spark/sql/catalyst/parser/extensions/IcebergSparkSqlExtensionsParser.scala
##########
@@ -198,3 +228,71 @@ case object IcebergSqlExtensionsPostProcessor extends IcebergSqlExtensionsBaseLi
parent.addChild(new TerminalNodeImpl(f(newToken)))
}
}
+
Review comment:
nit: extra line
##########
File path: build.gradle
##########
@@ -1021,6 +1031,18 @@ project(':iceberg-spark3-runtime') {
compile(project(':iceberg-nessie')) {
exclude group: 'com.google.code.findbugs', module: 'jsr305'
}
+
+ integrationImplementation 'org.apache.spark:spark-hive_2.12'
Review comment:
Do we need a separate source set as test inherits other things?
##########
File path: spark3-extensions/src/main/scala/org/apache/spark/sql/catalyst/parser/extensions/IcebergSparkSqlExtensionsParser.scala
##########
@@ -198,3 +228,71 @@ case object IcebergSqlExtensionsPostProcessor extends IcebergSqlExtensionsBaseLi
parent.addChild(new TerminalNodeImpl(f(newToken)))
}
}
+
+
+/* Partially copied from Apache Spark's Parser to avoid dependency on Spark Internals */
+case object ParseErrorListener extends BaseErrorListener {
+ override def syntaxError(
+ recognizer: Recognizer[_, _],
Review comment:
nit: formatting, let's use what we do in Scala that matches Spark's behavior
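
For example, the multi-line signature could use a 4-space continuation indent, which is what the other Scala files copied from Spark follow (a formatting-only sketch, assuming that is the convention meant here; the method body stays as in the diff):

```scala
override def syntaxError(
    recognizer: Recognizer[_, _],
    offendingSymbol: scala.Any,
    line: Int,
    charPositionInLine: Int,
    msg: String,
    e: RecognitionException): Unit = {
  // body unchanged from the diff above
}
```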
##########
File path: build.gradle
##########
@@ -1038,9 +1060,7 @@ project(':iceberg-spark3-runtime') {
relocate 'com.google', 'org.apache.iceberg.shaded.com.google'
relocate 'com.fasterxml', 'org.apache.iceberg.shaded.com.fasterxml'
relocate 'com.github.benmanes', 'org.apache.iceberg.shaded.com.github.benmanes'
- relocate 'org.antlr.v4.runtime', 'org.apache.iceberg.shaded.org.antlr.v4.runtime'
relocate 'org.checkerframework', 'org.apache.iceberg.shaded.org.checkerframework'
- relocate 'org.apache.avro', 'org.apache.iceberg.shaded.org.apache.avro'
Review comment:
Looks like this hasn't been reverted yet.
##########
File path: spark3-extensions/src/main/scala/org/apache/spark/sql/catalyst/parser/extensions/IcebergSparkSqlExtensionsParser.scala
##########
@@ -150,22 +148,54 @@ class IcebergSparkSqlExtensionsParser(delegate: ParserInterface) extends ParserI
}
}
catch {
- case e: ParseException if e.command.isDefined =>
+ case e: IcebergParseException if e.command.isDefined =>
throw e
- case e: ParseException =>
+ case e: IcebergParseException =>
throw e.withCommand(command)
case e: AnalysisException =>
val position = Origin(e.line, e.startPosition)
- throw new ParseException(Option(command), e.message, position, position)
+ throw new IcebergParseException(Option(command), e.message, position, position)
}
}
+
Review comment:
nit: extra empty lines
##########
File path: spark3-extensions/src/main/scala/org/apache/spark/sql/catalyst/parser/extensions/IcebergSparkSqlExtensionsParser.scala
##########
@@ -198,3 +228,71 @@ case object IcebergSqlExtensionsPostProcessor extends IcebergSqlExtensionsBaseLi
parent.addChild(new TerminalNodeImpl(f(newToken)))
}
}
+
+
+/* Partially copied from Apache Spark's Parser to avoid dependency on Spark Internals */
+case object ParseErrorListener extends BaseErrorListener {
+ override def syntaxError(
+ recognizer: Recognizer[_, _],
+ offendingSymbol: scala.Any,
+ line: Int,
+ charPositionInLine: Int,
+ msg: String,
+ e: RecognitionException): Unit = {
+ val (start, stop) = offendingSymbol match {
+ case token: CommonToken =>
+ val start = Origin(Some(line), Some(token.getCharPositionInLine))
+ val length = token.getStopIndex - token.getStartIndex + 1
+ val stop = Origin(Some(line), Some(token.getCharPositionInLine + length))
+ (start, stop)
+ case _ =>
+ val start = Origin(Some(line), Some(charPositionInLine))
+ (start, start)
+ }
+ throw new IcebergParseException(None, msg, start, stop)
+ }
+}
+
+/**
+ * Copied from Apache Spark
+ * A [[ParseException]] is an [[AnalysisException]] that is thrown during the parse process. It
+ * contains fields and an extended error message that make reporting and diagnosing errors easier.
+ */
+class IcebergParseException(
+ val command: Option[String],
+ message: String,
Review comment:
nit: formatting
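
Presumably the same 4-space continuation indent applies here as well; a sketch, assuming the remaining parameters and superclass call mirror Spark's `ParseException` that this class is copied from:

```scala
class IcebergParseException(
    val command: Option[String],
    message: String,
    val start: Origin,
    val stop: Origin) extends AnalysisException(message, start.line, start.startPosition) {
  // body as copied from Spark's ParseException
}
```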
##########
File path: build.gradle
##########
@@ -1054,10 +1075,22 @@ project(':iceberg-spark3-runtime') {
relocate 'org.apache.arrow', 'org.apache.iceberg.shaded.org.apache.arrow'
relocate 'com.carrotsearch', 'org.apache.iceberg.shaded.com.carrotsearch'
relocate 'org.threeten.extra', 'org.apache.iceberg.shaded.org.threeten.extra'
+ // relocate Antlr runtime and related deps to shade Iceberg specific version
+ relocate 'org.antlr.v4.runtime', 'org.apache.iceberg.shaded.org.antlr.v4.runtime'
classifier null
}
+ task integrationTest(type: Test) {
+ description = "Test Spark3 Runtime Jar"
+ group = "verification"
+ testClassesDirs = sourceSets.integration.output.classesDirs
+ classpath = sourceSets.integration.runtimeClasspath + files(shadowJar.archiveFile.get().asFile.path)
+ inputs.file(shadowJar.archiveFile.get().asFile.path)
+ }
+ integrationTest.dependsOn(shadowJar)
Review comment:
nit: I think we mostly use `integrationTest.dependsOn` shadowJar like on
the line below