yihua commented on code in PR #9145:
URL: https://github.com/apache/hudi/pull/9145#discussion_r1261618581
##########
hudi-spark-datasource/hudi-spark3.0.x/src/main/scala/org/apache/spark/sql/catalyst/analysis/HoodieSpark30Analysis.scala:
##########
@@ -197,11 +170,7 @@ object HoodieSpark30Analysis {
func.map(wrapper)
}
-
////////////////////////////////////////////////////////////////////////////////////////////
- // Following section is amended to the original (Spark's) implementation
- // >>> BEGINS
-
////////////////////////////////////////////////////////////////////////////////////////////
-
+ //START: custom Hudi change. Following section is amended to the original
(Spark's) implementation
Review Comment:
```suggestion
// START: custom Hudi change. Following section is amended to the
original (Spark's) implementation
```
##########
hudi-spark-datasource/hudi-spark3.1.x/src/main/scala/org/apache/spark/sql/catalyst/analysis/HoodieSpark31Analysis.scala:
##########
@@ -38,15 +39,13 @@ object HoodieSpark31Analysis {
override def apply(plan: LogicalPlan): LogicalPlan =
plan.resolveOperatorsUp {
case mO @ MergeIntoTable(targetTableO, sourceTableO, _, _, _)
- //// Hudi change: don't want to go to the spark mit resolution so we
resolve the source and target if they haven't been
- //
+ // START custom Hudi change: don't want to go to the spark mit
resolution so we resolve the source and target if they haven't been
Review Comment:
```suggestion
// START: custom Hudi change: don't want to go to the spark mit
resolution so we resolve the source and target if they haven't been
```
##########
hudi-spark-datasource/hudi-spark3.1.x/src/main/scala/org/apache/spark/sql/catalyst/analysis/HoodieSpark31Analysis.scala:
##########
@@ -38,15 +39,13 @@ object HoodieSpark31Analysis {
override def apply(plan: LogicalPlan): LogicalPlan =
plan.resolveOperatorsUp {
case mO @ MergeIntoTable(targetTableO, sourceTableO, _, _, _)
- //// Hudi change: don't want to go to the spark mit resolution so we
resolve the source and target if they haven't been
- //
+ // START custom Hudi change: don't want to go to the spark mit
resolution so we resolve the source and target if they haven't been
if !mO.resolved || containsUnresolvedStarAssignments(mO) =>
lazy val analyzer = spark.sessionState.analyzer
val targetTable = if (targetTableO.resolved) targetTableO else
analyzer.execute(targetTableO)
val sourceTable = if (sourceTableO.resolved) sourceTableO else
analyzer.execute(sourceTableO)
val m = mO.copy(targetTable = targetTable, sourceTable = sourceTable)
- //
- ////
+ // END custom Hudi change.
Review Comment:
```suggestion
// END: custom Hudi change.
```
##########
hudi-spark-datasource/hudi-spark3.2.x/src/main/scala/org/apache/spark/sql/adapter/Spark3_2Adapter.scala:
##########
@@ -123,8 +123,4 @@ class Spark3_2Adapter extends BaseSpark3Adapter {
case OFF_HEAP => "OFF_HEAP"
case _ => throw new IllegalArgumentException(s"Invalid StorageLevel:
$level")
}
-
- override def failAnalysisForMIT(a: Attribute, cols: String): Unit = {
Review Comment:
This should be moved to `HoodieSpark32CatalystPlanUtils`, correct?
##########
hudi-spark-datasource/hudi-spark3.3.x/src/main/scala/org/apache/spark/sql/adapter/Spark3_3Adapter.scala:
##########
@@ -124,8 +124,4 @@ class Spark3_3Adapter extends BaseSpark3Adapter {
case OFF_HEAP => "OFF_HEAP"
case _ => throw new IllegalArgumentException(s"Invalid StorageLevel:
$level")
}
-
- override def failAnalysisForMIT(a: Attribute, cols: String): Unit = {
Review Comment:
Same here — this should be moved to `HoodieSpark33CatalystPlanUtils`.
##########
hudi-spark-datasource/hudi-spark3.1.x/src/main/scala/org/apache/spark/sql/catalyst/analysis/HoodieSpark31Analysis.scala:
##########
@@ -197,11 +196,7 @@ object HoodieSpark31Analysis {
func.map(wrapper)
}
-
////////////////////////////////////////////////////////////////////////////////////////////
- // Following section is amended to the original (Spark's) implementation
- // >>> BEGINS
-
////////////////////////////////////////////////////////////////////////////////////////////
-
+ // START custom Hudi change: Following section is amended to the original
(Spark's) implementation
Review Comment:
```suggestion
// START: custom Hudi change: Following section is amended to the
original (Spark's) implementation
```
##########
hudi-spark-datasource/hudi-spark3.1.x/src/main/scala/org/apache/spark/sql/catalyst/analysis/HoodieSpark31Analysis.scala:
##########
@@ -214,10 +209,7 @@ object HoodieSpark31Analysis {
containsUnresolvedInsertStar || containsUnresolvedUpdateStar
}
-
-
////////////////////////////////////////////////////////////////////////////////////////////
- // <<< ENDS
-
////////////////////////////////////////////////////////////////////////////////////////////
+ // END custom Hudi change.
Review Comment:
```suggestion
// END: custom Hudi change.
```
##########
hudi-spark-datasource/hudi-spark3.2.x/src/main/scala/org/apache/spark/sql/adapter/Spark3_2Adapter.scala:
##########
@@ -123,8 +123,4 @@ class Spark3_2Adapter extends BaseSpark3Adapter {
case OFF_HEAP => "OFF_HEAP"
case _ => throw new IllegalArgumentException(s"Invalid StorageLevel:
$level")
}
-
- override def failAnalysisForMIT(a: Attribute, cols: String): Unit = {
Review Comment:
Do we have a unit test covering the failed analysis? Such a test should fail if
this override is removed without a replacement for Spark 3.2. If not, could we add one?
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]