Github user sounakr commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1469#discussion_r153094607
--- Diff:
integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonAnalysisRules.scala
---
@@ -143,52 +168,63 @@ case class CarbonIUDAnalysisRule(sparkSession: SparkSession) extends Rule[Logica
selectPlan
}
val finalPlan = if (filter.length > 0) {
- val alias = table.alias.getOrElse("")
var transformed: Boolean = false
// Create a dummy projection to include filter conditions
var newPlan: LogicalPlan = null
if (table.tableIdentifier.database.isDefined) {
newPlan = parser.parsePlan("select * from " +
-           table.tableIdentifier.database.getOrElse("") + "." +
-           table.tableIdentifier.table + " " + alias + " " + filter)
+           table.tableIdentifier.database.getOrElse("") + "." +
+           table.tableIdentifier.table + " " + alias.getOrElse("") + " " +
+           filter)
}
else {
newPlan = parser.parsePlan("select * from " +
-           table.tableIdentifier.table + " " + alias + " " + filter)
+           table.tableIdentifier.table + " " + alias.getOrElse("") + " " +
+           filter)
}
newPlan transform {
- case UnresolvedRelation(t, Some(a))
- if !transformed && t == table.tableIdentifier && a == alias =>
+ case CarbonUnresolvedRelation(t)
+ if !transformed && t == table.tableIdentifier =>
transformed = true
- // Add the filter condition of update statement on destination table
- SubqueryAlias(alias, updatedSelectPlan, Option(table.tableIdentifier))
+
+ val subqueryAlias = CarbonClassReflectionUtils
+   .getSubqueryAlias(sparkSession, alias, updatedSelectPlan, Some(table.tableIdentifier))
+ subqueryAlias
}
} else {
updatedSelectPlan
}
val tid = CarbonTableIdentifierImplicit.toTableIdentifier(Seq(table.tableIdentifier.toString()))
val tidSeq = Seq(GetDB.getDatabaseName(tid.database, sparkSession))
- val destinationTable = UnresolvedRelation(table.tableIdentifier, table.alias)
+ val destinationTable = CarbonClassReflectionUtils
+ .getUnresolvedRelation(table.tableIdentifier, alias)
+
ProjectForUpdate(destinationTable, columns, Seq(finalPlan))
}
- def processDeleteRecordsQuery(selectStmt: String, table: UnresolvedRelation): LogicalPlan = {
-   val tidSeq = Seq(GetDB.getDatabaseName(table.tableIdentifier.database, sparkSession),
-     table.tableIdentifier.table)
+
+ def processDeleteRecordsQuery(selectStmt: String,
+ alias: Option[String],
+ table: UnresolvedRelation): LogicalPlan = {
+ val tidSeq = Seq(GetDB.getDatabaseName(table.tableIdentifier.database, sparkSession),
+   table.tableIdentifier.table)
var addedTupleId = false
val parsePlan = parser.parsePlan(selectStmt)
+
val selectPlan = parsePlan transform {
case relation: UnresolvedRelation
  if table.tableIdentifier == relation.tableIdentifier && !addedTupleId =>
addedTupleId = true
val tupleId = UnresolvedAlias(Alias(UnresolvedFunction("getTupleId", Seq.empty, isDistinct = false), "tupleId")())
- val alias = table.alias match {
- case Some(alias) => Some(table.alias.toSeq)
+
+ val localalias = alias match {
--- End diff --
Done
---