This is an automated email from the ASF dual-hosted git repository.
yao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/master by this push:
new 39de21016 Revert "[KYUUBI #5757][AUTHZ] Support alter path-based table for Delta Lake in Authz"
39de21016 is described below
commit 39de2101616517fa63439ba58dabb6ed646f7f50
Author: Kent Yao <[email protected]>
AuthorDate: Wed Nov 29 16:38:53 2023 +0800
Revert "[KYUUBI #5757][AUTHZ] Support alter path-based table for Delta Lake
in Authz"
This reverts commit 31299347e399d447d0cccc01ac7341fce0f374de.
---
.../src/main/resources/table_command_spec.json | 42 ++-------
.../plugin/spark/authz/gen/TableCommands.scala | 13 +--
.../DeltaCatalogRangerSparkExtensionSuite.scala | 102 +--------------------
3 files changed, 14 insertions(+), 143 deletions(-)
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
index 68334c3b5..8e55009e2 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
+++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
@@ -13,11 +13,7 @@
} ],
"opType" : "ALTERTABLE_ADDCOLS",
"queryDescs" : [ ],
- "uriDescs" : [ {
- "fieldName" : "child",
- "fieldExtractor" : "ResolvedTableURIExtractor",
- "isInput" : false
- } ]
+ "uriDescs" : [ ]
}, {
"classname" : "org.apache.spark.sql.catalyst.plans.logical.AddPartitions",
"tableDescs" : [ {
@@ -49,11 +45,7 @@
} ],
"opType" : "ALTERTABLE_ADDCOLS",
"queryDescs" : [ ],
- "uriDescs" : [ {
- "fieldName" : "child",
- "fieldExtractor" : "ResolvedTableURIExtractor",
- "isInput" : false
- } ]
+ "uriDescs" : [ ]
}, {
"classname" : "org.apache.spark.sql.catalyst.plans.logical.AlterTable",
"tableDescs" : [ {
@@ -69,11 +61,7 @@
} ],
"opType" : "ALTERTABLE_PROPERTIES",
"queryDescs" : [ ],
- "uriDescs" : [ {
- "fieldName" : "ident",
- "fieldExtractor" : "IdentifierURIExtractor",
- "isInput" : false
- } ]
+ "uriDescs" : [ ]
}, {
"classname" : "org.apache.spark.sql.catalyst.plans.logical.AppendData",
"tableDescs" : [ {
@@ -331,11 +319,7 @@
} ],
"opType" : "ALTERTABLE_ADDCOLS",
"queryDescs" : [ ],
- "uriDescs" : [ {
- "fieldName" : "child",
- "fieldExtractor" : "ResolvedTableURIExtractor",
- "isInput" : false
- } ]
+ "uriDescs" : [ ]
}, {
"classname" : "org.apache.spark.sql.catalyst.plans.logical.DropPartitions",
"tableDescs" : [ {
@@ -494,11 +478,7 @@
} ],
"opType" : "ALTERTABLE_RENAMECOL",
"queryDescs" : [ ],
- "uriDescs" : [ {
- "fieldName" : "child",
- "fieldExtractor" : "ResolvedTableURIExtractor",
- "isInput" : false
- } ]
+ "uriDescs" : [ ]
}, {
"classname" : "org.apache.spark.sql.catalyst.plans.logical.RenamePartitions",
"tableDescs" : [ {
@@ -546,11 +526,7 @@
} ],
"opType" : "ALTERTABLE_REPLACECOLS",
"queryDescs" : [ ],
- "uriDescs" : [ {
- "fieldName" : "child",
- "fieldExtractor" : "ResolvedTableURIExtractor",
- "isInput" : false
- } ]
+ "uriDescs" : [ ]
}, {
"classname" : "org.apache.spark.sql.catalyst.plans.logical.ReplaceData",
"tableDescs" : [ {
@@ -700,11 +676,7 @@
} ],
"opType" : "ALTERTABLE_PROPERTIES",
"queryDescs" : [ ],
- "uriDescs" : [ {
- "fieldName" : "table",
- "fieldExtractor" : "ResolvedTableURIExtractor",
- "isInput" : false
- } ]
+ "uriDescs" : [ ]
}, {
"classname" : "org.apache.spark.sql.catalyst.plans.logical.ShowCreateTable",
"tableDescs" : [ {
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
index aced937b9..ee0345a8c 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
@@ -39,8 +39,7 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
val AlterTable = {
val cmd = "org.apache.spark.sql.catalyst.plans.logical.AlterTable"
val tableDesc = TableDesc("ident", classOf[IdentifierTableExtractor])
- val uriDescs = Seq(UriDesc("ident", classOf[IdentifierURIExtractor]))
- TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_PROPERTIES, uriDescs = uriDescs)
+ TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_PROPERTIES)
}
val AlterTableAddColumns = {
@@ -52,8 +51,7 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
val AddColumns = {
val cmd = "org.apache.spark.sql.catalyst.plans.logical.AddColumns"
- val uriDescs = Seq(UriDesc("child", classOf[ResolvedTableURIExtractor]))
- TableCommandSpec(cmd, Seq(resolvedTableDesc), ALTERTABLE_ADDCOLS, uriDescs = uriDescs)
+ TableCommandSpec(cmd, Seq(resolvedTableDesc), ALTERTABLE_ADDCOLS)
}
val AlterColumn = {
@@ -68,12 +66,12 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
val ReplaceColumns = {
val cmd = "org.apache.spark.sql.catalyst.plans.logical.ReplaceColumns"
- AddColumns.copy(classname = cmd, opType = ALTERTABLE_REPLACECOLS)
+ TableCommandSpec(cmd, Seq(resolvedTableDesc), ALTERTABLE_REPLACECOLS)
}
val RenameColumn = {
val cmd = "org.apache.spark.sql.catalyst.plans.logical.RenameColumn"
- AddColumns.copy(classname = cmd, opType = ALTERTABLE_RENAMECOL)
+ TableCommandSpec(cmd, Seq(resolvedTableDesc), ALTERTABLE_RENAMECOL)
}
val AlterTableAddPartition = {
@@ -640,8 +638,7 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
val SetTableProperties = {
val cmd = "org.apache.spark.sql.catalyst.plans.logical.SetTableProperties"
val tableDesc = TableDesc("table", classOf[ResolvedTableTableExtractor])
- val uriDescs = Seq(UriDesc("table", classOf[ResolvedTableURIExtractor]))
- TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_PROPERTIES, uriDescs = uriDescs)
+ TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_PROPERTIES)
}
val AddArchivesCommand = {
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/DeltaCatalogRangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/DeltaCatalogRangerSparkExtensionSuite.scala
index 1ce8ad676..c1dda6989 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/DeltaCatalogRangerSparkExtensionSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/DeltaCatalogRangerSparkExtensionSuite.scala
@@ -25,7 +25,7 @@ import org.apache.kyuubi.plugin.spark.authz.AccessControlException
import org.apache.kyuubi.plugin.spark.authz.RangerTestNamespace._
import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
import org.apache.kyuubi.plugin.spark.authz.ranger.DeltaCatalogRangerSparkExtensionSuite._
-import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils.{isSparkV32OrGreater, isSparkV35OrGreater}
+import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils.isSparkV32OrGreater
import org.apache.kyuubi.tags.DeltaTest
import org.apache.kyuubi.util.AssertionUtils._
@@ -41,14 +41,6 @@ class DeltaCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
val table1 = "table1_delta"
val table2 = "table2_delta"
- def propString(props: Map[String, String]): String =
- if (props.isEmpty) ""
- else {
- props
- .map { case (key, value) => s"'$key' = '$value'" }
- .mkString("TBLPROPERTIES (", ",", ")")
- }
-
def createTableSql(namespace: String, table: String): String =
s"""
|CREATE TABLE IF NOT EXISTS $namespace.$table (
@@ -61,7 +53,7 @@ class DeltaCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
|PARTITIONED BY (gender)
|""".stripMargin
- def createPathBasedTableSql(path: Path, props: Map[String, String] = Map.empty): String =
+ def createPathBasedTableSql(path: Path): String =
s"""
|CREATE TABLE IF NOT EXISTS delta.`$path` (
| id INT,
@@ -71,7 +63,6 @@ class DeltaCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
|)
|USING DELTA
|PARTITIONED BY (gender)
- |${propString(props)}
|""".stripMargin
override def withFixture(test: NoArgTest): Outcome = {
@@ -477,95 +468,6 @@ class DeltaCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
doAs(admin, sql(vacuumTableSql2))
})
}
-
- test("alter path-based table set properties") {
- withTempDir(path => {
- doAs(admin, sql(createPathBasedTableSql(path)))
- val setPropertiesSql = s"ALTER TABLE delta.`$path`" +
- s" SET TBLPROPERTIES ('delta.appendOnly' = 'true')"
- interceptEndsWith[AccessControlException](
- doAs(someone, sql(setPropertiesSql)))(
- s"does not have [write] privilege on [[$path, $path/]]")
- doAs(admin, sql(setPropertiesSql))
- })
- }
-
- test("alter path-based table add columns") {
- withTempDir(path => {
- doAs(admin, sql(createPathBasedTableSql(path)))
- val addColumnsSql = s"ALTER TABLE delta.`$path` ADD COLUMNS (age int)"
- interceptEndsWith[AccessControlException](
- doAs(someone, sql(addColumnsSql)))(
- s"does not have [write] privilege on [[$path, $path/]]")
- doAs(admin, sql(addColumnsSql))
- })
- }
-
- test("alter path-based table change column") {
- withTempDir(path => {
- doAs(admin, sql(createPathBasedTableSql(path)))
- val changeColumnSql = s"ALTER TABLE delta.`$path`" +
- s" CHANGE COLUMN gender gender STRING AFTER birthDate"
- interceptEndsWith[AccessControlException](
- doAs(someone, sql(changeColumnSql)))(
- s"does not have [write] privilege on [[$path, $path/]]")
- doAs(admin, sql(changeColumnSql))
- })
- }
-
- test("alter path-based table drop column") {
- assume(
- isSparkV32OrGreater,
- "alter table drop column is available in Delta Lake 1.2.0 and above")
-
- withTempDir(path => {
- doAs(admin, sql(createPathBasedTableSql(path, Map("delta.columnMapping.mode" -> "name"))))
- val dropColumnSql = s"ALTER TABLE delta.`$path` DROP COLUMN birthDate"
- interceptEndsWith[AccessControlException](
- doAs(someone, sql(dropColumnSql)))(
- s"does not have [write] privilege on [[$path, $path/]]")
- doAs(admin, sql(dropColumnSql))
- })
- }
-
- test("alter path-based table rename column") {
- assume(
- isSparkV32OrGreater,
- "alter table rename column is available in Delta Lake 1.2.0 and above")
-
- withTempDir(path => {
- doAs(admin, sql(createPathBasedTableSql(path, Map("delta.columnMapping.mode" -> "name"))))
- val renameColumnSql = s"ALTER TABLE delta.`$path`" +
- s" RENAME COLUMN birthDate TO dateOfBirth"
- interceptEndsWith[AccessControlException](
- doAs(someone, sql(renameColumnSql)))(
- s"does not have [write] privilege on [[$path, $path/]]")
- doAs(admin, sql(renameColumnSql))
- })
- }
-
- test("alter path-based table replace columns") {
- withTempDir(path => {
- assume(
- isSparkV32OrGreater,
- "alter table replace columns is not available in Delta Lake 1.0.1")
-
- doAs(admin, sql(createPathBasedTableSql(path, Map("delta.columnMapping.mode" -> "name"))))
- val replaceColumnsSql = s"ALTER TABLE delta.`$path`" +
- s" REPLACE COLUMNS (id INT, name STRING, gender STRING)"
- interceptEndsWith[AccessControlException](
- doAs(someone, sql(replaceColumnsSql)))(
- s"does not have [write] privilege on [[$path, $path/]]")
-
- // There was a bug before Delta Lake 3.0, it will throw AnalysisException message
- // "Cannot drop column from a struct type with a single field:
- // StructType(StructField(birthDate,TimestampType,true))".
- // For details, see https://github.com/delta-io/delta/pull/1822
- if (isSparkV35OrGreater) {
- doAs(admin, sql(replaceColumnsSql))
- }
- })
- }
}
object DeltaCatalogRangerSparkExtensionSuite {