MaxGekk commented on a change in pull request #29324:
URL: https://github.com/apache/spark/pull/29324#discussion_r464214388
##########
File path: sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
##########
@@ -184,15 +189,65 @@ abstract class JdbcDialect extends Serializable {
  /**
   * Rename an existing table.
   *
-  * TODO (SPARK-32382): Override this method in the dialects that don't support such syntax.
-  *
   * @param oldTable The existing table.
   * @param newTable New name of the table.
   * @return The SQL statement to use for renaming the table.
   */
  def renameTable(oldTable: String, newTable: String): String = {
    s"ALTER TABLE $oldTable RENAME TO $newTable"
  }
+
+  /**
+   * Alter an existing table.
+   *
+   * @param tableName The name of the table to be altered.
+   * @param changes Changes to apply to the table.
+   * @return The SQL statements to use for altering the table.
+   */
+  def alterTable(tableName: String, changes: Seq[TableChange]): Array[String] = {
+    val updateClause = ArrayBuilder.make[String]
+    for (change <- changes) {
+      change match {
+        case add: AddColumn =>
+          add.fieldNames match {
+            case Array(name) =>
+              val dataType = JdbcUtils.getJdbcType(add.dataType(), this).databaseTypeDefinition
+              updateClause += s"ALTER TABLE $tableName ADD COLUMN $name $dataType"
+          }
+        case rename: RenameColumn =>
+          rename.fieldNames match {
+            case Array(name) =>
+              updateClause += s"ALTER TABLE $tableName RENAME COLUMN $name TO ${rename.newName}"
+          }
+        case delete: DeleteColumn =>
+          delete.fieldNames match {
+            case Array(name) =>
+              updateClause += s"ALTER TABLE $tableName DROP COLUMN $name"
+          }
+        case update: UpdateColumnType =>
+          update.fieldNames match {
+            case Array(name) =>
+              val dataType = JdbcUtils.getJdbcType(update.newDataType(), this)
+                .databaseTypeDefinition
+              updateClause += s"ALTER TABLE $tableName ALTER COLUMN $name $dataType"
+          }
+        case update: UpdateColumnNullability =>
+          update.fieldNames match {
+            case Array(name) =>
+              if (update.nullable()) {
+                updateClause += s"ALTER TABLE $tableName ALTER COLUMN $name SET NULL"
+              } else {
+                updateClause += s"ALTER TABLE $tableName ALTER COLUMN $name SET NOT NULL"
+              }
Review comment:
nit:
```suggestion
              val nullable = if (update.nullable()) "NULL" else "NOT NULL"
              updateClause += s"ALTER TABLE $tableName ALTER COLUMN $name SET " + nullable
```
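For readers outside the diff, a minimal, runnable sketch of what the suggested simplification amounts to; the object and method names below are made up for illustration and are not part of the PR:
```scala
// Minimal sketch of the suggested refactoring (names are placeholders, not PR code):
// the two string-building branches collapse into a single clause.
object NullabilityClauseSketch {
  def alterColumnNullability(tableName: String, name: String, nullable: Boolean): String = {
    val nullability = if (nullable) "NULL" else "NOT NULL"
    s"ALTER TABLE $tableName ALTER COLUMN $name SET " + nullability
  }

  def main(args: Array[String]): Unit = {
    // Prints: ALTER TABLE test.alt_table ALTER COLUMN ID SET NOT NULL
    println(alterColumnNullability("test.alt_table", "ID", nullable = false))
  }
}
```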
##########
File path: sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
##########
@@ -184,15 +189,65 @@ abstract class JdbcDialect extends Serializable {
  /**
   * Rename an existing table.
   *
-  * TODO (SPARK-32382): Override this method in the dialects that don't support such syntax.
-  *
   * @param oldTable The existing table.
   * @param newTable New name of the table.
   * @return The SQL statement to use for renaming the table.
   */
  def renameTable(oldTable: String, newTable: String): String = {
    s"ALTER TABLE $oldTable RENAME TO $newTable"
  }
+
+  /**
+   * Alter an existing table.
+   *
+   * @param tableName The name of the table to be altered.
+   * @param changes Changes to apply to the table.
+   * @return The SQL statements to use for altering the table.
+   */
+  def alterTable(tableName: String, changes: Seq[TableChange]): Array[String] = {
+    val updateClause = ArrayBuilder.make[String]
+    for (change <- changes) {
+      change match {
+        case add: AddColumn =>
+          add.fieldNames match {
+            case Array(name) =>
+              val dataType = JdbcUtils.getJdbcType(add.dataType(), this).databaseTypeDefinition
+              updateClause += s"ALTER TABLE $tableName ADD COLUMN $name $dataType"
+          }
Review comment:
I would implement this (and below) in a more reliable way:
```suggestion
          for (name <- add.fieldNames) {
            val dataType = JdbcUtils.getJdbcType(add.dataType(), this).databaseTypeDefinition
            updateClause += s"ALTER TABLE $tableName ADD COLUMN $name $dataType"
          }
```
I am not sure that we can guarantee that the array always contains 1 element.
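Another way to address the same concern, purely as a hedged sketch and not part of the PR, is to reject multi-part (nested) field names explicitly rather than iterating over them; the helper name below is hypothetical:
```scala
// Hypothetical helper (not in the PR): fail loudly when a field name has more
// than one part, instead of assuming the array always holds a single element.
object FieldNameSketch {
  def singleColumnName(fieldNames: Array[String]): String = fieldNames match {
    case Array(name) => name
    case other =>
      throw new IllegalArgumentException(
        s"Expected a single top-level column name, got: ${other.mkString(".")}")
  }

  def main(args: Array[String]): Unit = {
    println(singleColumnName(Array("C1")))   // prints: C1
    // singleColumnName(Array("a", "b"))     // would throw IllegalArgumentException
  }
}
```
A call site could then build each ALTER TABLE clause from the single returned name instead of pattern matching in place.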
##########
File path: sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalogSuite.scala
##########
@@ -106,4 +108,70 @@ class JDBCTableCatalogSuite extends QueryTest with SharedSparkSession {
        Seq(Row("test", "people"), Row("test", "new_table")))
    }
  }
+
+  test("alter table ... add column") {
+    withTable("h2.test.alt_table") {
+      withConnection { conn =>
+        sql("CREATE TABLE h2.test.alt_table (ID INTEGER) USING _")
+      }
+      assert(checkColumnExistence("h2.test.alt_table", Array("ID")))
+      sql("ALTER TABLE h2.test.alt_table ADD COLUMNS (C1 INTEGER, C2 STRING)")
+      assert(checkColumnExistence("h2.test.alt_table", Array("ID", "C1", "C2")))
+      sql("ALTER TABLE h2.test.alt_table ADD COLUMNS (C3 DOUBLE)")
+      assert(checkColumnExistence("h2.test.alt_table", Array("ID", "C1", "C2", "C3")))
+    }
+  }
+
+  test("alter table ... rename column") {
+    withTable("h2.test.alt_table") {
+      withConnection { conn =>
+        sql("CREATE TABLE h2.test.alt_table (ID INTEGER) USING _")
+      }
+      assert(checkColumnExistence("h2.test.alt_table", Array("ID")))
+      sql("ALTER TABLE h2.test.alt_table RENAME COLUMN ID TO C")
+      assert(checkColumnExistence("h2.test.alt_table", Array("C")))
+    }
+  }
+
+  test("alter table ... drop column") {
+    withTable("h2.test.alt_table") {
+      withConnection { conn =>
+        sql("CREATE TABLE h2.test.alt_table (C1 INTEGER, C2 INTEGER) USING _")
+      }
+      assert(checkColumnExistence("h2.test.alt_table", Array("C1", "C2")))
+      sql("ALTER TABLE h2.test.alt_table DROP COLUMN C1")
+      assert(checkColumnExistence("h2.test.alt_table", Array("C2")))
+    }
+  }
+
+  test("alter table ... update column type") {
+    withTable("h2.test.alt_table") {
+      withConnection { conn =>
+        sql("CREATE TABLE h2.test.alt_table (ID INTEGER) USING _")
+      }
Review comment:
```suggestion
      sql("CREATE TABLE h2.test.alt_table (ID INTEGER) USING _")
```
##########
File path: sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalogSuite.scala
##########
@@ -106,4 +108,70 @@ class JDBCTableCatalogSuite extends QueryTest with SharedSparkSession {
        Seq(Row("test", "people"), Row("test", "new_table")))
    }
  }
+
+  test("alter table ... add column") {
+    withTable("h2.test.alt_table") {
+      withConnection { conn =>
+        sql("CREATE TABLE h2.test.alt_table (ID INTEGER) USING _")
+      }
Review comment:
```suggestion
      sql("CREATE TABLE h2.test.alt_table (ID INTEGER) USING _")
```
##########
File path: sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalogSuite.scala
##########
@@ -106,4 +108,70 @@ class JDBCTableCatalogSuite extends QueryTest with SharedSparkSession {
        Seq(Row("test", "people"), Row("test", "new_table")))
    }
  }
+
+  test("alter table ... add column") {
+    withTable("h2.test.alt_table") {
+      withConnection { conn =>
+        sql("CREATE TABLE h2.test.alt_table (ID INTEGER) USING _")
+      }
+      assert(checkColumnExistence("h2.test.alt_table", Array("ID")))
+      sql("ALTER TABLE h2.test.alt_table ADD COLUMNS (C1 INTEGER, C2 STRING)")
+      assert(checkColumnExistence("h2.test.alt_table", Array("ID", "C1", "C2")))
+      sql("ALTER TABLE h2.test.alt_table ADD COLUMNS (C3 DOUBLE)")
+      assert(checkColumnExistence("h2.test.alt_table", Array("ID", "C1", "C2", "C3")))
+    }
+  }
+
+  test("alter table ... rename column") {
+    withTable("h2.test.alt_table") {
+      withConnection { conn =>
+        sql("CREATE TABLE h2.test.alt_table (ID INTEGER) USING _")
+      }
+      assert(checkColumnExistence("h2.test.alt_table", Array("ID")))
+      sql("ALTER TABLE h2.test.alt_table RENAME COLUMN ID TO C")
+      assert(checkColumnExistence("h2.test.alt_table", Array("C")))
+    }
+  }
+
+  test("alter table ... drop column") {
+    withTable("h2.test.alt_table") {
+      withConnection { conn =>
+        sql("CREATE TABLE h2.test.alt_table (C1 INTEGER, C2 INTEGER) USING _")
+      }
Review comment:
```suggestion
      sql("CREATE TABLE h2.test.alt_table (C1 INTEGER, C2 INTEGER) USING _")
```
##########
File path: sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalogSuite.scala
##########
@@ -106,4 +108,70 @@ class JDBCTableCatalogSuite extends QueryTest with SharedSparkSession {
        Seq(Row("test", "people"), Row("test", "new_table")))
    }
  }
+
+  test("alter table ... add column") {
+    withTable("h2.test.alt_table") {
+      withConnection { conn =>
+        sql("CREATE TABLE h2.test.alt_table (ID INTEGER) USING _")
+      }
+      assert(checkColumnExistence("h2.test.alt_table", Array("ID")))
+      sql("ALTER TABLE h2.test.alt_table ADD COLUMNS (C1 INTEGER, C2 STRING)")
+      assert(checkColumnExistence("h2.test.alt_table", Array("ID", "C1", "C2")))
+      sql("ALTER TABLE h2.test.alt_table ADD COLUMNS (C3 DOUBLE)")
+      assert(checkColumnExistence("h2.test.alt_table", Array("ID", "C1", "C2", "C3")))
+    }
+  }
+
+  test("alter table ... rename column") {
+    withTable("h2.test.alt_table") {
+      withConnection { conn =>
+        sql("CREATE TABLE h2.test.alt_table (ID INTEGER) USING _")
+      }
Review comment:
```suggestion
      sql("CREATE TABLE h2.test.alt_table (ID INTEGER) USING _")
```
##########
File path: sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalogSuite.scala
##########
@@ -106,4 +108,70 @@ class JDBCTableCatalogSuite extends QueryTest with SharedSparkSession {
        Seq(Row("test", "people"), Row("test", "new_table")))
    }
  }
+
+  test("alter table ... add column") {
+    withTable("h2.test.alt_table") {
+      withConnection { conn =>
+        sql("CREATE TABLE h2.test.alt_table (ID INTEGER) USING _")
+      }
+      assert(checkColumnExistence("h2.test.alt_table", Array("ID")))
+      sql("ALTER TABLE h2.test.alt_table ADD COLUMNS (C1 INTEGER, C2 STRING)")
+      assert(checkColumnExistence("h2.test.alt_table", Array("ID", "C1", "C2")))
+      sql("ALTER TABLE h2.test.alt_table ADD COLUMNS (C3 DOUBLE)")
+      assert(checkColumnExistence("h2.test.alt_table", Array("ID", "C1", "C2", "C3")))
+    }
+  }
+
+  test("alter table ... rename column") {
+    withTable("h2.test.alt_table") {
+      withConnection { conn =>
+        sql("CREATE TABLE h2.test.alt_table (ID INTEGER) USING _")
+      }
+      assert(checkColumnExistence("h2.test.alt_table", Array("ID")))
+      sql("ALTER TABLE h2.test.alt_table RENAME COLUMN ID TO C")
+      assert(checkColumnExistence("h2.test.alt_table", Array("C")))
+    }
+  }
+
+  test("alter table ... drop column") {
+    withTable("h2.test.alt_table") {
+      withConnection { conn =>
+        sql("CREATE TABLE h2.test.alt_table (C1 INTEGER, C2 INTEGER) USING _")
+      }
+      assert(checkColumnExistence("h2.test.alt_table", Array("C1", "C2")))
+      sql("ALTER TABLE h2.test.alt_table DROP COLUMN C1")
+      assert(checkColumnExistence("h2.test.alt_table", Array("C2")))
+    }
+  }
+
+  test("alter table ... update column type") {
+    withTable("h2.test.alt_table") {
+      withConnection { conn =>
+        sql("CREATE TABLE h2.test.alt_table (ID INTEGER) USING _")
+      }
+      sql("ALTER TABLE h2.test.alt_table ALTER COLUMN id TYPE DOUBLE")
+      assert(sql(s"DESCRIBE TABLE h2.test.alt_table").select("data_type").first()
+        === Row("double"))
+    }
+  }
+
+  test("alter table ... update column comment not supported") {
+    withTable("h2.test.alt_table") {
+      withConnection { conn =>
+        sql("CREATE TABLE h2.test.alt_table (ID INTEGER) USING _")
+      }
Review comment:
```suggestion
      sql("CREATE TABLE h2.test.alt_table (ID INTEGER) USING _")
```
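The checkColumnExistence helper used throughout these tests is not quoted in the diff; the following is only a guess at an equivalent check, assuming it compares the table's resolved schema against the expected column names, and is not the suite's actual implementation:
```scala
// Hypothetical stand-in for the suite's checkColumnExistence helper (its real
// implementation is not shown in the quoted diff).
import org.apache.spark.sql.SparkSession

object ColumnExistenceSketch {
  def checkColumnExistence(
      spark: SparkSession,
      tableName: String,
      expected: Array[String]): Boolean = {
    // Compare the resolved schema's field names against the expected columns, in order.
    spark.table(tableName).schema.fieldNames.sameElements(expected)
  }
}
```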