aokolnychyi commented on code in PR #50044:
URL: https://github.com/apache/spark/pull/50044#discussion_r1976098170


##########
sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTableTests.scala:
##########
@@ -328,7 +336,7 @@ trait AlterTableTests extends SharedSparkSession with QueryErrorsBase {
   }

   test("SPARK-39383 DEFAULT columns on V2 data sources with ALTER TABLE ADD/ALTER COLUMN") {
-    withSQLConf(SQLConf.DEFAULT_COLUMN_ALLOWED_PROVIDERS.key -> s"$v2Format, ") {
+    withSQLConf(SQLConf.DEFAULT_COLUMN_ALLOWED_PROVIDERS.key -> s"$v2Format,$catalog") {

Review Comment:
   @viirya is correct. We previously passed `""` as the provider in the in-memory connector, which required workarounds like this one. That is no longer needed now that we pass the catalog name as the provider, which simplifies testing.



##########
sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryTableCatalog.scala:
##########
@@ -122,7 +122,7 @@ class BasicInMemoryTableCatalog extends TableCatalog {

   override def alterTable(ident: Identifier, changes: TableChange*): Table = {
     val table = loadTable(ident).asInstanceOf[InMemoryTable]
     val properties = CatalogV2Util.applyPropertiesChanges(table.properties, changes)
-    val schema = CatalogV2Util.applySchemaChanges(table.schema, changes, None, "ALTER TABLE")
+    val schema = CatalogV2Util.applySchemaChanges(table.schema, changes, Some(name), "ALTER TABLE")

Review Comment:
   Correct.



##########
sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryBaseTable.scala:
##########
@@ -718,6 +724,11 @@ private class BufferedRowsReader(
       schema: StructType,
       row: InternalRow): Any = {
     val index = schema.fieldIndex(field.name)
+
+    if (index >= row.numFields) {

Review Comment:
   This is needed to read data that was inserted before new columns were added to the schema. In that case, the schema contains more columns than the stored rows, and the new columns have to be filled in with their existence default values.
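   For readers following the last comment, here is a minimal sketch of the kind of fallback being described. It is not the PR's actual code: `ExistenceDefaultSketch` and `existenceDefaultValue` are hypothetical names, and how the existence default is resolved is an assumption here.

   ```scala
   import org.apache.spark.sql.catalyst.InternalRow
   import org.apache.spark.sql.types.{StructField, StructType}

   object ExistenceDefaultSketch {

     // Hypothetical helper: how the existence default is resolved (e.g. from
     // field metadata) is an assumption, not taken from the PR.
     def existenceDefaultValue(field: StructField): Any = ???

     def extractFieldValue(
         field: StructField,
         schema: StructType,
         row: InternalRow): Any = {
       val index = schema.fieldIndex(field.name)
       if (index >= row.numFields) {
         // The row was written before this column was added, so the stored row
         // has fewer fields than the current schema: use the existence default.
         existenceDefaultValue(field)
       } else {
         // Normal path: the value is physically present in the stored row.
         row.get(index, field.dataType)
       }
     }
   }
   ```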
########## sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTableTests.scala: ########## @@ -328,7 +336,7 @@ trait AlterTableTests extends SharedSparkSession with QueryErrorsBase { } test("SPARK-39383 DEFAULT columns on V2 data sources with ALTER TABLE ADD/ALTER COLUMN") { - withSQLConf(SQLConf.DEFAULT_COLUMN_ALLOWED_PROVIDERS.key -> s"$v2Format, ") { + withSQLConf(SQLConf.DEFAULT_COLUMN_ALLOWED_PROVIDERS.key -> s"$v2Format,$catalog") { Review Comment: @viirya is correct. We previously passed `"" ` as provider in the in-memory connector, which required workarounds like this. No longer needed as we pass the catalog name as provider. Simplifies testing. ########## sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryTableCatalog.scala: ########## @@ -122,7 +122,7 @@ class BasicInMemoryTableCatalog extends TableCatalog { override def alterTable(ident: Identifier, changes: TableChange*): Table = { val table = loadTable(ident).asInstanceOf[InMemoryTable] val properties = CatalogV2Util.applyPropertiesChanges(table.properties, changes) - val schema = CatalogV2Util.applySchemaChanges(table.schema, changes, None, "ALTER TABLE") + val schema = CatalogV2Util.applySchemaChanges(table.schema, changes, Some(name), "ALTER TABLE") Review Comment: Correct. ########## sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryBaseTable.scala: ########## @@ -718,6 +724,11 @@ private class BufferedRowsReader( schema: StructType, row: InternalRow): Any = { val index = schema.fieldIndex(field.name) + + if (index >= row.numFields) { Review Comment: This is needed to read data inserted prior to adding columns to the schema. If that happens, there would be extra columns in the schema and we have to default new columns using the existence default value. -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For queries about this service, please contact Infrastructure at: us...@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For additional commands, e-mail: reviews-h...@spark.apache.org