ashb commented on code in PR #59241:
URL: https://github.com/apache/airflow/pull/59241#discussion_r2603148582
##########
airflow-core/src/airflow/migrations/versions/0042_3_0_0_add_uuid_primary_key_to_task_instance_.py:
##########
@@ -255,22 +255,23 @@ def upgrade():
    elif dialect_name == "sqlite":
        from uuid6 import uuid7
-        stmt = text("SELECT COUNT(*) FROM task_instance WHERE id IS NULL")
        conn = op.get_bind()
-        task_instances = conn.execute(stmt).scalar()
-        uuid_values = [str(uuid7()) for _ in range(task_instances)]
-        # Ensure `uuid_values` is a list or iterable with the UUIDs for the update.
-        stmt = text("""
-            UPDATE task_instance
-            SET id = :uuid
-            WHERE id IS NULL
-        """)
+        stmt = text("SELECT rowid FROM task_instance WHERE id IS NULL")
+        rows = conn.execute(stmt).fetchall()
-        for uuid_value in uuid_values:
-            conn.execute(stmt.bindparams(uuid=uuid_value))
+        update_stmt = text("UPDATE task_instance SET id = :uuid WHERE rowid = :rowid")
+
+        for row in rows:
+            conn.execute(
+                update_stmt.bindparams(
+                    uuid=str(uuid7()),
+                    rowid=row.rowid,
+                )
+            )
    with op.batch_alter_table("task_instance") as batch_op:
+        conn.execute(text("PRAGMA defer_foreign_keys = ON;"))
Review Comment:
Nit: We should probably do it this way, as the `conn.execute` isn't part of the `batch_op`.
```suggestion
conn.execute(text("PRAGMA defer_foreign_keys = ON;"))
with op.batch_alter_table("task_instance") as batch_op:
```
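For context (my reading of the Alembic/SQLite mechanics, not something stated in the PR): `op.batch_alter_table` buffers its operations and only emits the copy-and-swap table rebuild when the `with` block exits, while a raw `conn.execute` runs immediately, so the PRAGMA takes effect before the rebuild either way; the reorder just makes the source order match the execution order. A minimal sketch of the reordered step, where the `alter_column` body is a hypothetical stand-in for the PR's actual batch operations:
```python
# Sketch only, assuming the usual Alembic migration imports.
import sqlalchemy as sa
from alembic import op
from sqlalchemy import text

conn = op.get_bind()

# Runs immediately on the connection: defer FK enforcement until commit,
# so the batch table rebuild below doesn't trip FK checks mid-copy.
conn.execute(text("PRAGMA defer_foreign_keys = ON;"))

# Operations on batch_op are buffered and emitted as copy-and-swap DDL
# only when the context exits, i.e. after the PRAGMA above has run.
with op.batch_alter_table("task_instance") as batch_op:
    # Hypothetical stand-in for the PR's real batch operations.
    batch_op.alter_column("id", existing_type=sa.String(length=36), nullable=False)
```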
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]