This is an automated email from the ASF dual-hosted git repository.

jedcunningham pushed a commit to branch v2-2-test
in repository https://gitbox.apache.org/repos/asf/airflow.git
commit 815130724f6dc78359153f3643088408be30e0cb
Author: Kaxil Naik <[email protected]>
AuthorDate: Wed Nov 3 18:30:03 2021 +0000

    Fix downgrade for a DB Migration (#19390)

    The downgrade was not working because of the issues fixed in this PR

    (cherry picked from commit a373ca347bb2e6308f1b91d2a6a0ae0cf1d39332)
---
 .../versions/7b2661a43ba3_taskinstance_keyed_to_dagrun.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/airflow/migrations/versions/7b2661a43ba3_taskinstance_keyed_to_dagrun.py b/airflow/migrations/versions/7b2661a43ba3_taskinstance_keyed_to_dagrun.py
index 91acf5e..dd27840 100644
--- a/airflow/migrations/versions/7b2661a43ba3_taskinstance_keyed_to_dagrun.py
+++ b/airflow/migrations/versions/7b2661a43ba3_taskinstance_keyed_to_dagrun.py
@@ -349,12 +349,12 @@ def downgrade():
         batch_op.drop_index('idx_task_reschedule_dag_task_run')
 
     with op.batch_alter_table('task_instance', schema=None) as batch_op:
+        batch_op.drop_constraint('task_instance_pkey', type_='primary')
         batch_op.alter_column('execution_date', existing_type=dt_type, existing_nullable=True, nullable=False)
         batch_op.alter_column(
             'dag_id', existing_type=string_id_col_type, existing_nullable=True, nullable=True
         )
-        batch_op.drop_constraint('task_instance_pkey', type_='primary')
         batch_op.create_primary_key('task_instance_pkey', ['dag_id', 'task_id', 'execution_date'])
 
         batch_op.drop_constraint('task_instance_dag_run_fkey', type_='foreignkey')
@@ -418,11 +418,11 @@ def downgrade():
         )
     else:
         with op.batch_alter_table('dag_run', schema=None) as batch_op:
-            batch_op.drop_index('dag_id_state', table_name='dag_run')
+            batch_op.drop_index('dag_id_state')
             batch_op.alter_column('run_id', existing_type=sa.VARCHAR(length=250), nullable=True)
             batch_op.alter_column('execution_date', existing_type=dt_type, nullable=True)
             batch_op.alter_column('dag_id', existing_type=sa.VARCHAR(length=250), nullable=True)
-            batch_op.create_index('dag_id_state', 'dag_run', ['dag_id', 'state'], unique=False)
+            batch_op.create_index('dag_id_state', ['dag_id', 'state'], unique=False)
 
 def _multi_table_update(dialect_name, target, column):
