mistercrunch commented on a change in pull request #6544: Make owner a m2m 
relation on datasources
URL: 
https://github.com/apache/incubator-superset/pull/6544#discussion_r242005001
 
 

 ##########
 File path: 
superset/migrations/versions/3e1b21cd94a4_change_owner_to_m2m_relation_on_.py
 ##########
 @@ -0,0 +1,94 @@
+"""change_owner_to_m2m_relation_on_datasources.py
+
+Revision ID: 3e1b21cd94a4
+Revises: 4ce8df208545
+Create Date: 2018-12-15 12:34:47.228756
+
+"""
+
+# revision identifiers, used by Alembic.
+from superset import db
+
+revision = '3e1b21cd94a4'
+down_revision = '6c7537a6004a'
+
+from alembic import op
+import sqlalchemy as sa
+
+
+sqlatable_user = sa.Table(
+    'sqlatable_user', sa.MetaData(),
+    sa.Column('id', sa.Integer, primary_key=True),
+    sa.Column('user_id', sa.Integer, sa.ForeignKey('ab_user.id')),
+    sa.Column('table_id', sa.Integer, sa.ForeignKey('tables.id')),
+)
+
+SqlaTable = sa.Table(
+    'tables', sa.MetaData(),
+    sa.Column('id', sa.Integer, primary_key=True),
+    sa.Column('user_id', sa.Integer, sa.ForeignKey('ab_user.id')),
+)
+
+druiddatasource_user = sa.Table(
+    'druiddatasource_user', sa.MetaData(),
+    sa.Column('id', sa.Integer, primary_key=True),
+    sa.Column('user_id', sa.Integer, sa.ForeignKey('ab_user.id')),
+    sa.Column('datasource_id', sa.Integer, sa.ForeignKey('datasources.id')),
+)
+
+DruidDatasource = sa.Table(
+    'datasources', sa.MetaData(),
+    sa.Column('id', sa.Integer, primary_key=True),
+    sa.Column('user_id', sa.Integer, sa.ForeignKey('ab_user.id')),
+)
+
+
+def upgrade():
+    op.create_table('sqlatable_user',
+                    sa.Column('id', sa.Integer(), nullable=False),
+                    sa.Column('user_id', sa.Integer(), nullable=True),
+                    sa.Column('table_id', sa.Integer(), nullable=True),
+                    sa.ForeignKeyConstraint(['table_id'], ['tables.id'], ),
+                    sa.ForeignKeyConstraint(['user_id'], ['ab_user.id'], ),
+                    sa.PrimaryKeyConstraint('id')
+                    )
+    op.create_table('druiddatasource_user',
+                    sa.Column('id', sa.Integer(), nullable=False),
+                    sa.Column('user_id', sa.Integer(), nullable=True),
+                    sa.Column('datasource_id', sa.Integer(), nullable=True),
+                    sa.ForeignKeyConstraint(['datasource_id'], 
['datasources.id'], ),
+                    sa.ForeignKeyConstraint(['user_id'], ['ab_user.id'], ),
+                    sa.PrimaryKeyConstraint('id')
+                    )
+
+    bind = op.get_bind()
+    session = db.Session(bind=bind)
+
+    tables = session.query(SqlaTable).all()
+    for table in tables:
+        if table.user_id is not None:
+            session.execute(
+                sqlatable_user.insert().values(user_id=table.user_id, 
table_id=table.id)
+            )
+
+    druiddatasources = session.query(DruidDatasource).all()
+    for druiddatasource in druiddatasources:
+        if druiddatasource.user_id is not None:
+            session.execute(
+                
druiddatasource_user.insert().values(user_id=druiddatasource.user_id, 
datasource_id=druiddatasource.id)
+            )
+
+    session.close()
+    op.drop_constraint('user_id', 'tables', type_='foreignkey')
 
 Review comment:
  We've had issues in the past trying to drop constraints. I think other 
migrations have workarounds to catch edge cases; some of that may be reusable 
here. You can also wrap the brittle bits in a try block — in those edge cases, 
leaving an unused column behind may be less trouble than dealing with this 
issue in SQLAlchemy.

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to