This is an automated email from the ASF dual-hosted git repository.

maximebeauchemin pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-superset.git


The following commit(s) were added to refs/heads/master by this push:
     new 7388294  feat: import/export dashboards via cli (#5991)
7388294 is described below

commit 73882945bf93c9e6c3e63d876dbbff01c347cb34
Author: Arpit <[email protected]>
AuthorDate: Tue Oct 2 02:32:16 2018 +0530

    feat: import/export dashboards via cli (#5991)
    
    * feat: import/export dashboards via cli
    
    * style: fixed lint error
    
    * test: added test for import and export util
    
    * test: removing import test as it is causing integrity issues
    
    Import is a wrapper around existing functionality, so we can go ahead without a 
test, or mock the actual db operation using 
https://docs.python.org/3/library/unittest.mock.html
    
    And validate the wrapper operations only.
    
    * test: remove test data file
    
    * test: removed usage of reserved keyword id
---
 superset/cli.py                          | 52 ++++++++++++++++++++++++++++++--
 superset/dashboard_import_export_util.py | 39 ++++++++++++++++++++++++
 superset/views/core.py                   | 16 ++--------
 tests/import_export_tests.py             | 33 +++++++++++++++++++-
 4 files changed, 124 insertions(+), 16 deletions(-)

diff --git a/superset/cli.py b/superset/cli.py
index 6183601..4a34bdc 100755
--- a/superset/cli.py
+++ b/superset/cli.py
@@ -18,7 +18,8 @@ import werkzeug.serving
 import yaml
 
 from superset import (
-    app, data, db, dict_import_export_util, security_manager, utils,
+    app, dashboard_import_export_util, data, db,
+    dict_import_export_util, security_manager, utils,
 )
 
 config = app.config
@@ -227,6 +228,53 @@ def refresh_druid(datasource, merge):
 @app.cli.command()
 @click.option(
     '--path', '-p',
+    help='Path to a single JSON file or path containing multiple JSON files'
+         'files to import (*.json)')
[email protected](
+    '--recursive', '-r',
+    help='recursively search the path for json files')
+def import_dashboards(path, recursive=False):
+    """Import dashboards from JSON"""
+    p = Path(path)
+    files = []
+    if p.is_file():
+        files.append(p)
+    elif p.exists() and not recursive:
+        files.extend(p.glob('*.json'))
+    elif p.exists() and recursive:
+        files.extend(p.rglob('*.json'))
+    for f in files:
+        logging.info('Importing dashboard from file %s', f)
+        try:
+            with f.open() as data_stream:
+                dashboard_import_export_util.import_dashboards(
+                    db.session, data_stream)
+        except Exception as e:
+            logging.error('Error when importing dashboard from file %s', f)
+            logging.error(e)
+
+
@app.cli.command()
@click.option(
    '--dashboard-file', '-f', default=None,
    help='Specify the file to export to')
@click.option(
    '--print_stdout', '-p', is_flag=True, default=False,
    help='Print JSON to stdout')
def export_dashboards(print_stdout, dashboard_file):
    """Export dashboards to JSON"""
    data = dashboard_import_export_util.export_dashboards(db.session)
    # With no output file (or an explicit -p) the dump goes to stdout.
    if print_stdout or not dashboard_file:
        print(data)
    if dashboard_file:
        logging.info('Exporting dashboards to %s', dashboard_file)
        with open(dashboard_file, 'w') as data_stream:
            data_stream.write(data)
+
+
[email protected]()
[email protected](
+    '--path', '-p',
     help='Path to a single YAML file or path containing multiple YAML '
          'files to import (*.yaml or *.yml)')
 @click.option(
@@ -268,7 +316,7 @@ def import_datasources(path, sync, recursive=False):
     '--datasource-file', '-f', default=None,
     help='Specify the the file to export to')
 @click.option(
-    '--print', '-p',
+    '--print_stdout', '-p',
     help='Print YAML to stdout')
 @click.option(
     '--back-references', '-b',
diff --git a/superset/dashboard_import_export_util.py 
b/superset/dashboard_import_export_util.py
new file mode 100644
index 0000000..0a8fd25
--- /dev/null
+++ b/superset/dashboard_import_export_util.py
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+# pylint: disable=C,R,W
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import json
+import logging
+import time
+
+from superset import utils
+from superset.models.core import Dashboard
+
+
def import_dashboards(session, data_stream, import_time=None):
    """Import dashboards (and their datasources) from a JSON stream.

    Datasources referenced by the export are imported first, then the
    dashboards themselves; each phase is committed on ``session``.
    ``import_time`` defaults to the current Unix timestamp.
    """
    if import_time is None:
        import_time = int(time.time())
    content = json.loads(data_stream.read(), object_hook=utils.decode_dashboards)
    # TODO: import DRUID datasources
    for table in content['datasources']:
        type(table).import_obj(table, import_time=import_time)
    session.commit()
    for dashboard in content['dashboards']:
        Dashboard.import_obj(dashboard, import_time=import_time)
    session.commit()
+
+
def export_dashboards(session):
    """Returns all dashboards metadata as a json dump"""
    logging.info('Starting export')
    # Collect every dashboard id with a comprehension instead of a
    # manual append loop, then delegate serialization to the model.
    dashboard_ids = [dashboard.id for dashboard in session.query(Dashboard)]
    return Dashboard.export_dashboards(dashboard_ids)
diff --git a/superset/views/core.py b/superset/views/core.py
index f811fd8..6d37570 100755
--- a/superset/views/core.py
+++ b/superset/views/core.py
@@ -34,9 +34,8 @@ from werkzeug.routing import BaseConverter
 from werkzeug.utils import secure_filename
 
 from superset import (
-    app, appbuilder, cache, db, results_backend, security_manager, sql_lab, 
utils,
-    viz,
-)
+    app, appbuilder, cache, dashboard_import_export_util, db, results_backend,
+    security_manager, sql_lab, utils, viz)
 from superset.connectors.connector_registry import ConnectorRegistry
 from superset.connectors.sqla.models import AnnotationDatasource, SqlaTable
 from superset.exceptions import SupersetException
@@ -1238,16 +1237,7 @@ class Superset(BaseSupersetView):
         """Overrides the dashboards using json instances from the file."""
         f = request.files.get('file')
         if request.method == 'POST' and f:
-            current_tt = int(time.time())
-            data = json.loads(f.stream.read(), 
object_hook=utils.decode_dashboards)
-            # TODO: import DRUID datasources
-            for table in data['datasources']:
-                type(table).import_obj(table, import_time=current_tt)
-            db.session.commit()
-            for dashboard in data['dashboards']:
-                models.Dashboard.import_obj(
-                    dashboard, import_time=current_tt)
-            db.session.commit()
+            dashboard_import_export_util.import_dashboards(db.session, 
f.stream)
             return redirect('/dashboard/list/')
         return self.render_template('superset/import_dashboards.html')
 
diff --git a/tests/import_export_tests.py b/tests/import_export_tests.py
index 3a3d5f9..932f9e0 100644
--- a/tests/import_export_tests.py
+++ b/tests/import_export_tests.py
@@ -10,7 +10,7 @@ import unittest
 
 from sqlalchemy.orm.session import make_transient
 
-from superset import db, utils
+from superset import dashboard_import_export_util, db, utils
 from superset.connectors.druid.models import (
     DruidColumn, DruidDatasource, DruidMetric,
 )
@@ -149,6 +149,9 @@ class ImportExportTests(SupersetTestCase):
         return db.session.query(SqlaTable).filter_by(
             table_name=name).first()
 
    def get_num_dashboards(self):
        """Return the total number of Dashboard rows in the test database."""
        return db.session.query(models.Dashboard).count()
+
     def assert_dash_equals(self, expected_dash, actual_dash,
                            check_position=True):
         self.assertEquals(expected_dash.slug, actual_dash.slug)
@@ -547,6 +550,34 @@ class ImportExportTests(SupersetTestCase):
         self.assert_datasource_equals(
             copy_datasource, self.get_datasource(imported_id))
 
+    def test_export_dashboards_util(self):
+        dashboards_json_dump = dashboard_import_export_util.export_dashboards(
+            db.session)
+        dashboards_objects = json.loads(
+            dashboards_json_dump,
+            object_hook=utils.decode_dashboards,
+        )
+
+        exported_dashboards = dashboards_objects['dashboards']
+        for dashboard in exported_dashboards:
+            id_ = dashboard.id
+            dash = self.get_dash(id_)
+            self.assert_dash_equals(dash, dashboard)
+            self.assertEquals(
+                dash.id, json.loads(
+                    dashboard.json_metadata,
+                    object_hook=utils.decode_dashboards,
+                )['remote_id'],
+            )
+        numDasboards = self.get_num_dashboards()
+        self.assertEquals(numDasboards, len(exported_dashboards))
+
+        exported_tables = dashboards_objects['datasources']
+        for exported_table in exported_tables:
+            id_ = exported_table.id
+            table = self.get_table(id_)
+            self.assert_table_equals(table, exported_table)
+
 
 if __name__ == '__main__':
     unittest.main()

Reply via email to