dpgaspar commented on a change in pull request #11803:
URL: https://github.com/apache/incubator-superset/pull/11803#discussion_r543191578



##########
File path: superset/cli.py
##########
@@ -212,180 +221,305 @@ def refresh_druid(datasource: str, merge: bool) -> None:
     session.commit()
 
 
-@superset.command()
-@with_appcontext
-@click.option(
-    "--path",
-    "-p",
-    help="Path to a single JSON file or path containing multiple JSON "
-    "files to import (*.json)",
-)
-@click.option(
-    "--recursive",
-    "-r",
-    is_flag=True,
-    default=False,
-    help="recursively search the path for json files",
-)
-@click.option(
-    "--username",
-    "-u",
-    default=None,
-    help="Specify the user name to assign dashboards to",
-)
-def import_dashboards(path: str, recursive: bool, username: str) -> None:
-    """Import dashboards from JSON"""
-    from superset.dashboards.commands.importers.dispatcher import (
-        ImportDashboardsCommand,
-    )
-
-    path_object = Path(path)
-    files: List[Path] = []
-    if path_object.is_file():
-        files.append(path_object)
-    elif path_object.exists() and not recursive:
-        files.extend(path_object.glob("*.json"))
-    elif path_object.exists() and recursive:
-        files.extend(path_object.rglob("*.json"))
-    if username is not None:
-        g.user = security_manager.find_user(username=username)
-    contents = {path.name: open(path).read() for path in files}
-    try:
-        ImportDashboardsCommand(contents).run()
-    except Exception:  # pylint: disable=broad-except
-        logger.exception("Error when importing dashboard")
+if feature_flags.get("VERSIONED_EXPORT"):
 
+    @superset.command()
+    @with_appcontext
+    @click.option(
+        "--dashboard-file",
+        "-f",
+        default="dashboard_export_YYYYMMDDTHHMMSS",
+        help="Specify the file to export to",
+    )
+    def export_dashboards(dashboard_file: Optional[str]) -> None:
+        """Export dashboards to ZIP file"""
+        from superset.dashboards.commands.export import ExportDashboardsCommand
+        from superset.models.dashboard import Dashboard
+
+        g.user = security_manager.find_user(username="admin")
+
+        dashboard_ids = [id_ for (id_,) in db.session.query(Dashboard.id).all()]
+        timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
+        root = f"dashboard_export_{timestamp}"
+        dashboard_file = dashboard_file or f"{root}.zip"
+
+        with ZipFile(dashboard_file, "w") as bundle:
+            for file_name, file_content in ExportDashboardsCommand(dashboard_ids).run():
+                with bundle.open(f"{root}/{file_name}", "w") as fp:
+                    fp.write(file_content.encode())
+
+    # pylint: disable=too-many-locals
+    @superset.command()
+    @with_appcontext
+    @click.option(
+        "--datasource-file",
+        "-f",
+        default="dataset_export_YYYYMMDDTHHMMSS",
+        help="Specify the file to export to",
+    )
+    def export_datasources(datasource_file: Optional[str]) -> None:
+        """Export datasources to ZIP file"""
+        from superset.connectors.sqla.models import SqlaTable
+        from superset.datasets.commands.export import ExportDatasetsCommand
+
+        g.user = security_manager.find_user(username="admin")
+
+        dataset_ids = [id_ for (id_,) in db.session.query(SqlaTable.id).all()]
+        timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
+        root = f"dataset_export_{timestamp}"
+        datasource_file = datasource_file or f"{root}.zip"
+
+        with ZipFile(datasource_file, "w") as bundle:
+            for file_name, file_content in ExportDatasetsCommand(dataset_ids).run():
+                with bundle.open(f"{root}/{file_name}", "w") as fp:
+                    fp.write(file_content.encode())

Review comment:
       We could try/except and surface a prettier message to the user in case
       something goes wrong.
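
       For example, a rough sketch, assuming we keep the names from the diff
       (ZipFile, ExportDashboardsCommand, dashboard_ids, root, dashboard_file,
       logger) and use click's styled output; the exact message wording is
       just a placeholder:

           import sys

           import click

           # Hypothetical error handling around the export loop from the diff:
           # log the full traceback, but show the user a short styled message
           # and exit non-zero instead of dumping a raw stack trace.
           try:
               with ZipFile(dashboard_file, "w") as bundle:
                   for file_name, file_content in ExportDashboardsCommand(dashboard_ids).run():
                       with bundle.open(f"{root}/{file_name}", "w") as fp:
                           fp.write(file_content.encode())
           except Exception:  # pylint: disable=broad-except
               logger.exception("Error when exporting dashboards")
               click.secho("Error when exporting dashboards, please check the logs", fg="red")
               sys.exit(1)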

##########
File path: superset/cli.py
##########
@@ -212,180 +221,305 @@ def refresh_druid(datasource: str, merge: bool) -> None:
     session.commit()
 
 
-@superset.command()
-@with_appcontext
-@click.option(
-    "--path",
-    "-p",
-    help="Path to a single JSON file or path containing multiple JSON "
-    "files to import (*.json)",
-)
-@click.option(
-    "--recursive",
-    "-r",
-    is_flag=True,
-    default=False,
-    help="recursively search the path for json files",
-)
-@click.option(
-    "--username",
-    "-u",
-    default=None,
-    help="Specify the user name to assign dashboards to",
-)
-def import_dashboards(path: str, recursive: bool, username: str) -> None:
-    """Import dashboards from JSON"""
-    from superset.dashboards.commands.importers.dispatcher import (
-        ImportDashboardsCommand,
-    )
-
-    path_object = Path(path)
-    files: List[Path] = []
-    if path_object.is_file():
-        files.append(path_object)
-    elif path_object.exists() and not recursive:
-        files.extend(path_object.glob("*.json"))
-    elif path_object.exists() and recursive:
-        files.extend(path_object.rglob("*.json"))
-    if username is not None:
-        g.user = security_manager.find_user(username=username)
-    contents = {path.name: open(path).read() for path in files}
-    try:
-        ImportDashboardsCommand(contents).run()
-    except Exception:  # pylint: disable=broad-except
-        logger.exception("Error when importing dashboard")
+if feature_flags.get("VERSIONED_EXPORT"):
 
+    @superset.command()
+    @with_appcontext
+    @click.option(
+        "--dashboard-file",
+        "-f",
+        default="dashboard_export_YYYYMMDDTHHMMSS",
+        help="Specify the the file to export to",
+    )
+    def export_dashboards(dashboard_file: Optional[str]) -> None:
+        """Export dashboards to ZIP file"""
+        from superset.dashboards.commands.export import ExportDashboardsCommand
+        from superset.models.dashboard import Dashboard
+
+        g.user = security_manager.find_user(username="admin")
+
+        dashboard_ids = [id_ for (id_,) in db.session.query(Dashboard.id).all()]
+        timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
+        root = f"dashboard_export_{timestamp}"
+        dashboard_file = dashboard_file or f"{root}.zip"
+
+        with ZipFile(dashboard_file, "w") as bundle:
+            for file_name, file_content in ExportDashboardsCommand(dashboard_ids).run():
+                with bundle.open(f"{root}/{file_name}", "w") as fp:
+                    fp.write(file_content.encode())
+
+    # pylint: disable=too-many-locals
+    @superset.command()
+    @with_appcontext
+    @click.option(
+        "--datasource-file",
+        "-f",
+        default="dataset_export_YYYYMMDDTHHMMSS",
+        help="Specify the file to export to",
+    )
+    def export_datasources(datasource_file: Optional[str]) -> None:
+        """Export datasources to ZIP file"""
+        from superset.connectors.sqla.models import SqlaTable
+        from superset.datasets.commands.export import ExportDatasetsCommand
+
+        g.user = security_manager.find_user(username="admin")
+
+        dataset_ids = [id_ for (id_,) in db.session.query(SqlaTable.id).all()]
+        timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
+        root = f"dataset_export_{timestamp}"
+        datasource_file = datasource_file or f"{root}.zip"
+
+        with ZipFile(datasource_file, "w") as bundle:
+            for file_name, file_content in ExportDatasetsCommand(dataset_ids).run():
+                with bundle.open(f"{root}/{file_name}", "w") as fp:
+                    fp.write(file_content.encode())
+
+    @superset.command()
+    @with_appcontext
+    @click.option(
+        "--path", "-p", help="Path to a single ZIP file",
+    )
+    @click.option(
+        "--username",
+        "-u",
+        default=None,
+        help="Specify the user name to assign dashboards to",
+    )
+    def import_dashboards(path: str, username: Optional[str]) -> None:
+        """Import dashboards from ZIP file"""
+        from superset.dashboards.commands.importers.dispatcher import (
+            ImportDashboardsCommand,
+        )
 
-@superset.command()
-@with_appcontext
-@click.option(
-    "--dashboard-file", "-f", default=None, help="Specify the file to export to"
-)
-@click.option(
-    "--print_stdout", "-p", is_flag=True, default=False, help="Print JSON to stdout"
-)
-def export_dashboards(dashboard_file: str, print_stdout: bool) -> None:
-    """Export dashboards to JSON"""
-    from superset.utils import dashboard_import_export
-
-    data = dashboard_import_export.export_dashboards(db.session)
-    if print_stdout or not dashboard_file:
-        print(data)
-    if dashboard_file:
-        logger.info("Exporting dashboards to %s", dashboard_file)
-        with open(dashboard_file, "w") as data_stream:
-            data_stream.write(data)
+        if username is not None:
+            g.user = security_manager.find_user(username=username)
+        contents = {path: open(path).read()}
+        try:
+            ImportDashboardsCommand(contents).run()
+        except Exception:  # pylint: disable=broad-except
+            logger.exception("Error when importing dashboard")

Review comment:
       Since this is a CLI, it could be better to print a nicer message to the user.
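
       For example, with the same caveats as the sketch above (contents, path,
       and logger as in the diff; the message wording is a placeholder):

           try:
               ImportDashboardsCommand(contents).run()
           except Exception:  # pylint: disable=broad-except
               logger.exception("Error when importing dashboard")
               click.secho(f"Error importing {path}, please check the logs", fg="red")
               sys.exit(1)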

##########
File path: superset/cli.py
##########
@@ -212,180 +221,305 @@ def refresh_druid(datasource: str, merge: bool) -> None:
     session.commit()
 
 
-@superset.command()
-@with_appcontext
-@click.option(
-    "--path",
-    "-p",
-    help="Path to a single JSON file or path containing multiple JSON "
-    "files to import (*.json)",
-)
-@click.option(
-    "--recursive",
-    "-r",
-    is_flag=True,
-    default=False,
-    help="recursively search the path for json files",
-)
-@click.option(
-    "--username",
-    "-u",
-    default=None,
-    help="Specify the user name to assign dashboards to",
-)
-def import_dashboards(path: str, recursive: bool, username: str) -> None:
-    """Import dashboards from JSON"""
-    from superset.dashboards.commands.importers.dispatcher import (
-        ImportDashboardsCommand,
-    )
-
-    path_object = Path(path)
-    files: List[Path] = []
-    if path_object.is_file():
-        files.append(path_object)
-    elif path_object.exists() and not recursive:
-        files.extend(path_object.glob("*.json"))
-    elif path_object.exists() and recursive:
-        files.extend(path_object.rglob("*.json"))
-    if username is not None:
-        g.user = security_manager.find_user(username=username)
-    contents = {path.name: open(path).read() for path in files}
-    try:
-        ImportDashboardsCommand(contents).run()
-    except Exception:  # pylint: disable=broad-except
-        logger.exception("Error when importing dashboard")
+if feature_flags.get("VERSIONED_EXPORT"):
 
+    @superset.command()
+    @with_appcontext
+    @click.option(
+        "--dashboard-file",
+        "-f",
+        default="dashboard_export_YYYYMMDDTHHMMSS",
+        help="Specify the file to export to",
+    )
+    def export_dashboards(dashboard_file: Optional[str]) -> None:
+        """Export dashboards to ZIP file"""
+        from superset.dashboards.commands.export import ExportDashboardsCommand
+        from superset.models.dashboard import Dashboard
+
+        g.user = security_manager.find_user(username="admin")
+
+        dashboard_ids = [id_ for (id_,) in db.session.query(Dashboard.id).all()]
+        timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
+        root = f"dashboard_export_{timestamp}"
+        dashboard_file = dashboard_file or f"{root}.zip"
+
+        with ZipFile(dashboard_file, "w") as bundle:
+            for file_name, file_content in ExportDashboardsCommand(dashboard_ids).run():
+                with bundle.open(f"{root}/{file_name}", "w") as fp:
+                    fp.write(file_content.encode())
+
+    # pylint: disable=too-many-locals
+    @superset.command()
+    @with_appcontext
+    @click.option(
+        "--datasource-file",
+        "-f",
+        default="dataset_export_YYYYMMDDTHHMMSS",
+        help="Specify the file to export to",
+    )
+    def export_datasources(datasource_file: Optional[str]) -> None:
+        """Export datasources to ZIP file"""
+        from superset.connectors.sqla.models import SqlaTable
+        from superset.datasets.commands.export import ExportDatasetsCommand
+
+        g.user = security_manager.find_user(username="admin")
+
+        dataset_ids = [id_ for (id_,) in db.session.query(SqlaTable.id).all()]
+        timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
+        root = f"dataset_export_{timestamp}"
+        datasource_file = datasource_file or f"{root}.zip"
+
+        with ZipFile(datasource_file, "w") as bundle:
+            for file_name, file_content in ExportDatasetsCommand(dataset_ids).run():
+                with bundle.open(f"{root}/{file_name}", "w") as fp:
+                    fp.write(file_content.encode())
+
+    @superset.command()
+    @with_appcontext
+    @click.option(
+        "--path", "-p", help="Path to a single ZIP file",
+    )
+    @click.option(
+        "--username",
+        "-u",
+        default=None,
+        help="Specify the user name to assign dashboards to",
+    )
+    def import_dashboards(path: str, username: Optional[str]) -> None:
+        """Import dashboards from ZIP file"""
+        from superset.dashboards.commands.importers.dispatcher import (
+            ImportDashboardsCommand,
+        )
 
-@superset.command()
-@with_appcontext
-@click.option(
-    "--dashboard-file", "-f", default=None, help="Specify the file to export to"
-)
-@click.option(
-    "--print_stdout", "-p", is_flag=True, default=False, help="Print JSON to stdout"
-)
-def export_dashboards(dashboard_file: str, print_stdout: bool) -> None:
-    """Export dashboards to JSON"""
-    from superset.utils import dashboard_import_export
-
-    data = dashboard_import_export.export_dashboards(db.session)
-    if print_stdout or not dashboard_file:
-        print(data)
-    if dashboard_file:
-        logger.info("Exporting dashboards to %s", dashboard_file)
-        with open(dashboard_file, "w") as data_stream:
-            data_stream.write(data)
+        if username is not None:
+            g.user = security_manager.find_user(username=username)
+        contents = {path: open(path).read()}
+        try:
+            ImportDashboardsCommand(contents).run()
+        except Exception:  # pylint: disable=broad-except
+            logger.exception("Error when importing dashboard")
+
+    @superset.command()
+    @with_appcontext
+    @click.option(
+        "--path",
+        "-p",
+        help="Path to a single YAML file or path containing multiple YAML "
+        "files to import (*.yaml or *.yml)",
+    )
+    @click.option(
+        "--sync",
+        "-s",
+        "sync",
+        default="",
+        help="comma seperated list of element types to synchronize "
+        'e.g. "metrics,columns" deletes metrics and columns in the DB '
+        "that are not specified in the YAML file",
+    )
+    @click.option(
+        "--recursive",
+        "-r",
+        is_flag=True,
+        default=False,
+        help="recursively search the path for yaml files",
+    )
+    def import_datasources(path: str) -> None:
+        """Import datasources from ZIP file"""
+        from superset.datasets.commands.importers.dispatcher import (
+            ImportDatasetsCommand,
+        )
 
+        contents = {path: open(path).read()}
+        try:
+            ImportDatasetsCommand(contents).run()
+        except Exception:  # pylint: disable=broad-except
+            logger.exception("Error when importing dataset")

Review comment:
       Same here, prettier message?
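
       Since all three commands would repeat the same try/except plus
       click.secho pattern, one option is a small helper; this is a
       hypothetical sketch (run_or_exit is not part of this PR):

           def run_or_exit(command, error_message: str) -> None:
               # Hypothetical helper: run an import/export command, log the
               # full traceback, and surface a short styled message on failure.
               try:
                   command.run()
               except Exception:  # pylint: disable=broad-except
                   logger.exception(error_message)
                   click.secho(f"{error_message}, please check the logs", fg="red")
                   sys.exit(1)

           # Usage inside import_datasources:
           run_or_exit(ImportDatasetsCommand(contents), "Error when importing dataset")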



