This is an automated email from the ASF dual-hosted git repository.
beto pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-superset.git
The following commit(s) were added to refs/heads/master by this push:
new fbb458f feat: add modal to import datasets (#11910)
fbb458f is described below
commit fbb458fa8b13dcdc32c7cee38a6f5a2cca270ca9
Author: Beto Dealmeida <[email protected]>
AuthorDate: Mon Dec 7 16:20:25 2020 -0800
feat: add modal to import datasets (#11910)
---
.../components/ImportModal/ImportModal.test.tsx | 106 ++++++++++++
.../datasource/components/ImportModal/index.tsx | 186 +++++++++++++++++++++
.../views/CRUD/data/dataset/AddDatasetModal.tsx | 2 +-
.../src/views/CRUD/data/dataset/DatasetList.tsx | 36 ++++
.../src/views/CRUD/data/dataset/types.ts | 61 +++++++
.../databases/commands/importers/v1/__init__.py | 21 ++-
superset/datasets/api.py | 9 +-
superset/datasets/commands/importers/dispatcher.py | 4 +-
superset/datasets/commands/importers/v0.py | 10 +-
.../datasets/commands/importers/v1/__init__.py | 18 ++
superset/datasets/schemas.py | 16 +-
11 files changed, 447 insertions(+), 22 deletions(-)
diff --git a/superset-frontend/src/datasource/components/ImportModal/ImportModal.test.tsx b/superset-frontend/src/datasource/components/ImportModal/ImportModal.test.tsx
new file mode 100644
index 0000000..d6e49d5
--- /dev/null
+++ b/superset-frontend/src/datasource/components/ImportModal/ImportModal.test.tsx
@@ -0,0 +1,106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+import React from 'react';
+import thunk from 'redux-thunk';
+import configureStore from 'redux-mock-store';
+import { styledMount as mount } from 'spec/helpers/theming';
+import { ReactWrapper } from 'enzyme';
+
+import ImportDatasetModal from 'src/datasource/components/ImportModal';
+import Modal from 'src/common/components/Modal';
+
+const mockStore = configureStore([thunk]);
+const store = mockStore({});
+
+const requiredProps = {
+ addDangerToast: () => {},
+ addSuccessToast: () => {},
+ onDatasetImport: () => {},
+ show: true,
+ onHide: () => {},
+};
+
+describe('ImportDatasetModal', () => {
+ let wrapper: ReactWrapper;
+
+ beforeEach(() => {
+ wrapper = mount(<ImportDatasetModal {...requiredProps} />, {
+ context: { store },
+ });
+ });
+
+ afterEach(() => {
+ jest.clearAllMocks();
+ });
+
+ it('renders', () => {
+ expect(wrapper.find(ImportDatasetModal)).toExist();
+ });
+
+ it('renders a Modal', () => {
+ expect(wrapper.find(Modal)).toExist();
+ });
+
+ it('renders "Import Dataset" header', () => {
+ expect(wrapper.find('h4').text()).toEqual('Import Dataset');
+ });
+
+ it('renders a label and a file input field', () => {
+ expect(wrapper.find('input[type="file"]')).toExist();
+ expect(wrapper.find('label')).toExist();
+ });
+
+ it('should attach the label to the input field', () => {
+ const id = 'datasetFile';
+ expect(wrapper.find('label').prop('htmlFor')).toBe(id);
+ expect(wrapper.find('input').prop('id')).toBe(id);
+ });
+
+ it('should render the close, import and cancel buttons', () => {
+ expect(wrapper.find('button')).toHaveLength(3);
+ });
+
+ it('should render the import button initially disabled', () => {
+ expect(wrapper.find('button[children="Import"]').prop('disabled')).toBe(
+ true,
+ );
+ });
+
+ it('should render the import button enabled when a file is selected', () => {
+ const file = new File([new ArrayBuffer(1)], 'dataset_export.zip');
+ wrapper.find('input').simulate('change', { target: { files: [file] } });
+
+ expect(wrapper.find('button[children="Import"]').prop('disabled')).toBe(
+ false,
+ );
+ });
+
+ it('should render password fields when needed for import', () => {
+ const wrapperWithPasswords = mount(
+ <ImportDatasetModal
+ {...requiredProps}
+ passwordFields={['datasets/examples.yaml']}
+ />,
+ {
+ context: { store },
+ },
+ );
+ expect(wrapperWithPasswords.find('input[type="password"]')).toExist();
+ });
+});
diff --git a/superset-frontend/src/datasource/components/ImportModal/index.tsx b/superset-frontend/src/datasource/components/ImportModal/index.tsx
new file mode 100644
index 0000000..d8599e8
--- /dev/null
+++ b/superset-frontend/src/datasource/components/ImportModal/index.tsx
@@ -0,0 +1,186 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+import React, { FunctionComponent, useEffect, useRef, useState } from 'react';
+import { t } from '@superset-ui/core';
+
+import Modal from 'src/common/components/Modal';
+import {
+ StyledIcon,
+ StyledInputContainer,
+} from 'src/views/CRUD/data/database/DatabaseModal';
+import { useImportResource } from 'src/views/CRUD/hooks';
+import { DatasetObject } from 'src/views/CRUD/data/dataset/types';
+
+export interface ImportDatasetModalProps {
+ addDangerToast: (msg: string) => void;
+ addSuccessToast: (msg: string) => void;
+ onDatasetImport: () => void;
+ show: boolean;
+ onHide: () => void;
+ passwordFields?: string[];
+ setPasswordFields?: (passwordFields: string[]) => void;
+}
+
+const ImportDatasetModal: FunctionComponent<ImportDatasetModalProps> = ({
+ addDangerToast,
+ addSuccessToast,
+ onDatasetImport,
+ show,
+ onHide,
+ passwordFields = [],
+ setPasswordFields = () => {},
+}) => {
+ const [uploadFile, setUploadFile] = useState<File | null>(null);
+ const [isHidden, setIsHidden] = useState<boolean>(true);
+ const [passwords, setPasswords] = useState<Record<string, string>>({});
+ const fileInputRef = useRef<HTMLInputElement>(null);
+
+ const clearModal = () => {
+ setUploadFile(null);
+ setPasswordFields([]);
+ setPasswords({});
+ if (fileInputRef && fileInputRef.current) {
+ fileInputRef.current.value = '';
+ }
+ };
+
+ const handleErrorMsg = (msg: string) => {
+ clearModal();
+ addDangerToast(msg);
+ };
+
+ const {
+ state: { passwordsNeeded },
+ importResource,
+  } = useImportResource<DatasetObject>('dataset', t('dataset'), handleErrorMsg);
+
+ useEffect(() => {
+ setPasswordFields(passwordsNeeded);
+ }, [passwordsNeeded]);
+
+ // Functions
+ const hide = () => {
+ setIsHidden(true);
+ onHide();
+ };
+
+ const onUpload = () => {
+ if (uploadFile === null) {
+ return;
+ }
+
+ importResource(uploadFile, passwords).then(result => {
+ if (result) {
+ addSuccessToast(t('The datasets have been imported'));
+ clearModal();
+ onDatasetImport();
+ }
+ });
+ };
+
+ const changeFile = (event: React.ChangeEvent<HTMLInputElement>) => {
+ const { files } = event.target as HTMLInputElement;
+ setUploadFile((files && files[0]) || null);
+ };
+
+ const renderPasswordFields = () => {
+ if (passwordFields.length === 0) {
+ return null;
+ }
+
+ return (
+ <>
+ <h5>Database passwords</h5>
+ <StyledInputContainer>
+ <div className="helper">
+ {t(
+ 'The passwords for the databases below are needed in order to ' +
+ 'import them together with the datasets. Please note that the ' +
+ '"Secure Extra" and "Certificate" sections of ' +
+ 'the database configuration are not present in export files, and ' +
+ 'should be added manually after the import if they are needed.',
+ )}
+ </div>
+ </StyledInputContainer>
+ {passwordFields.map(fileName => (
+ <StyledInputContainer key={`password-for-${fileName}`}>
+ <div className="control-label">
+ {fileName}
+ <span className="required">*</span>
+ </div>
+ <input
+ name={`password-${fileName}`}
+ autoComplete="off"
+ type="password"
+ value={passwords[fileName]}
+ onChange={event =>
+ setPasswords({ ...passwords, [fileName]: event.target.value })
+ }
+ />
+ </StyledInputContainer>
+ ))}
+ </>
+ );
+ };
+
+ // Show/hide
+ if (isHidden && show) {
+ setIsHidden(false);
+ }
+
+ return (
+ <Modal
+ name="dataset"
+ className="dataset-modal"
+ disablePrimaryButton={uploadFile === null}
+ onHandledPrimaryAction={onUpload}
+ onHide={hide}
+ primaryButtonName={t('Import')}
+ width="750px"
+ show={show}
+ title={
+ <h4>
+ <StyledIcon name="table" />
+ {t('Import Dataset')}
+ </h4>
+ }
+ >
+ <StyledInputContainer>
+ <div className="control-label">
+ <label htmlFor="datasetFile">
+ {t('File')}
+ <span className="required">*</span>
+ </label>
+ </div>
+ <input
+ ref={fileInputRef}
+ data-test="dataset-file-input"
+ name="datasetFile"
+ id="datasetFile"
+ type="file"
+ accept=".yaml,.json,.yml,.zip"
+ onChange={changeFile}
+ />
+ </StyledInputContainer>
+ {renderPasswordFields()}
+ </Modal>
+ );
+};
+
+export default ImportDatasetModal;
diff --git a/superset-frontend/src/views/CRUD/data/dataset/AddDatasetModal.tsx b/superset-frontend/src/views/CRUD/data/dataset/AddDatasetModal.tsx
index 8c5458d..0b72fb8 100644
--- a/superset-frontend/src/views/CRUD/data/dataset/AddDatasetModal.tsx
+++ b/superset-frontend/src/views/CRUD/data/dataset/AddDatasetModal.tsx
@@ -27,7 +27,7 @@ import { createErrorHandler } from 'src/views/CRUD/utils';
type DatasetAddObject = {
id: number;
- databse: number;
+ database: number;
schema: string;
table_name: string;
};
diff --git a/superset-frontend/src/views/CRUD/data/dataset/DatasetList.tsx b/superset-frontend/src/views/CRUD/data/dataset/DatasetList.tsx
index a01f947..34982c3 100644
--- a/superset-frontend/src/views/CRUD/data/dataset/DatasetList.tsx
+++ b/superset-frontend/src/views/CRUD/data/dataset/DatasetList.tsx
@@ -45,6 +45,8 @@ import TooltipWrapper from 'src/components/TooltipWrapper';
import Icon from 'src/components/Icon';
import FacePile from 'src/components/FacePile';
import CertifiedIconWithTooltip from 'src/components/CertifiedIconWithTooltip';
+import ImportDatasetModal from 'src/datasource/components/ImportModal/index';
+import { isFeatureEnabled, FeatureFlag } from 'src/featureFlags';
import AddDatasetModal from './AddDatasetModal';
const PAGE_SIZE = 25;
@@ -114,6 +116,22 @@ const DatasetList: FunctionComponent<DatasetListProps> = ({
setDatasetCurrentlyEditing,
] = useState<Dataset | null>(null);
+ const [importingDataset, showImportModal] = useState<boolean>(false);
+ const [passwordFields, setPasswordFields] = useState<string[]>([]);
+
+ const openDatasetImportModal = () => {
+ showImportModal(true);
+ };
+
+ const closeDatasetImportModal = () => {
+ showImportModal(false);
+ };
+
+ const handleDatasetImport = () => {
+ showImportModal(false);
+ refreshData();
+ };
+
const canEdit = hasPerm('can_edit');
const canDelete = hasPerm('can_delete');
const canCreate = hasPerm('can_add');
@@ -453,6 +471,14 @@ const DatasetList: FunctionComponent<DatasetListProps> = ({
});
}
+ if (isFeatureEnabled(FeatureFlag.VERSIONED_EXPORT)) {
+ buttonArr.push({
+ name: <Icon name="import" />,
+ buttonStyle: 'link',
+ onClick: openDatasetImportModal,
+ });
+ }
+
menuData.buttons = buttonArr;
const closeDatasetDeleteModal = () => {
@@ -620,6 +646,16 @@ const DatasetList: FunctionComponent<DatasetListProps> = ({
);
}}
</ConfirmStatusChange>
+
+ <ImportDatasetModal
+ show={importingDataset}
+ onHide={closeDatasetImportModal}
+ addDangerToast={addDangerToast}
+ addSuccessToast={addSuccessToast}
+ onDatasetImport={handleDatasetImport}
+ passwordFields={passwordFields}
+ setPasswordFields={setPasswordFields}
+ />
</>
);
};
diff --git a/superset-frontend/src/views/CRUD/data/dataset/types.ts b/superset-frontend/src/views/CRUD/data/dataset/types.ts
new file mode 100644
index 0000000..abf78d4
--- /dev/null
+++ b/superset-frontend/src/views/CRUD/data/dataset/types.ts
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+type ColumnObject = {
+ id: number;
+ column_name: string;
+ type: string;
+ verbose_name?: string;
+ description?: string;
+ expression?: string;
+ filterable: boolean;
+ groupby: boolean;
+ is_active: boolean;
+ is_dttm: boolean;
+ python_date_format?: string;
+ uuid?: string;
+};
+
+type MetricObject = {
+ id: number;
+ expression?: string;
+ description?: string;
+ metric_name: string;
+ metric_type: string;
+ d3format?: string;
+ warning_text?: string;
+};
+
+export type DatasetObject = {
+ table_name?: string;
+ sql?: string;
+ filter_select_enabled?: boolean;
+ fetch_values_predicate?: string;
+ schema?: string;
+ description?: string;
+ main_dttm_col?: string;
+ offset?: number;
+ default_endpoint?: string;
+ cache_timeout?: number;
+ is_sqllab_view?: boolean;
+ template_params?: string;
+ owners: number[];
+ columns: ColumnObject[];
+ metrics: MetricObject[];
+ extra?: string;
+};
diff --git a/superset/databases/commands/importers/v1/__init__.py b/superset/databases/commands/importers/v1/__init__.py
index 6d16649..cf9c4bc 100644
--- a/superset/databases/commands/importers/v1/__init__.py
+++ b/superset/databases/commands/importers/v1/__init__.py
@@ -15,7 +15,6 @@
# specific language governing permissions and limitations
# under the License.
-import urllib.parse
from typing import Any, Dict, List, Optional
from marshmallow import Schema, validate
@@ -48,11 +47,9 @@ class ImportDatabasesCommand(BaseCommand):
"""Import databases"""
# pylint: disable=unused-argument
- def __init__(
- self, contents: Dict[str, str], *args: Any, **kwargs: Any,
- ):
+ def __init__(self, contents: Dict[str, str], *args: Any, **kwargs: Any):
self.contents = contents
- self.passwords = kwargs.get("passwords") or {}
+ self.passwords: Dict[str, str] = kwargs.get("passwords") or {}
self._configs: Dict[str, Any] = {}
def _import_bundle(self, session: Session) -> None:
@@ -87,6 +84,14 @@ class ImportDatabasesCommand(BaseCommand):
def validate(self) -> None:
exceptions: List[ValidationError] = []
+ # load existing databases so we can apply the password validation
+ db_passwords = {
+ str(uuid): password
+ for uuid, password in db.session.query(
+ Database.uuid, Database.password
+ ).all()
+ }
+
# verify that the metadata file is present and valid
try:
metadata: Optional[Dict[str, str]] = load_metadata(self.contents)
@@ -94,14 +99,20 @@ class ImportDatabasesCommand(BaseCommand):
exceptions.append(exc)
metadata = None
+ # validate databases and dataset
for file_name, content in self.contents.items():
prefix = file_name.split("/")[0]
schema = schemas.get(f"{prefix}/")
if schema:
try:
config = load_yaml(file_name, content)
+
+ # populate passwords from the request or from existing DBs
if file_name in self.passwords:
config["password"] = self.passwords[file_name]
+ elif prefix == "databases" and config["uuid"] in db_passwords:
+ config["password"] = db_passwords[config["uuid"]]
+
schema.load(config)
self._configs[file_name] = config
except ValidationError as exc:
diff --git a/superset/datasets/api.py b/superset/datasets/api.py
index 134eb6e..855b6eb 100644
--- a/superset/datasets/api.py
+++ b/superset/datasets/api.py
@@ -14,6 +14,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
+import json
import logging
from datetime import datetime
from distutils.util import strtobool
@@ -656,7 +657,13 @@ class DatasetRestApi(BaseSupersetModelRestApi):
for file_name in bundle.namelist()
}
- command = ImportDatasetsCommand(contents)
+ passwords = (
+ json.loads(request.form["passwords"])
+ if "passwords" in request.form
+ else None
+ )
+
+ command = ImportDatasetsCommand(contents, passwords=passwords)
try:
command.run()
return self.response(200, message="OK")
diff --git a/superset/datasets/commands/importers/dispatcher.py b/superset/datasets/commands/importers/dispatcher.py
index b268463..f999b18 100644
--- a/superset/datasets/commands/importers/dispatcher.py
+++ b/superset/datasets/commands/importers/dispatcher.py
@@ -46,12 +46,14 @@ class ImportDatasetsCommand(BaseCommand):
# pylint: disable=unused-argument
def __init__(self, contents: Dict[str, str], *args: Any, **kwargs: Any):
self.contents = contents
+ self.args = args
+ self.kwargs = kwargs
def run(self) -> None:
# iterate over all commands until we find a version that can
# handle the contents
for version in command_versions:
- command = version(self.contents)
+ command = version(self.contents, *self.args, **self.kwargs)
try:
command.run()
return
diff --git a/superset/datasets/commands/importers/v0.py b/superset/datasets/commands/importers/v0.py
index 55f1a9c..df02a39 100644
--- a/superset/datasets/commands/importers/v0.py
+++ b/superset/datasets/commands/importers/v0.py
@@ -284,19 +284,17 @@ class ImportDatasetsCommand(BaseCommand):
in Superset.
"""
+ # pylint: disable=unused-argument
def __init__(
- self,
- contents: Dict[str, str],
- sync_columns: bool = False,
- sync_metrics: bool = False,
+ self, contents: Dict[str, str], *args: Any, **kwargs: Any,
):
self.contents = contents
self._configs: Dict[str, Any] = {}
self.sync = []
- if sync_columns:
+ if kwargs.get("sync_columns"):
self.sync.append("columns")
- if sync_metrics:
+ if kwargs.get("sync_metrics"):
self.sync.append("metrics")
def run(self) -> None:
diff --git a/superset/datasets/commands/importers/v1/__init__.py b/superset/datasets/commands/importers/v1/__init__.py
index 43ea458..200fdc3 100644
--- a/superset/datasets/commands/importers/v1/__init__.py
+++ b/superset/datasets/commands/importers/v1/__init__.py
@@ -35,6 +35,7 @@ from superset.databases.schemas import ImportV1DatabaseSchema
from superset.datasets.commands.exceptions import DatasetImportError
from superset.datasets.commands.importers.v1.utils import import_dataset
from superset.datasets.schemas import ImportV1DatasetSchema
+from superset.models.core import Database
schemas: Dict[str, Schema] = {
"databases/": ImportV1DatabaseSchema(),
@@ -49,6 +50,7 @@ class ImportDatasetsCommand(BaseCommand):
# pylint: disable=unused-argument
def __init__(self, contents: Dict[str, str], *args: Any, **kwargs: Any):
self.contents = contents
+ self.passwords: Dict[str, str] = kwargs.get("passwords") or {}
self._configs: Dict[str, Any] = {}
def _import_bundle(self, session: Session) -> None:
@@ -88,6 +90,14 @@ class ImportDatasetsCommand(BaseCommand):
def validate(self) -> None:
exceptions: List[ValidationError] = []
+ # load existing databases so we can apply the password validation
+ db_passwords = {
+ str(uuid): password
+ for uuid, password in db.session.query(
+ Database.uuid, Database.password
+ ).all()
+ }
+
# verify that the metadata file is present and valid
try:
metadata: Optional[Dict[str, str]] = load_metadata(self.contents)
@@ -95,12 +105,20 @@ class ImportDatasetsCommand(BaseCommand):
exceptions.append(exc)
metadata = None
+ # validate datasets and databases
for file_name, content in self.contents.items():
prefix = file_name.split("/")[0]
schema = schemas.get(f"{prefix}/")
if schema:
try:
config = load_yaml(file_name, content)
+
+ # populate passwords from the request or from existing DBs
+ if file_name in self.passwords:
+ config["password"] = self.passwords[file_name]
+ elif prefix == "databases" and config["uuid"] in db_passwords:
+ config["password"] = db_passwords[config["uuid"]]
+
schema.load(config)
self._configs[file_name] = config
except ValidationError as exc:
diff --git a/superset/datasets/schemas.py b/superset/datasets/schemas.py
index f32e8d5..373e49f 100644
--- a/superset/datasets/schemas.py
+++ b/superset/datasets/schemas.py
@@ -126,20 +126,20 @@ class DatasetRelatedObjectsResponse(Schema):
class ImportV1ColumnSchema(Schema):
column_name = fields.String(required=True)
- verbose_name = fields.String()
+ verbose_name = fields.String(allow_none=True)
is_dttm = fields.Boolean()
is_active = fields.Boolean(allow_none=True)
type = fields.String(required=True)
groupby = fields.Boolean()
filterable = fields.Boolean()
- expression = fields.String()
+ expression = fields.String(allow_none=True)
description = fields.String(allow_none=True)
python_date_format = fields.String(allow_none=True)
class ImportV1MetricSchema(Schema):
metric_name = fields.String(required=True)
- verbose_name = fields.String()
+ verbose_name = fields.String(allow_none=True)
metric_type = fields.String(allow_none=True)
expression = fields.String(required=True)
description = fields.String(allow_none=True)
@@ -151,12 +151,12 @@ class ImportV1MetricSchema(Schema):
class ImportV1DatasetSchema(Schema):
table_name = fields.String(required=True)
main_dttm_col = fields.String(allow_none=True)
- description = fields.String()
- default_endpoint = fields.String()
+ description = fields.String(allow_none=True)
+ default_endpoint = fields.String(allow_none=True)
offset = fields.Integer()
- cache_timeout = fields.Integer()
- schema = fields.String()
- sql = fields.String()
+ cache_timeout = fields.Integer(allow_none=True)
+ schema = fields.String(allow_none=True)
+ sql = fields.String(allow_none=True)
params = fields.String(allow_none=True)
template_params = fields.String(allow_none=True)
filter_select_enabled = fields.Boolean()