This is an automated email from the ASF dual-hosted git repository.

rusackas pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/superset.git


The following commit(s) were added to refs/heads/master by this push:
     new dec6ac49f6 chore(backend): Spelling (#19677)
dec6ac49f6 is described below

commit dec6ac49f6e60f3cb3bcda25694b3b9dcf2f41cc
Author: Josh Soref <[email protected]>
AuthorDate: Fri Feb 17 12:48:24 2023 -0500

    chore(backend): Spelling (#19677)
    
    Signed-off-by: Josh Soref <[email protected]>
    Signed-off-by: Josh Soref <[email protected]>
    Co-authored-by: Josh Soref <[email protected]>
    Co-authored-by: Evan Rusackas <[email protected]>
---
 docs/docs/miscellaneous/chart-params.mdx           |  2 +-
 docs/static/resources/openapi.json                 |  2 +-
 helm/superset/values.yaml                          |  4 ++--
 superset/cli/importexport.py                       |  2 +-
 superset/cli/update.py                             |  2 +-
 ...uthrized_object.py => not_authorized_object.py} |  0
 superset/common/query_object.py                    |  2 +-
 superset/config.py                                 |  4 ++--
 superset/connectors/sqla/models.py                 |  6 +++---
 superset/dao/base.py                               |  2 +-
 superset/dashboards/commands/importers/v1/utils.py |  2 +-
 superset/dashboards/filter_sets/commands/base.py   |  2 +-
 superset/db_engine_specs/base.py                   |  6 +++---
 superset/db_engine_specs/hive.py                   |  4 ++--
 superset/db_engine_specs/impala.py                 |  2 +-
 superset/db_engine_specs/kylin.py                  |  4 ++--
 .../dashboards/FCC_New_Coder_Survey_2018.yaml      |  4 ++--
 .../configs/datasets/examples/FCC_2018_Survey.yaml |  6 +++---
 superset/examples/multiformat_time_series.py       |  2 +-
 superset/explore/permalink/api.py                  |  2 +-
 .../versions/2017-10-03_14-37_4736ec66ce19_.py     |  4 ++--
 superset/models/helpers.py                         |  2 +-
 superset/reports/commands/exceptions.py            |  2 +-
 superset/security/manager.py                       |  2 +-
 superset/sql_parse.py                              |  2 +-
 superset/translations/en/LC_MESSAGES/messages.json |  4 ++--
 superset/translations/en/LC_MESSAGES/messages.po   |  6 +++---
 superset/translations/messages.pot                 |  4 ++--
 superset/translations/utils.py                     |  2 +-
 superset/utils/core.py                             |  2 +-
 superset/utils/date_parser.py                      |  2 +-
 .../utils/pandas_postprocessing/contribution.py    |  2 +-
 superset/utils/pandas_postprocessing/prophet.py    |  2 +-
 superset/views/base_schemas.py                     |  2 +-
 superset/views/core.py                             |  2 +-
 superset/views/database/views.py                   | 24 +++++++++++-----------
 superset/viz.py                                    | 10 ++++-----
 tests/integration_tests/conftest.py                |  2 +-
 tests/integration_tests/databases/api_tests.py     |  6 +++---
 tests/integration_tests/event_logger_tests.py      |  6 +++---
 40 files changed, 74 insertions(+), 74 deletions(-)

diff --git a/docs/docs/miscellaneous/chart-params.mdx b/docs/docs/miscellaneous/chart-params.mdx
index e157bf8b20..e7b8e26dc1 100644
--- a/docs/docs/miscellaneous/chart-params.mdx
+++ b/docs/docs/miscellaneous/chart-params.mdx
@@ -72,7 +72,7 @@ Note not all fields are correctly categorized. The fields vary based on visualiz
 | `columns`                                                                                               | _array(string)_                                   | The **Breakdowns** widget                         |
 | `groupby`                                                                                               | _array(string)_                                   | The **Group by** or **Series** widget             |
 | `limit`                                                                                                 | _number_                                          | The **Series Limit** widget                       |
-| `metric`<br/>`metric_2`<br/>`metrics`<br/>`percent_mertics`<br/>`secondary_metric`<br/>`size`<br/>`x`<br/>`y` | _string_,_object_,_array(string)_,_array(object)_ | The metric(s) depending on the visualization type |
+| `metric`<br/>`metric_2`<br/>`metrics`<br/>`percent_metrics`<br/>`secondary_metric`<br/>`size`<br/>`x`<br/>`y` | _string_,_object_,_array(string)_,_array(object)_ | The metric(s) depending on the visualization type |
 | `order_asc`                                                                                             | _boolean_                                         | The **Sort Descending** widget                    |
 | `row_limit`                                                                                             | _number_                                          | The **Row limit** widget                          |
 | `timeseries_limit_metric`                                                                               | _object_                                          | The **Sort By** widget                            |
diff --git a/docs/static/resources/openapi.json b/docs/static/resources/openapi.json
index 18ea7a47f8..f303b83784 100644
--- a/docs/static/resources/openapi.json
+++ b/docs/static/resources/openapi.json
@@ -17285,7 +17285,7 @@
                       "type": "string"
                     },
                     "url": {
-                      "description": "pemanent link.",
+                      "description": "permanent link.",
                       "type": "string"
                     }
                   },
diff --git a/helm/superset/values.yaml b/helm/superset/values.yaml
index a212754e39..03238faf01 100644
--- a/helm/superset/values.yaml
+++ b/helm/superset/values.yaml
@@ -723,7 +723,7 @@ postgresql:
       ## Enable PostgreSQL persistence using Persistent Volume Claims.
       enabled: true
       ##
-      ## Persistant class
+      ## Persistent class
       # storageClass: classname
       ##
       ## Access modes:
@@ -778,7 +778,7 @@ redis:
       ## Use a PVC to persist data.
       enabled: false
       ##
-      ## Persistant class
+      ## Persistent class
       # storageClass: classname
       ##
       ## Access mode:
diff --git a/superset/cli/importexport.py b/superset/cli/importexport.py
index 6ca58e9952..c7689569c2 100755
--- a/superset/cli/importexport.py
+++ b/superset/cli/importexport.py
@@ -342,7 +342,7 @@ else:
         "-s",
         "sync",
         default="",
-        help="comma seperated list of element types to synchronize "
+        help="comma separated list of element types to synchronize "
         'e.g. "metrics,columns" deletes metrics and columns in the DB '
         "that are not specified in the YAML file",
     )
diff --git a/superset/cli/update.py b/superset/cli/update.py
index bdc54db3a9..d2d416e1d9 100755
--- a/superset/cli/update.py
+++ b/superset/cli/update.py
@@ -120,7 +120,7 @@ def re_encrypt_secrets(previous_secret_key: Optional[str] = None) -> None:
     except ValueError as exc:
         click.secho(
             f"An error occurred, "
-            f"probably an invalid previoud secret key was provided. 
Error:[{exc}]",
+            f"probably an invalid previous secret key was provided. 
Error:[{exc}]",
             err=True,
         )
         sys.exit(1)
diff --git a/superset/common/not_authrized_object.py b/superset/common/not_authorized_object.py
similarity index 100%
rename from superset/common/not_authrized_object.py
rename to superset/common/not_authorized_object.py
diff --git a/superset/common/query_object.py b/superset/common/query_object.py
index 94cf2a74cc..70f50c3e77 100644
--- a/superset/common/query_object.py
+++ b/superset/common/query_object.py
@@ -360,7 +360,7 @@ class QueryObject:  # pylint: disable=too-many-instance-attributes
 
         # TODO: the below KVs can all be cleaned up and moved to `to_dict()` at some
         #  predetermined point in time when orgs are aware that the previously
-        #  chached results will be invalidated.
+        #  cached results will be invalidated.
         if not self.apply_fetch_values_predicate:
             del cache_dict["apply_fetch_values_predicate"]
         if self.datasource:
diff --git a/superset/config.py b/superset/config.py
index ab23da0c29..5a0aea77fb 100644
--- a/superset/config.py
+++ b/superset/config.py
@@ -198,7 +198,7 @@ SQLALCHEMY_DATABASE_URI = "sqlite:///" + os.path.join(DATA_DIR, "superset.db")
 # SQLALCHEMY_DATABASE_URI = 'mysql://myapp@localhost/myapp'
 # SQLALCHEMY_DATABASE_URI = 'postgresql://root:password@localhost/myapp'
 
-# In order to hook up a custom password store for all SQLACHEMY connections
+# In order to hook up a custom password store for all SQLALCHEMY connections
 # implement a function that takes a single argument of type 'sqla.engine.url',
 # returns a password and set SQLALCHEMY_CUSTOM_PASSWORD_STORE.
 #
@@ -411,7 +411,7 @@ DEFAULT_FEATURE_FLAGS: Dict[str, bool] = {
     "ENABLE_TEMPLATE_REMOVE_FILTERS": False,
     # Allow for javascript controls components
     # this enables programmers to customize certain charts (like the
-    # geospatial ones) by inputing javascript in controls. This exposes
+    # geospatial ones) by inputting javascript in controls. This exposes
     # an XSS security vulnerability
     "ENABLE_JAVASCRIPT_CONTROLS": False,
     "KV_STORE": False,
diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py
index 8be079bde2..7ad5ad0312 100644
--- a/superset/connectors/sqla/models.py
+++ b/superset/connectors/sqla/models.py
@@ -755,7 +755,7 @@ class SqlaTable(Model, BaseDatasource):  # pylint: disable=too-many-public-metho
         return self.database.sql_url + "?table_name=" + str(self.table_name)
 
     def external_metadata(self) -> List[Dict[str, str]]:
-        # todo(yongjie): create a pysical table column type in seprated PR
+        # todo(yongjie): create a physical table column type in a separate PR
         if self.sql:
             return get_virtual_table_metadata(dataset=self)  # type: ignore
         return get_physical_table_metadata(
@@ -1249,7 +1249,7 @@ class SqlaTable(Model, BaseDatasource):  # pylint: disable=too-many-public-metho
         if metrics_exprs:
             main_metric_expr = metrics_exprs[0]
         else:
-            main_metric_expr, label = literal_column("COUNT(*)"), "ccount"
+            main_metric_expr, label = literal_column("COUNT(*)"), "count"
             main_metric_expr = self.make_sqla_column_compatible(main_metric_expr, label)
 
         # To ensure correct handling of the ORDER BY labeling we need to reference the
@@ -1419,7 +1419,7 @@ class SqlaTable(Model, BaseDatasource):  # pylint: disable=too-many-public-metho
 
         # Order by columns are "hidden" columns, some databases require them
         # always be present in SELECT if an aggregation function is used
-        if not db_engine_spec.allows_hidden_ordeby_agg:
+        if not db_engine_spec.allows_hidden_orderby_agg:
             select_exprs = remove_duplicates(select_exprs + orderby_exprs)
 
         qry = sa.select(select_exprs)
diff --git a/superset/dao/base.py b/superset/dao/base.py
index 126238f661..28cfdf2cc6 100644
--- a/superset/dao/base.py
+++ b/superset/dao/base.py
@@ -44,7 +44,7 @@ class BaseDAO:
     """
     base_filter: Optional[BaseFilter] = None
     """
-    Child classes can register base filtering to be aplied to all filter methods
+    Child classes can register base filtering to be applied to all filter methods
     """
     id_column_name = "id"
 
diff --git a/superset/dashboards/commands/importers/v1/utils.py b/superset/dashboards/commands/importers/v1/utils.py
index 513d1efcdb..cd54c3cefe 100644
--- a/superset/dashboards/commands/importers/v1/utils.py
+++ b/superset/dashboards/commands/importers/v1/utils.py
@@ -79,7 +79,7 @@ def update_id_refs(  # pylint: disable=too-many-locals
         ]
 
     if "filter_scopes" in metadata:
-        # in filter_scopes the key is the chart ID as a string; we need to udpate
+        # in filter_scopes the key is the chart ID as a string; we need to update
         # them to be the new ID as a string:
         metadata["filter_scopes"] = {
             str(id_map[int(old_id)]): columns
diff --git a/superset/dashboards/filter_sets/commands/base.py b/superset/dashboards/filter_sets/commands/base.py
index e6a4b03e3f..a7897eca8e 100644
--- a/superset/dashboards/filter_sets/commands/base.py
+++ b/superset/dashboards/filter_sets/commands/base.py
@@ -20,7 +20,7 @@ from typing import cast, Optional
 from flask_appbuilder.models.sqla import Model
 
 from superset import security_manager
-from superset.common.not_authrized_object import NotAuthorizedException
+from superset.common.not_authorized_object import NotAuthorizedException
 from superset.dashboards.commands.exceptions import DashboardNotFoundError
 from superset.dashboards.dao import DashboardDAO
 from superset.dashboards.filter_sets.commands.exceptions import (
diff --git a/superset/db_engine_specs/base.py b/superset/db_engine_specs/base.py
index 49a01c0597..128ce511be 100644
--- a/superset/db_engine_specs/base.py
+++ b/superset/db_engine_specs/base.py
@@ -121,7 +121,7 @@ class TimestampExpression(
 ):  # pylint: disable=abstract-method, too-many-ancestors
     def __init__(self, expr: str, col: ColumnClause, **kwargs: Any) -> None:
         """Sqlalchemy class that can be can be used to render native column 
elements
-        respeting engine-specific quoting rules as part of a string-based 
expression.
+        respecting engine-specific quoting rules as part of a string-based 
expression.
 
         :param expr: Sql expression with '{col}' denoting the locations where the col
         object will be rendered.
@@ -331,9 +331,9 @@ class BaseEngineSpec:  # pylint: disable=too-many-public-methods
 
     # Whether ORDER BY clause must appear in SELECT
     # if TRUE, then it doesn't have to.
-    allows_hidden_ordeby_agg = True
+    allows_hidden_orderby_agg = True
 
-    # Whether ORDER BY clause can use sql caculated expression
+    # Whether ORDER BY clause can use sql calculated expression
     # if True, use alias of select column for `order by`
     # the True is safely for most database
     # But for backward compatibility, False by default
diff --git a/superset/db_engine_specs/hive.py b/superset/db_engine_specs/hive.py
index c049ee652e..63c531e843 100644
--- a/superset/db_engine_specs/hive.py
+++ b/superset/db_engine_specs/hive.py
@@ -96,7 +96,7 @@ class HiveEngineSpec(PrestoEngineSpec):
     engine_name = "Apache Hive"
     max_column_name_length = 767
     allows_alias_to_source_column = True
-    allows_hidden_ordeby_agg = False
+    allows_hidden_orderby_agg = False
 
     # When running `SHOW FUNCTIONS`, what is the name of the column with the
     # function names?
@@ -491,7 +491,7 @@ class HiveEngineSpec(PrestoEngineSpec):
         :param username: Effective username
         """
         # Do nothing in the URL object since instead this should modify
-        # the configuraiton dictionary. See get_configuration_for_impersonation
+        # the configuration dictionary. See get_configuration_for_impersonation
         return url
 
     @classmethod
diff --git a/superset/db_engine_specs/impala.py b/superset/db_engine_specs/impala.py
index 5de1e690c6..e59c2b74fb 100644
--- a/superset/db_engine_specs/impala.py
+++ b/superset/db_engine_specs/impala.py
@@ -80,7 +80,7 @@ class ImpalaEngineSpec(BaseEngineSpec):
     def has_implicit_cancel(cls) -> bool:
         """
         Return True if the live cursor handles the implicit cancelation of the query,
-        False otherise.
+        False otherwise.
 
         :return: Whether the live cursor implicitly cancels the query
         :see: handle_cursor
diff --git a/superset/db_engine_specs/kylin.py b/superset/db_engine_specs/kylin.py
index d76811e86c..e340daea51 100644
--- a/superset/db_engine_specs/kylin.py
+++ b/superset/db_engine_specs/kylin.py
@@ -49,6 +49,6 @@ class KylinEngineSpec(BaseEngineSpec):  # pylint: disable=abstract-method
         if isinstance(sqla_type, types.Date):
             return f"CAST('{dttm.date().isoformat()}' AS DATE)"
         if isinstance(sqla_type, types.TIMESTAMP):
-            datetime_fomatted = dttm.isoformat(sep=" ", timespec="seconds")
-            return f"""CAST('{datetime_fomatted}' AS TIMESTAMP)"""
+            datetime_formatted = dttm.isoformat(sep=" ", timespec="seconds")
+            return f"""CAST('{datetime_formatted}' AS TIMESTAMP)"""
         return None
diff --git a/superset/examples/configs/dashboards/FCC_New_Coder_Survey_2018.yaml b/superset/examples/configs/dashboards/FCC_New_Coder_Survey_2018.yaml
index a007c5ffc8..f7cfedd84d 100644
--- a/superset/examples/configs/dashboards/FCC_New_Coder_Survey_2018.yaml
+++ b/superset/examples/configs/dashboards/FCC_New_Coder_Survey_2018.yaml
@@ -504,7 +504,7 @@ position:
       code: '## FreeCodeCamp New Coder Survey 2018
 
 
-        Every year, FCC surveys its user base (mostly budding softwware developers)
+        Every year, FCC surveys its user base (mostly budding software developers)
         to learn more about their interests, backgrounds, goals, job status, and socioeconomic
         features. This dashboard visualizes survey data from the 2018 survey.
 
@@ -529,7 +529,7 @@ position:
     meta:
       code: "# Demographics\n\nFreeCodeCamp is a completely-online community 
of people\
         \ learning to code and consists of aspiring & current developers from 
all\
-        \ over the world. That doesn't necessarily mean that acccess to these 
types\
+        \ over the world. That doesn't necessarily mean that access to these 
types\
         \ of opportunities are evenly distributed. \n\nThe following charts 
can begin\
         \ to help us understand:\n\n- the original citizenship of the survey 
respondents\n\
         - minority representation among both aspiring and current 
developers\n- their\
diff --git a/superset/examples/configs/datasets/examples/FCC_2018_Survey.yaml b/superset/examples/configs/datasets/examples/FCC_2018_Survey.yaml
index 26890b5ebb..5bbbe2f74b 100644
--- a/superset/examples/configs/datasets/examples/FCC_2018_Survey.yaml
+++ b/superset/examples/configs/datasets/examples/FCC_2018_Survey.yaml
@@ -930,7 +930,7 @@ columns:
   expression: null
   description: null
   python_date_format: null
-- column_name: rsrc_pluralsght
+- column_name: rsrc_pluralsight
   verbose_name: null
   is_dttm: false
   is_active: null
@@ -1320,7 +1320,7 @@ columns:
   expression: null
   description: null
   python_date_format: null
-- column_name: curr_emplymnt_other
+- column_name: curr_employment_other
   verbose_name: null
   is_dttm: false
   is_active: null
@@ -1450,7 +1450,7 @@ columns:
   expression: null
   description: null
   python_date_format: null
-- column_name: curr_emplymnt
+- column_name: curr_employment
   verbose_name: null
   is_dttm: false
   is_active: null
diff --git a/superset/examples/multiformat_time_series.py b/superset/examples/multiformat_time_series.py
index b030bcdb0f..de9630ef58 100644
--- a/superset/examples/multiformat_time_series.py
+++ b/superset/examples/multiformat_time_series.py
@@ -98,7 +98,7 @@ def load_multiformat_time_series(  # pylint: disable=too-many-locals
     for col in obj.columns:
         dttm_and_expr = dttm_and_expr_dict[col.column_name]
         col.python_date_format = dttm_and_expr[0]
-        col.dbatabase_expr = dttm_and_expr[1]
+        col.database_expression = dttm_and_expr[1]
         col.is_dttm = True
     db.session.merge(obj)
     db.session.commit()
diff --git a/superset/explore/permalink/api.py b/superset/explore/permalink/api.py
index 88e819aa2b..0d12f59e2c 100644
--- a/superset/explore/permalink/api.py
+++ b/superset/explore/permalink/api.py
@@ -83,7 +83,7 @@ class ExplorePermalinkRestApi(BaseSupersetApi):
                         description: The key to retrieve the permanent link data.
                       url:
                         type: string
-                        description: pemanent link.
+                        description: permanent link.
             400:
               $ref: '#/components/responses/400'
             401:
diff --git a/superset/migrations/versions/2017-10-03_14-37_4736ec66ce19_.py b/superset/migrations/versions/2017-10-03_14-37_4736ec66ce19_.py
index 99b3e0b5e3..7fe94f3c8a 100644
--- a/superset/migrations/versions/2017-10-03_14-37_4736ec66ce19_.py
+++ b/superset/migrations/versions/2017-10-03_14-37_4736ec66ce19_.py
@@ -97,7 +97,7 @@ def upgrade():
 
             # Drop the datasource_name column and associated constraints. Note
             # due to prior revisions (1226819ee0e3, 3b626e2a6783) there may
-            # incorectly be multiple duplicate constraints.
+            # incorrectly be multiple duplicate constraints.
             names = generic_find_fk_constraint_names(
                 foreign, {"datasource_name"}, "datasources", insp
             )
@@ -124,7 +124,7 @@ def upgrade():
         logging.warning(
             "Constraint drop failed, you may want to do this "
             "manually on your database. For context, this is a known "
-            "issue around undeterministic contraint names on Postgres "
+            "issue around nondeterministic constraint names on Postgres "
             "and perhaps more databases through SQLAlchemy."
         )
         logging.exception(ex)
diff --git a/superset/models/helpers.py b/superset/models/helpers.py
index 15b7a420a0..5cc80576d0 100644
--- a/superset/models/helpers.py
+++ b/superset/models/helpers.py
@@ -1620,7 +1620,7 @@ class ExploreMixin:  # pylint: disable=too-many-public-methods
 
         # Order by columns are "hidden" columns, some databases require them
         # always be present in SELECT if an aggregation function is used
-        if not db_engine_spec.allows_hidden_ordeby_agg:
+        if not db_engine_spec.allows_hidden_orderby_agg:
             select_exprs = utils.remove_duplicates(select_exprs + orderby_exprs)
 
         qry = sa.select(select_exprs)
diff --git a/superset/reports/commands/exceptions.py b/superset/reports/commands/exceptions.py
index b908042f19..22aff0727d 100644
--- a/superset/reports/commands/exceptions.py
+++ b/superset/reports/commands/exceptions.py
@@ -77,7 +77,7 @@ class ReportScheduleRequiredTypeValidationError(ValidationError):
 
 class ReportScheduleOnlyChartOrDashboardError(ValidationError):
     """
-    Marshmallow validation error for report schedule accept exlusive chart or dashboard
+    Marshmallow validation error for report schedule accept exclusive chart or dashboard
     """
 
     def __init__(self) -> None:
diff --git a/superset/security/manager.py b/superset/security/manager.py
index 4e174c420d..b3fa1a6c53 100644
--- a/superset/security/manager.py
+++ b/superset/security/manager.py
@@ -767,7 +767,7 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
 
     def _get_pvms_from_builtin_role(self, role_name: str) -> List[PermissionView]:
         """
-        Gets a list of model PermissionView permissions infered from a builtin role
+        Gets a list of model PermissionView permissions inferred from a builtin role
         definition
         """
         role_from_permissions_names = self.builtin_roles.get(role_name, [])
diff --git a/superset/sql_parse.py b/superset/sql_parse.py
index ab2f044172..ef2c38ccfa 100644
--- a/superset/sql_parse.py
+++ b/superset/sql_parse.py
@@ -217,7 +217,7 @@ class ParsedQuery:
         return self._limit
 
     def is_select(self) -> bool:
-        # make sure we strip comments; prevents a bug with coments in the CTE
+        # make sure we strip comments; prevents a bug with comments in the CTE
         parsed = sqlparse.parse(self.strip_comments())
         if parsed[0].get_type() == "SELECT":
             return True
diff --git a/superset/translations/en/LC_MESSAGES/messages.json b/superset/translations/en/LC_MESSAGES/messages.json
index f344b2c2ec..50d30c5e24 100644
--- a/superset/translations/en/LC_MESSAGES/messages.json
+++ b/superset/translations/en/LC_MESSAGES/messages.json
@@ -481,7 +481,7 @@
       "Alert fired during grace period.": [""],
       "Alert ended grace period.": [""],
       "Alert on grace period": [""],
-      "Report Schedule sellenium user not found": [""],
+      "Report Schedule selenium user not found": [""],
       "Report Schedule state not found": [""],
       "Report schedule unexpected error": [""],
       "Changing this report is forbidden": [""],
@@ -526,7 +526,7 @@
       ],
       "Request Permissions": [""],
       "Cancel": [""],
-      "Use the edit buttom to change this field": [""],
+      "Use the edit button to change this field": [""],
       "Test Connection": [""],
       "[Superset] Access to the datasource %(name)s was granted": [""],
       "Unable to find such a holiday: [{}]": [""],
diff --git a/superset/translations/en/LC_MESSAGES/messages.po b/superset/translations/en/LC_MESSAGES/messages.po
index 6faf2b1c75..d5b79fd88c 100644
--- a/superset/translations/en/LC_MESSAGES/messages.po
+++ b/superset/translations/en/LC_MESSAGES/messages.po
@@ -7879,7 +7879,7 @@ msgstr ""
 #: superset/views/core.py:2075
 msgid ""
 "One or more required fields are missing in the request. Please try again,"
-" and if the problem persists conctact your administrator."
+" and if the problem persists contact your administrator."
 msgstr ""
 
 #: superset-frontend/src/dashboard/components/SliceHeader/index.tsx:46
@@ -9228,7 +9228,7 @@ msgid "Report Schedule reached a working timeout."
 msgstr ""
 
 #: superset/reports/commands/exceptions.py:226
-msgid "Report Schedule sellenium user not found"
+msgid "Report Schedule selenium user not found"
 msgstr ""
 
 #: superset/reports/commands/exceptions.py:230
@@ -13112,7 +13112,7 @@ msgid ""
 msgstr ""
 
 #: superset/templates/superset/fab_overrides/list_with_checkboxes.html:82
-msgid "Use the edit buttom to change this field"
+msgid "Use the edit button to change this field"
 msgstr ""
 
#: superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/index.tsx:176
diff --git a/superset/translations/messages.pot b/superset/translations/messages.pot
index ad475ef1e3..79e2d0584c 100644
--- a/superset/translations/messages.pot
+++ b/superset/translations/messages.pot
@@ -7886,7 +7886,7 @@ msgstr ""
 #: superset/views/core.py:2075
 msgid ""
 "One or more required fields are missing in the request. Please try again,"
-" and if the problem persists conctact your administrator."
+" and if the problem persists contact your administrator."
 msgstr ""
 
 #: superset-frontend/src/dashboard/components/SliceHeader/index.tsx:46
@@ -9235,7 +9235,7 @@ msgid "Report Schedule reached a working timeout."
 msgstr ""
 
 #: superset/reports/commands/exceptions.py:226
-msgid "Report Schedule sellenium user not found"
+msgid "Report Schedule selenium user not found"
 msgstr ""
 
 #: superset/reports/commands/exceptions.py:230
diff --git a/superset/translations/utils.py b/superset/translations/utils.py
index 25a698f0e1..79d01539a1 100644
--- a/superset/translations/utils.py
+++ b/superset/translations/utils.py
@@ -27,7 +27,7 @@ DIR = os.path.dirname(os.path.abspath(__file__))
 def get_language_pack(locale: str) -> Optional[Dict[str, Any]]:
     """Get/cache a language pack
 
-    Returns the langugage pack from cache if it exists, caches otherwise
+    Returns the language pack from cache if it exists, caches otherwise
 
     >>> get_language_pack('fr')['Dashboards']
     "Tableaux de bords"
diff --git a/superset/utils/core.py b/superset/utils/core.py
index 6f86372f75..06f2f63df1 100644
--- a/superset/utils/core.py
+++ b/superset/utils/core.py
@@ -1700,7 +1700,7 @@ def get_column_name_from_metric(metric: Metric) -> Optional[str]:
 
 def get_column_names_from_metrics(metrics: List[Metric]) -> List[str]:
     """
-    Extract the columns that a list of metrics are referencing. Expcludes all
+    Extract the columns that a list of metrics are referencing. Excludes all
     SQL metrics.
 
     :param metrics: Ad-hoc metric
diff --git a/superset/utils/date_parser.py b/superset/utils/date_parser.py
index 7e79c72f1e..72c32bba4e 100644
--- a/superset/utils/date_parser.py
+++ b/superset/utils/date_parser.py
@@ -153,7 +153,7 @@ def get_since_until(  # pylint: disable=too-many-arguments,too-many-locals,too-m
     """Return `since` and `until` date time tuple from string representations of
     time_range, since, until and time_shift.
 
-    This functiom supports both reading the keys separately (from `since` and
+    This function supports both reading the keys separately (from `since` and
     `until`), as well as the new `time_range` key. Valid formats are:
 
         - ISO 8601
diff --git a/superset/utils/pandas_postprocessing/contribution.py b/superset/utils/pandas_postprocessing/contribution.py
index 2bfc6f4be6..f8519f39a9 100644
--- a/superset/utils/pandas_postprocessing/contribution.py
+++ b/superset/utils/pandas_postprocessing/contribution.py
@@ -35,7 +35,7 @@ def contribution(
     rename_columns: Optional[List[str]] = None,
 ) -> DataFrame:
     """
-    Calculate cell contibution to row/column total for numeric columns.
+    Calculate cell contribution to row/column total for numeric columns.
     Non-numeric columns will be kept untouched.
 
     If `columns` are specified, only calculate contributions on selected columns.
diff --git a/superset/utils/pandas_postprocessing/prophet.py b/superset/utils/pandas_postprocessing/prophet.py
index d66298b179..6d733296ad 100644
--- a/superset/utils/pandas_postprocessing/prophet.py
+++ b/superset/utils/pandas_postprocessing/prophet.py
@@ -120,7 +120,7 @@ def prophet(  # pylint: disable=too-many-arguments
             )
         )
     freq = PROPHET_TIME_GRAIN_MAP[time_grain]
-    # check type at runtime due to marhsmallow schema not being able to handle
+    # check type at runtime due to marshmallow schema not being able to handle
     # union types
     if not isinstance(periods, int) or periods < 0:
         raise InvalidPostProcessingError(_("Periods must be a whole number"))
diff --git a/superset/views/base_schemas.py b/superset/views/base_schemas.py
index 778f737fe0..8f4ed7735c 100644
--- a/superset/views/base_schemas.py
+++ b/superset/views/base_schemas.py
@@ -40,7 +40,7 @@ def validate_owner(value: int) -> None:
 class BaseSupersetSchema(Schema):
     """
     Extends Marshmallow schema so that we can pass a Model to load
-    (following marshamallow-sqlalchemy pattern). This is useful
+    (following marshmallow-sqlalchemy pattern). This is useful
     to perform partial model merges on HTTP PUT
     """
 
diff --git a/superset/views/core.py b/superset/views/core.py
index d3dfdb017c..7ee053e881 100755
--- a/superset/views/core.py
+++ b/superset/views/core.py
@@ -1977,7 +1977,7 @@ class Superset(BaseSupersetView):  # pylint: disable=too-many-public-methods
             raise SupersetGenericErrorException(
                 __(
                     "One or more required fields are missing in the request. 
Please try "
-                    "again, and if the problem persists conctact your 
administrator."
+                    "again, and if the problem persists contact your 
administrator."
                 ),
                 status=400,
             ) from ex
diff --git a/superset/views/database/views.py b/superset/views/database/views.py
index 5f94fe6fc8..037128ee16 100644
--- a/superset/views/database/views.py
+++ b/superset/views/database/views.py
@@ -232,10 +232,10 @@ class CsvToDatabaseView(CustomFormView):
             # Connect table to the database that should be used for exploration.
             # E.g. if hive was used to upload a csv, presto will be a better option
             # to explore the table.
-            expore_database = database
+            explore_database = database
             explore_database_id = database.explore_database_id
             if explore_database_id:
-                expore_database = (
+                explore_database = (
                     db.session.query(models.Database)
                     .filter_by(id=explore_database_id)
                     .one_or_none()
@@ -247,7 +247,7 @@ class CsvToDatabaseView(CustomFormView):
                 .filter_by(
                     table_name=csv_table.table,
                     schema=csv_table.schema,
-                    database_id=expore_database.id,
+                    database_id=explore_database.id,
                 )
                 .one_or_none()
             )
@@ -256,7 +256,7 @@ class CsvToDatabaseView(CustomFormView):
                 sqla_table.fetch_metadata()
             if not sqla_table:
                 sqla_table = SqlaTable(table_name=csv_table.table)
-                sqla_table.database = expore_database
+                sqla_table.database = explore_database
                 sqla_table.database_id = database.id
                 sqla_table.owners = [g.user]
                 sqla_table.schema = csv_table.schema
@@ -369,10 +369,10 @@ class ExcelToDatabaseView(SimpleFormView):
             # Connect table to the database that should be used for exploration.
             # E.g. if hive was used to upload a excel, presto will be a better option
             # to explore the table.
-            expore_database = database
+            explore_database = database
             explore_database_id = database.explore_database_id
             if explore_database_id:
-                expore_database = (
+                explore_database = (
                     db.session.query(models.Database)
                     .filter_by(id=explore_database_id)
                     .one_or_none()
@@ -384,7 +384,7 @@ class ExcelToDatabaseView(SimpleFormView):
                 .filter_by(
                     table_name=excel_table.table,
                     schema=excel_table.schema,
-                    database_id=expore_database.id,
+                    database_id=explore_database.id,
                 )
                 .one_or_none()
             )
@@ -393,7 +393,7 @@ class ExcelToDatabaseView(SimpleFormView):
                 sqla_table.fetch_metadata()
             if not sqla_table:
                 sqla_table = SqlaTable(table_name=excel_table.table)
-                sqla_table.database = expore_database
+                sqla_table.database = explore_database
                 sqla_table.database_id = database.id
                 sqla_table.owners = [g.user]
                 sqla_table.schema = excel_table.schema
@@ -510,10 +510,10 @@ class ColumnarToDatabaseView(SimpleFormView):
             # Connect table to the database that should be used for exploration.
             # E.g. if hive was used to upload a csv, presto will be a better option
             # to explore the table.
-            expore_database = database
+            explore_database = database
             explore_database_id = database.explore_database_id
             if explore_database_id:
-                expore_database = (
+                explore_database = (
                     db.session.query(models.Database)
                     .filter_by(id=explore_database_id)
                     .one_or_none()
@@ -525,7 +525,7 @@ class ColumnarToDatabaseView(SimpleFormView):
                 .filter_by(
                     table_name=columnar_table.table,
                     schema=columnar_table.schema,
-                    database_id=expore_database.id,
+                    database_id=explore_database.id,
                 )
                 .one_or_none()
             )
@@ -534,7 +534,7 @@ class ColumnarToDatabaseView(SimpleFormView):
                 sqla_table.fetch_metadata()
             if not sqla_table:
                 sqla_table = SqlaTable(table_name=columnar_table.table)
-                sqla_table.database = expore_database
+                sqla_table.database = explore_database
                 sqla_table.database_id = database.id
                 sqla_table.owners = [g.user]
                 sqla_table.schema = columnar_table.schema
diff --git a/superset/viz.py b/superset/viz.py
index 1f4c795325..d8f0dc342b 100644
--- a/superset/viz.py
+++ b/superset/viz.py
@@ -454,7 +454,7 @@ class BaseViz:  # pylint: disable=too-many-public-methods
         "5 days ago" or "now").
 
         The `extra` arguments are currently used by time shift queries, since
-        different time shifts wil differ only in the `from_dttm`, `to_dttm`,
+        different time shifts will differ only in the `from_dttm`, `to_dttm`,
         `inner_from_dttm`, and `inner_to_dttm` values which are stripped.
         """
         cache_dict = copy.copy(query_obj)
@@ -1708,9 +1708,9 @@ class NVD3TimePivotViz(NVD3TimeSeriesViz):
             values=utils.get_metric_name(self.form_data["metric"]),
         )
         chart_data = self.to_series(df)
-        for serie in chart_data:
-            serie["rank"] = rank_lookup[serie["key"]]
-            serie["perc"] = 1 - (serie["rank"] / (max_rank + 1))
+        for series in chart_data:
+            series["rank"] = rank_lookup[series["key"]]
+            series["perc"] = 1 - (series["rank"] / (max_rank + 1))
         return chart_data
 
 
@@ -2020,7 +2020,7 @@ class ChordViz(BaseViz):
 
         df.columns = ["source", "target", "value"]
 
-        # Preparing a symetrical matrix like d3.chords calls for
+        # Preparing a symmetrical matrix like d3.chords calls for
         nodes = list(set(df["source"]) | set(df["target"]))
         matrix = {}
         for source, target in product(nodes, nodes):
diff --git a/tests/integration_tests/conftest.py b/tests/integration_tests/conftest.py
index 5c132381b1..0ea5bb5106 100644
--- a/tests/integration_tests/conftest.py
+++ b/tests/integration_tests/conftest.py
@@ -134,7 +134,7 @@ def setup_sample_data() -> Any:
     yield
 
     with app.app_context():
-        # drop sqlachemy tables
+        # drop sqlalchemy tables
 
         db.session.commit()
         from sqlalchemy.ext import declarative
diff --git a/tests/integration_tests/databases/api_tests.py b/tests/integration_tests/databases/api_tests.py
index f4968edae9..b015e4c59b 100644
--- a/tests/integration_tests/databases/api_tests.py
+++ b/tests/integration_tests/databases/api_tests.py
@@ -884,9 +884,9 @@ class TestDatabaseApi(SupersetTestCase):
         "superset.views.core.app.config",
         {**app.config, "PREVENT_UNSAFE_DB_CONNECTIONS": True},
     )
-    def test_create_database_fail_sqllite(self):
+    def test_create_database_fail_sqlite(self):
         """
-        Database API: Test create fail with sqllite
+        Database API: Test create fail with sqlite
         """
         database_data = {
             "database_name": "test-create-sqlite-database",
@@ -1378,7 +1378,7 @@ class TestDatabaseApi(SupersetTestCase):
         """
         self.login(username="admin")
         example_db = get_example_database()
-        # sqllite will not raise a NoSuchTableError
+        # sqlite will not raise a NoSuchTableError
         if example_db.backend == "sqlite":
             return
         uri = f"api/v1/database/{example_db.id}/select_star/table_does_not_exist/"
diff --git a/tests/integration_tests/event_logger_tests.py b/tests/integration_tests/event_logger_tests.py
index 4553bb9dc7..fa965ebd7d 100644
--- a/tests/integration_tests/event_logger_tests.py
+++ b/tests/integration_tests/event_logger_tests.py
@@ -179,7 +179,7 @@ class TestEventLogger(unittest.TestCase):
                 duration=timedelta(days=64, seconds=29156, microseconds=10),
                 object_ref={"baz": "food"},
                 log_to_statsd=False,
-                payload_override={"engine": "sqllite"},
+                payload_override={"engine": "sqlite"},
             )
 
         assert logger.records == [
@@ -188,7 +188,7 @@ class TestEventLogger(unittest.TestCase):
                     {
                         "path": "/",
                         "object_ref": {"baz": "food"},
-                        "payload_override": {"engine": "sqllite"},
+                        "payload_override": {"engine": "sqlite"},
                     }
                 ],
                 "user_id": 2,
@@ -226,7 +226,7 @@ class TestEventLogger(unittest.TestCase):
                 duration=timedelta(days=64, seconds=29156, microseconds=10),
                 object_ref={"baz": "food"},
                 log_to_statsd=False,
-                payload_override={"engine": "sqllite"},
+                payload_override={"engine": "sqlite"},
             )
 
         assert logger.records[0]["user_id"] == None

