[
https://issues.apache.org/jira/browse/AIRFLOW-2889?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16577406#comment-16577406
]
ASF GitHub Bot commented on AIRFLOW-2889:
-
feng-tao closed pull request #3732: [AIRFLOW-2889] Fix typos detected by
github.com/client9/misspell
URL: https://github.com/apache/incubator-airflow/pull/3732
This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:
As this is a foreign pull request (from a fork), the diff is supplied
below (as it won't show otherwise due to GitHub magic):
diff --git a/airflow/contrib/example_dags/example_databricks_operator.py
b/airflow/contrib/example_dags/example_databricks_operator.py
index bc827d465b..79f947ba1c 100644
--- a/airflow/contrib/example_dags/example_databricks_operator.py
+++ b/airflow/contrib/example_dags/example_databricks_operator.py
@@ -32,7 +32,7 @@
# the spark jar task will NOT run until the notebook task completes
# successfully.
#
-# The definition of a succesful run is if the run has a result_state of
"SUCCESS".
+# The definition of a successful run is if the run has a result_state of
"SUCCESS".
# For more information about the state of a run refer to
# https://docs.databricks.com/api/latest/jobs.html#runstate
diff --git a/airflow/contrib/hooks/azure_fileshare_hook.py
b/airflow/contrib/hooks/azure_fileshare_hook.py
index edabc17293..d4066ee549 100644
--- a/airflow/contrib/hooks/azure_fileshare_hook.py
+++ b/airflow/contrib/hooks/azure_fileshare_hook.py
@@ -100,7 +100,7 @@ def list_directories_and_files(self, share_name,
directory_name=None, **kwargs):
def create_directory(self, share_name, directory_name, **kwargs):
"""
-Create a new direcotry on a Azure File Share.
+Create a new directory on a Azure File Share.
:param share_name: Name of the share.
:type share_name: str
diff --git a/airflow/contrib/hooks/bigquery_hook.py
b/airflow/contrib/hooks/bigquery_hook.py
index 2a94580f50..e4c0653bfe 100644
--- a/airflow/contrib/hooks/bigquery_hook.py
+++ b/airflow/contrib/hooks/bigquery_hook.py
@@ -627,7 +627,7 @@ def run_query(self,
if query_params:
if self.use_legacy_sql:
-raise ValueError("Query paramaters are not allowed when using "
+raise ValueError("Query parameters are not allowed when using "
"legacy SQL")
else:
configuration['query']['queryParameters'] = query_params
diff --git a/airflow/contrib/hooks/emr_hook.py
b/airflow/contrib/hooks/emr_hook.py
index 6cd92c6d85..d116f2275f 100644
--- a/airflow/contrib/hooks/emr_hook.py
+++ b/airflow/contrib/hooks/emr_hook.py
@@ -23,7 +23,7 @@
class EmrHook(AwsHook):
"""
-Interact with AWS EMR. emr_conn_id is only neccessary for using the
+Interact with AWS EMR. emr_conn_id is only necessary for using the
create_job_flow method.
"""
diff --git a/airflow/contrib/hooks/gcp_dataproc_hook.py
b/airflow/contrib/hooks/gcp_dataproc_hook.py
index 8e4f32b137..57c48bde59 100644
--- a/airflow/contrib/hooks/gcp_dataproc_hook.py
+++ b/airflow/contrib/hooks/gcp_dataproc_hook.py
@@ -235,6 +235,6 @@ def wait(self, operation):
DataProcHook,
"await",
deprecation.deprecated(
-DataProcHook.wait, "renamed to 'wait' for Python3.7 compatability"
+DataProcHook.wait, "renamed to 'wait' for Python3.7 compatibility"
),
)
diff --git a/airflow/contrib/hooks/qubole_hook.py
b/airflow/contrib/hooks/qubole_hook.py
index 5be5923e7b..3df77d3a1f 100755
--- a/airflow/contrib/hooks/qubole_hook.py
+++ b/airflow/contrib/hooks/qubole_hook.py
@@ -125,7 +125,7 @@ def execute(self, context):
def kill(self, ti):
"""
-Kill (cancel) a Qubole commmand
+Kill (cancel) a Qubole command
:param ti: Task Instance of the dag, used to determine the Quboles
command id
:return: response from Qubole
"""
diff --git a/airflow/contrib/hooks/salesforce_hook.py
b/airflow/contrib/hooks/salesforce_hook.py
index 24b67f49fc..efc819e543 100644
--- a/airflow/contrib/hooks/salesforce_hook.py
+++ b/airflow/contrib/hooks/salesforce_hook.py
@@ -53,14 +53,14 @@ def __init__(
:param conn_id: the name of the connection that has the parameters
we need to connect to Salesforce.
-The conenction shoud be type `http` and include a
+The connection shoud be type `http` and include a
user's security token in the `Extras` field.
.. note::
For the HTTP connection type, you can include a
JSON structure in the `Extras` field.
We need a user's security token to connect to Salesforce.
So we define it in the `Extras`