This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new e45bee8840 D205 Support - Providers: Pagerduty to SMTP (inclusive) (#32358)
e45bee8840 is described below
commit e45bee884068399e7265421511e17fed106ce5b4
Author: D. Ferruzzi <[email protected]>
AuthorDate: Wed Jul 5 11:10:03 2023 -0700
D205 Support - Providers: Pagerduty to SMTP (inclusive) (#32358)
---
airflow/providers/plexus/hooks/plexus.py | 6 +++---
airflow/providers/plexus/operators/job.py | 3 +--
airflow/providers/presto/hooks/presto.py | 3 +--
airflow/providers/qubole/operators/qubole_check.py | 9 ++++++---
airflow/providers/salesforce/hooks/salesforce.py | 5 +++--
airflow/providers/segment/hooks/segment.py | 7 ++-----
airflow/providers/sftp/operators/sftp.py | 4 ++--
airflow/providers/slack/hooks/slack.py | 6 ++----
airflow/providers/slack/hooks/slack_webhook.py | 7 +++----
airflow/providers/slack/transfers/sql_to_slack.py | 15 +++++++++------
airflow/providers/smtp/hooks/smtp.py | 9 +++++----
11 files changed, 37 insertions(+), 37 deletions(-)
diff --git a/airflow/providers/plexus/hooks/plexus.py b/airflow/providers/plexus/hooks/plexus.py
index 09b858ad7c..9432fe5fb7 100644
--- a/airflow/providers/plexus/hooks/plexus.py
+++ b/airflow/providers/plexus/hooks/plexus.py
@@ -29,9 +29,9 @@ from airflow.models import Variable
class PlexusHook(BaseHook):
"""
- Used for jwt token generation and storage to
- make Plexus API calls. Requires email and password
- Airflow variables be created.
+ Used for jwt token generation and storage to make Plexus API calls.
+
+ Requires email and password Airflow variables be created.
Example:
- export AIRFLOW_VAR_EMAIL = [email protected]
diff --git a/airflow/providers/plexus/operators/job.py b/airflow/providers/plexus/operators/job.py
index 963924bbbb..5eb83f7b6b 100644
--- a/airflow/providers/plexus/operators/job.py
+++ b/airflow/providers/plexus/operators/job.py
@@ -131,8 +131,7 @@ class PlexusJobOperator(BaseOperator):
def construct_job_params(self, hook: Any) -> dict[Any, Any | None]:
"""
- Creates job_params dict for api call to
- launch a Plexus job.
+ Creates job_params dict for api call to launch a Plexus job.
Some parameters required to launch a job
are not available to the user in the Plexus
diff --git a/airflow/providers/presto/hooks/presto.py b/airflow/providers/presto/hooks/presto.py
index e0ed55758d..477b4cd797 100644
--- a/airflow/providers/presto/hooks/presto.py
+++ b/airflow/providers/presto/hooks/presto.py
@@ -220,8 +220,7 @@ class PrestoHook(DbApiHook):
@staticmethod
def _serialize_cell(cell: Any, conn: Connection | None = None) -> Any:
"""
- Presto will adapt all arguments to the execute() method internally,
- hence we return cell without any conversion.
+ Presto will adapt all execute() args internally, hence we return cell without any conversion.
:param cell: The cell to insert into the table
:param conn: The database connection
diff --git a/airflow/providers/qubole/operators/qubole_check.py b/airflow/providers/qubole/operators/qubole_check.py
index cf01ae244e..18b2667bda 100644
--- a/airflow/providers/qubole/operators/qubole_check.py
+++ b/airflow/providers/qubole/operators/qubole_check.py
@@ -45,7 +45,8 @@ class _QuboleCheckOperatorMixin:
def get_hook(self) -> QuboleCheckHook:
"""
- Reinitialising the hook, as some template fields might have changed
+ Reinitialising the hook, as some template fields might have changed.
+
This method overwrites the original QuboleOperator.get_hook() which
returns a QuboleHook.
"""
return QuboleCheckHook(
@@ -55,8 +56,9 @@ class _QuboleCheckOperatorMixin:
class QuboleCheckOperator(_QuboleCheckOperatorMixin, SQLCheckOperator, QuboleOperator):
"""
- Performs checks against Qubole Commands. ``QuboleCheckOperator`` expects
- a command that will be executed on QDS.
+ Performs checks against Qubole Commands.
+
+ ``QuboleCheckOperator`` expects a command that will be executed on QDS.
By default, each value on first row of the result of this Qubole Command
is evaluated using python ``bool`` casting. If any of the
values return ``False``, the check is failed and errors out.
@@ -129,6 +131,7 @@ class QuboleCheckOperator(_QuboleCheckOperatorMixin, SQLCheckOperator, QuboleOpe
class QuboleValueCheckOperator(_QuboleCheckOperatorMixin, SQLValueCheckOperator, QuboleOperator):
"""
Performs a simple value check using Qubole command.
+
By default, each value on the first row of this
Qubole command is compared with a pre-defined value.
The check fails and errors out if the output of the command
diff --git a/airflow/providers/salesforce/hooks/salesforce.py b/airflow/providers/salesforce/hooks/salesforce.py
index 7c82e46bb4..336c0db0a0 100644
--- a/airflow/providers/salesforce/hooks/salesforce.py
+++ b/airflow/providers/salesforce/hooks/salesforce.py
@@ -16,8 +16,7 @@
# specific language governing permissions and limitations
# under the License.
"""
-This module contains a Salesforce Hook which allows you to connect to your Salesforce instance,
-retrieve data from it, and write that data to a file for other uses.
+Connect to your Salesforce instance, retrieve data from it, and write that data to a file for other uses.
.. note:: this hook also relies on the simple_salesforce package:
https://github.com/simple-salesforce/simple-salesforce
@@ -180,6 +179,7 @@ class SalesforceHook(BaseHook):
def describe_object(self, obj: str) -> dict:
"""
Get the description of an object from Salesforce.
+
This description is the object's schema and
some extra metadata that Salesforce stores for each object.
@@ -204,6 +204,7 @@ class SalesforceHook(BaseHook):
def get_object_from_salesforce(self, obj: str, fields: Iterable[str]) -> dict:
"""
Get all instances of the `object` from Salesforce.
+
For each model, only get the fields specified in fields.
All we really do underneath the hood is run:
diff --git a/airflow/providers/segment/hooks/segment.py b/airflow/providers/segment/hooks/segment.py
index 43055dea62..258716387d 100644
--- a/airflow/providers/segment/hooks/segment.py
+++ b/airflow/providers/segment/hooks/segment.py
@@ -16,9 +16,7 @@
# specific language governing permissions and limitations
# under the License.
"""
-This module contains a Segment Hook
-which allows you to connect to your Segment account,
-retrieve data from it or write to that file.
+Connect to your Segment account, retrieve data from it or write to that file.
NOTE: this hook also relies on the Segment analytics package:
https://github.com/segmentio/analytics-python
@@ -33,8 +31,7 @@ from airflow.hooks.base import BaseHook
class SegmentHook(BaseHook):
"""
- Create new connection to Segment
- and allows you to pull data out of Segment or write to it.
+ Create new connection to Segment and allows you to pull data out of Segment or write to it.
You can then use that file with other
Airflow operators to move the data around or interact with segment.
diff --git a/airflow/providers/sftp/operators/sftp.py b/airflow/providers/sftp/operators/sftp.py
index 884411b3cc..0920387faa 100644
--- a/airflow/providers/sftp/operators/sftp.py
+++ b/airflow/providers/sftp/operators/sftp.py
@@ -39,8 +39,8 @@ class SFTPOperation:
class SFTPOperator(BaseOperator):
"""
SFTPOperator for transferring files from remote host to local or vice a versa.
- This operator uses sftp_hook to open sftp transport channel that serve as basis
- for file transfer.
+
+ This operator uses sftp_hook to open sftp transport channel that serve as basis for file transfer.
:param ssh_conn_id: :ref:`ssh connection id<howto/connection:ssh>`
from airflow Connections. `ssh_conn_id` will be ignored if `ssh_hook`
diff --git a/airflow/providers/slack/hooks/slack.py b/airflow/providers/slack/hooks/slack.py
index 9d0bc588a3..bd8ee74b34 100644
--- a/airflow/providers/slack/hooks/slack.py
+++ b/airflow/providers/slack/hooks/slack.py
@@ -38,10 +38,8 @@ if TYPE_CHECKING:
def _ensure_prefixes(conn_type):
- """
- Remove when provider min airflow version >= 2.5.0 since this is handled by
- provider manager from that version.
- """
+ # TODO: Remove when provider min airflow version >= 2.5.0 since
+ # this is handled by provider manager from that version.
def dec(func):
@wraps(func)
diff --git a/airflow/providers/slack/hooks/slack_webhook.py b/airflow/providers/slack/hooks/slack_webhook.py
index aef5f29e71..82b21f175e 100644
--- a/airflow/providers/slack/hooks/slack_webhook.py
+++ b/airflow/providers/slack/hooks/slack_webhook.py
@@ -55,10 +55,8 @@ def check_webhook_response(func: Callable) -> Callable:
def _ensure_prefixes(conn_type):
- """
- Remove when provider min airflow version >= 2.5.0 since this is handled by
- provider manager from that version.
- """
+ # TODO: Remove when provider min airflow version >= 2.5.0 since
+ # this is handled by provider manager from that version.
def dec(func):
@wraps(func)
@@ -85,6 +83,7 @@ def _ensure_prefixes(conn_type):
class SlackWebhookHook(BaseHook):
"""
This class provide a thin wrapper around the ``slack_sdk.WebhookClient``.
+
This hook allows you to post messages to Slack by using Incoming Webhooks.
.. seealso::
diff --git a/airflow/providers/slack/transfers/sql_to_slack.py b/airflow/providers/slack/transfers/sql_to_slack.py
index cf5c01b22c..caac3eb7d1 100644
--- a/airflow/providers/slack/transfers/sql_to_slack.py
+++ b/airflow/providers/slack/transfers/sql_to_slack.py
@@ -80,12 +80,15 @@ class BaseSqlToSlackOperator(BaseOperator):
class SqlToSlackOperator(BaseSqlToSlackOperator):
"""
- Executes an SQL statement in a given SQL connection and sends the results to Slack. The results of the
- query are rendered into the 'slack_message' parameter as a Pandas dataframe using a JINJA variable called
- '{{ results_df }}'. The 'results_df' variable name can be changed by specifying a different
- 'results_df_name' parameter. The Tabulate library is added to the JINJA environment as a filter to
- allow the dataframe to be rendered nicely. For example, set 'slack_message' to {{ results_df |
- tabulate(tablefmt="pretty", headers="keys") }} to send the results to Slack as an ascii rendered table.
+ Executes an SQL statement in a given SQL connection and sends the results to Slack.
+
+ The results of the query are rendered into the 'slack_message' parameter as a Pandas
+ dataframe using a JINJA variable called '{{ results_df }}'. The 'results_df' variable
+ name can be changed by specifying a different 'results_df_name' parameter. The Tabulate
+ library is added to the JINJA environment as a filter to allow the dataframe to be
+ rendered nicely. For example, set 'slack_message' to {{ results_df |
+ tabulate(tablefmt="pretty", headers="keys") }} to send the results to Slack as an ascii
+ rendered table.
.. seealso::
For more information on how to use this operator, take a look at the guide:
diff --git a/airflow/providers/smtp/hooks/smtp.py b/airflow/providers/smtp/hooks/smtp.py
index f3a2abb35f..6279ea4f94 100644
--- a/airflow/providers/smtp/hooks/smtp.py
+++ b/airflow/providers/smtp/hooks/smtp.py
@@ -16,8 +16,8 @@
# specific language governing permissions and limitations
# under the License.
"""
-This module provides everything to be able to search in mails for a specific attachment
-and also to download it.
+Search in emails for a specific attachment and also to download it.
+
It uses the smtplib library that is already integrated in python 3.
"""
from __future__ import annotations
@@ -302,8 +302,9 @@ class SmtpHook(BaseHook):
def _get_email_list_from_str(self, addresses: str) -> list[str]:
"""
- Extract a list of email addresses from a string. The string
- can contain multiple email addresses separated by
+ Extract a list of email addresses from a string.
+
+ The string can contain multiple email addresses separated by
any of the following delimiters: ',' or ';'.
:param addresses: A string containing one or more email addresses.