This is an automated email from the ASF dual-hosted git repository.
eladkal pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 0e5890bdcb Remove unused argument metadata from es_read and make
clearly private (#34790)
0e5890bdcb is described below
commit 0e5890bdcb6ea92f8333c46493ea7c065e4d6bf7
Author: Daniel Standish <[email protected]>
AuthorDate: Fri Oct 13 03:42:16 2023 -0700
Remove unused argument metadata from es_read and make clearly private
(#34790)
* Remove unused argument metadata from es_read
* rename with underscore; add notes
* Update airflow/providers/elasticsearch/CHANGELOG.rst
Co-authored-by: Elad Kalif <[email protected]>
---------
Co-authored-by: Elad Kalif <[email protected]>
---
airflow/providers/elasticsearch/CHANGELOG.rst | 6 ++++++
airflow/providers/elasticsearch/log/es_task_handler.py | 7 ++++---
2 files changed, 10 insertions(+), 3 deletions(-)
diff --git a/airflow/providers/elasticsearch/CHANGELOG.rst
b/airflow/providers/elasticsearch/CHANGELOG.rst
index 0f08f823ad..ec37d47d44 100644
--- a/airflow/providers/elasticsearch/CHANGELOG.rst
+++ b/airflow/providers/elasticsearch/CHANGELOG.rst
@@ -27,6 +27,12 @@
Changelog
---------
+
+.. note::
+ In PR #34790 we removed the unused argument ``metadata`` from method
``es_read``. We determined that ``es_read``
+ is an internal method and therefore not subject to backcompat, so we did not
bump major version for this provider.
+ In order to make clearer that this is an internal method we renamed it with
an underscore prefix ``_es_read``.
+
.. note::
Upgrade to Elasticsearch 8. The ElasticsearchTaskHandler &
ElasticsearchSQLHook will now use Elasticsearch 8 package.
As explained https://elasticsearch-py.readthedocs.io/en/stable ,
Elasticsearch language clients are only backwards
diff --git a/airflow/providers/elasticsearch/log/es_task_handler.py
b/airflow/providers/elasticsearch/log/es_task_handler.py
index 546dc57937..ab07e12487 100644
--- a/airflow/providers/elasticsearch/log/es_task_handler.py
+++ b/airflow/providers/elasticsearch/log/es_task_handler.py
@@ -269,7 +269,7 @@ class ElasticsearchTaskHandler(FileTaskHandler,
ExternalLoggingMixin, LoggingMix
offset = metadata["offset"]
log_id = self._render_log_id(ti, try_number)
- logs = self.es_read(log_id, offset, metadata)
+ logs = self._es_read(log_id, offset)
logs_by_host = self._group_logs_by_host(logs)
next_offset = offset if not logs else
attrgetter(self.offset_field)(logs[-1])
# Ensure a string here. Large offset numbers will get JSON.parsed
incorrectly
@@ -330,13 +330,14 @@ class ElasticsearchTaskHandler(FileTaskHandler,
ExternalLoggingMixin, LoggingMix
# Just a safe-guard to preserve backwards-compatibility
return log_line.message
- def es_read(self, log_id: str, offset: int | str, metadata: dict) -> list
| ElasticSearchResponse:
+ def _es_read(self, log_id: str, offset: int | str) -> list |
ElasticSearchResponse:
"""
Return the logs matching log_id in Elasticsearch and next offset or ''.
:param log_id: the log_id of the log to read.
:param offset: the offset start to read log from.
- :param metadata: log metadata, used for steaming log download.
+
+ :meta private:
"""
query: dict[Any, Any] = {
"query": {