This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new bb2689ab45 Use str.splitlines() to split lines in providers (#33593)
bb2689ab45 is described below
commit bb2689ab455ca5d82f4f9b8d4b73ec071849c439
Author: Miroslav Šedivý <[email protected]>
AuthorDate: Mon Aug 21 21:33:37 2023 +0000
Use str.splitlines() to split lines in providers (#33593)
---
airflow/providers/apache/hive/hooks/hive.py | 6 +++---
airflow/providers/microsoft/psrp/hooks/psrp.py | 2 +-
airflow/providers/ssh/hooks/ssh.py | 2 +-
tests/providers/docker/operators/test_docker.py | 6 +++---
tests/providers/elasticsearch/log/test_es_task_handler.py | 2 +-
tests/providers/google/cloud/hooks/test_dataflow.py | 2 +-
tests/providers/google/cloud/log/test_stackdriver_task_handler.py | 2 +-
7 files changed, 11 insertions(+), 11 deletions(-)
diff --git a/airflow/providers/apache/hive/hooks/hive.py b/airflow/providers/apache/hive/hooks/hive.py
index 4ecb8a7860..ea004860b4 100644
--- a/airflow/providers/apache/hive/hooks/hive.py
+++ b/airflow/providers/apache/hive/hooks/hive.py
@@ -317,14 +317,14 @@ class HiveCliHook(BaseHook):
try:
self.run_cli(query, verbose=False)
except AirflowException as e:
- message = e.args[0].split("\n")[-2]
+ message = e.args[0].splitlines()[-2]
self.log.info(message)
error_loc = re.search(r"(\d+):(\d+)", message)
if error_loc and error_loc.group(1).isdigit():
lst = int(error_loc.group(1))
begin = max(lst - 2, 0)
- end = min(lst + 3, len(query.split("\n")))
- context = "\n".join(query.split("\n")[begin:end])
+ end = min(lst + 3, len(query.splitlines()))
+ context = "\n".join(query.splitlines()[begin:end])
self.log.info("Context :\n %s", context)
else:
self.log.info("SUCCESS")
diff --git a/airflow/providers/microsoft/psrp/hooks/psrp.py b/airflow/providers/microsoft/psrp/hooks/psrp.py
index 06e3eb93a7..d77e73c278 100644
--- a/airflow/providers/microsoft/psrp/hooks/psrp.py
+++ b/airflow/providers/microsoft/psrp/hooks/psrp.py
@@ -256,7 +256,7 @@ class PsrpHook(BaseHook):
if message_type == MessageType.ERROR_RECORD:
log(INFO, "%s: %s", record.reason, record)
if record.script_stacktrace:
- for trace in record.script_stacktrace.split("\r\n"):
+ for trace in record.script_stacktrace.splitlines():
log(INFO, trace)
level = INFORMATIONAL_RECORD_LEVEL_MAP.get(message_type)
diff --git a/airflow/providers/ssh/hooks/ssh.py b/airflow/providers/ssh/hooks/ssh.py
index e40354e1ac..6273ebd0e1 100644
--- a/airflow/providers/ssh/hooks/ssh.py
+++ b/airflow/providers/ssh/hooks/ssh.py
@@ -446,7 +446,7 @@ class SSHHook(BaseHook):
:return: ``paramiko.PKey`` appropriate for given key
:raises AirflowException: if key cannot be read
"""
- if len(private_key.split("\n", 2)) < 2:
+ if len(private_key.splitlines()) < 2:
raise AirflowException("Key must have BEGIN and END header/footer on separate lines.")
for pkey_class in self._pkey_loaders:
diff --git a/tests/providers/docker/operators/test_docker.py b/tests/providers/docker/operators/test_docker.py
index eea099f6ed..abe8bb4be2 100644
--- a/tests/providers/docker/operators/test_docker.py
+++ b/tests/providers/docker/operators/test_docker.py
@@ -164,9 +164,9 @@ class TestDockerOperator:
def dotenv_mock_return_value(**kwargs):
env_dict = {}
env_str = kwargs["stream"]
- for env_var in env_str.split("\n"):
- kv = env_var.split("=")
- env_dict[kv[0]] = kv[1]
+ for env_var in env_str.splitlines():
+ key, _, val = env_var.partition("=")
+ env_dict[key] = val
return env_dict
self.dotenv_patcher = mock.patch("airflow.providers.docker.operators.docker.dotenv_values")
diff --git a/tests/providers/elasticsearch/log/test_es_task_handler.py b/tests/providers/elasticsearch/log/test_es_task_handler.py
index 4ffa958819..8405879997 100644
--- a/tests/providers/elasticsearch/log/test_es_task_handler.py
+++ b/tests/providers/elasticsearch/log/test_es_task_handler.py
@@ -670,7 +670,7 @@ class TestElasticsearchTaskHandler:
ti.log.info("Test3")
# assert
- first_log, second_log, third_log = map(json.loads, stdout_mock.getvalue().strip().split("\n"))
+ first_log, second_log, third_log = map(json.loads, stdout_mock.getvalue().strip().splitlines())
assert first_log["offset"] < second_log["offset"] < third_log["offset"]
assert first_log["asctime"] == t1.format("YYYY-MM-DDTHH:mm:ss.SSSZZ")
assert second_log["asctime"] == t2.format("YYYY-MM-DDTHH:mm:ss.SSSZZ")
diff --git a/tests/providers/google/cloud/hooks/test_dataflow.py b/tests/providers/google/cloud/hooks/test_dataflow.py
index b56574ce70..ed441f06f1 100644
--- a/tests/providers/google/cloud/hooks/test_dataflow.py
+++ b/tests/providers/google/cloud/hooks/test_dataflow.py
@@ -1883,7 +1883,7 @@ class TestDataflow:
],
)
def test_data_flow_valid_job_id(self, log):
- echos = ";".join(f"echo {shlex.quote(line)}" for line in log.split("\n"))
+ echos = ";".join(f"echo {shlex.quote(line)}" for line in log.splitlines())
cmd = ["bash", "-c", echos]
found_job_id = None
diff --git a/tests/providers/google/cloud/log/test_stackdriver_task_handler.py b/tests/providers/google/cloud/log/test_stackdriver_task_handler.py
index e9a7962980..0abb714cc5 100644
--- a/tests/providers/google/cloud/log/test_stackdriver_task_handler.py
+++ b/tests/providers/google/cloud/log/test_stackdriver_task_handler.py
@@ -372,7 +372,7 @@ labels.try_number="3"'''
assert {"project", "interval", "resource", "advancedFilter"} == set(parsed_qs.keys())
assert "global" in parsed_qs["resource"]
- filter_params = parsed_qs["advancedFilter"][0].split("\n")
+ filter_params = parsed_qs["advancedFilter"][0].splitlines()
expected_filter = [
'resource.type="global"',
'logName="projects/project_id/logs/airflow"',