This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 660386b5d9 Refactor: Use inplace .sort() (#33743)
660386b5d9 is described below

commit 660386b5d962a77d895be34a644616a67a0e49c8
Author: Miroslav Šedivý <[email protected]>
AuthorDate: Sun Aug 27 16:38:24 2023 +0000

    Refactor: Use inplace .sort() (#33743)
---
 airflow/api_connexion/endpoints/task_endpoint.py     | 2 +-
 airflow/dag_processing/manager.py                    | 4 ++--
 airflow/timetables/events.py                         | 2 +-
 dev/validate_version_added_fields_in_config.py       | 5 ++---
 scripts/ci/pre_commit/pre_commit_sort_in_the_wild.py | 2 +-
 scripts/tools/generate-integrations-json.py          | 2 +-
 tests/jobs/test_scheduler_job.py                     | 2 +-
 7 files changed, 9 insertions(+), 10 deletions(-)

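For context on the pattern being applied: sorted(x) builds and returns a new
list, while list.sort() reorders the existing list object in place and returns
None, so no reassignment (and no temporary copy) is needed. A minimal sketch of
the difference, using illustrative variable names that are not taken from the
patch:

    # sorted() allocates a new list; the original list is left untouched.
    numbers = [3, 1, 2]
    ordered = sorted(numbers, reverse=True)  # ordered == [3, 2, 1], numbers unchanged

    # list.sort() mutates the same list object in place and returns None,
    # which is why the call sites below drop the reassignment.
    numbers.sort(reverse=True)               # numbers == [3, 2, 1]
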
diff --git a/airflow/api_connexion/endpoints/task_endpoint.py b/airflow/api_connexion/endpoints/task_endpoint.py
index 23c2b32487..f4d48fe02d 100644
--- a/airflow/api_connexion/endpoints/task_endpoint.py
+++ b/airflow/api_connexion/endpoints/task_endpoint.py
@@ -61,7 +61,7 @@ def get_tasks(*, dag_id: str, order_by: str = "task_id") -> APIResponse:
     tasks = dag.tasks
 
     try:
-        tasks = sorted(tasks, key=attrgetter(order_by.lstrip("-")), reverse=(order_by[0:1] == "-"))
+        tasks.sort(key=attrgetter(order_by.lstrip("-")), reverse=(order_by[0:1] == "-"))
     except AttributeError as err:
         raise BadRequest(detail=str(err))
     task_collection = TaskCollection(tasks=tasks, total_entries=len(tasks))
diff --git a/airflow/dag_processing/manager.py b/airflow/dag_processing/manager.py
index 5a92af893b..269ba769cb 100644
--- a/airflow/dag_processing/manager.py
+++ b/airflow/dag_processing/manager.py
@@ -865,7 +865,7 @@ class DagFileProcessorManager(LoggingMixin):
             rows.append((file_path, processor_pid, runtime, num_dags, num_errors, last_runtime, last_run))
 
         # Sort by longest last runtime. (Can't sort None values in python3)
-        rows = sorted(rows, key=lambda x: x[3] or 0.0)
+        rows.sort(key=lambda x: x[3] or 0.0)
 
         formatted_rows = []
         for file_path, pid, runtime, num_dags, num_errors, last_runtime, last_run in rows:
@@ -1167,7 +1167,7 @@ class DagFileProcessorManager(LoggingMixin):
         if is_mtime_mode:
             file_paths = sorted(files_with_mtime, key=files_with_mtime.get, reverse=True)
         elif list_mode == "alphabetical":
-            file_paths = sorted(file_paths)
+            file_paths.sort()
         elif list_mode == "random_seeded_by_host":
             # Shuffle the list seeded by hostname so multiple schedulers can work on different
             # set of files. Since we set the seed, the sort order will remain same per host
diff --git a/airflow/timetables/events.py b/airflow/timetables/events.py
index a59f4fc5b2..ce8fa9527f 100644
--- a/airflow/timetables/events.py
+++ b/airflow/timetables/events.py
@@ -52,7 +52,7 @@ class EventsTimetable(Timetable):
         self.event_dates = list(event_dates)  # Must be reversible and indexable
         if not presorted:
             # For long lists this could take a while, so only want to do it once
-            self.event_dates = sorted(self.event_dates)
+            self.event_dates.sort()
         self.restrict_to_events = restrict_to_events
         if description is None:
             self.description = (
diff --git a/dev/validate_version_added_fields_in_config.py b/dev/validate_version_added_fields_in_config.py
index d68ce36a0d..7ee0fb16b2 100755
--- a/dev/validate_version_added_fields_in_config.py
+++ b/dev/validate_version_added_fields_in_config.py
@@ -114,9 +114,8 @@ for new_section, old_section, version_before_renaming in RENAMED_SECTIONS:
     computed_option_new_section.update(options)
 
 # 1. Prepare versions to checks
-airflow_version = fetch_pypi_versions()
-airflow_version = sorted(airflow_version, key=semver.VersionInfo.parse)
-to_check_versions: list[str] = [d for d in airflow_version if d.startswith("2.")]
+to_check_versions: list[str] = [d for d in fetch_pypi_versions() if d.startswith("2.")]
+to_check_versions.sort(key=semver.VersionInfo.parse)
 
 # 2. Compute expected options set with version added fields
 expected_computed_options: set[tuple[str, str, str]] = set()
diff --git a/scripts/ci/pre_commit/pre_commit_sort_in_the_wild.py b/scripts/ci/pre_commit/pre_commit_sort_in_the_wild.py
index a04d7d18d0..cfe2047236 100755
--- a/scripts/ci/pre_commit/pre_commit_sort_in_the_wild.py
+++ b/scripts/ci/pre_commit/pre_commit_sort_in_the_wild.py
@@ -68,5 +68,5 @@ if __name__ == "__main__":
                 line = "1." + line.split(".", maxsplit=1)[1]
                 print(f"{old_line.strip()} => {line.strip()}")
             companies.append(line)
-    companies = sorted(companies, key=stable_sort)
+    companies.sort(key=stable_sort)
     inthewild_path.write_text("".join(header) + "\n" + "".join(companies))
diff --git a/scripts/tools/generate-integrations-json.py b/scripts/tools/generate-integrations-json.py
index 94725da1df..e66c28d9c0 100755
--- a/scripts/tools/generate-integrations-json.py
+++ b/scripts/tools/generate-integrations-json.py
@@ -67,7 +67,7 @@ for provider_info in ALL_PROVIDER_YAMLS:
             result["logo"] = logo
         result_integrations.append(result)
 
-result_integrations = sorted(result_integrations, key=lambda x: x["name"].lower())
+result_integrations.sort(key=lambda x: x["name"].lower())
 with open(os.path.join(AIRFLOW_SITE_DIR, "landing-pages/site/static/integrations.json"), "w") as f:
     f.write(
         json.dumps(
diff --git a/tests/jobs/test_scheduler_job.py b/tests/jobs/test_scheduler_job.py
index f10a81cc51..fa425910c9 100644
--- a/tests/jobs/test_scheduler_job.py
+++ b/tests/jobs/test_scheduler_job.py
@@ -601,7 +601,7 @@ class TestSchedulerJob:
             dr2.get_task_instance(task_id_1, session=session),
             dr2.get_task_instance(task_id_2, session=session),
         ]
-        tis = sorted(tis, key=lambda ti: ti.key)
+        tis.sort(key=lambda ti: ti.key)
         for ti in tis:
             ti.state = State.SCHEDULED
             session.merge(ti)
