This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new d29c348560 Refactor: Simplify some loops (#34223)
d29c348560 is described below
commit d29c3485609d28e14a032ae5256998fb75e8d49f
Author: Miroslav Šedivý <[email protected]>
AuthorDate: Fri Sep 15 06:48:27 2023 +0000
Refactor: Simplify some loops (#34223)
---
dev/assign_cherry_picked_prs_with_milestone.py | 21 +++++++++------------
.../commands/release_management_commands.py | 4 +---
dev/breeze/src/airflow_breeze/utils/parallel.py | 2 +-
dev/prepare_bulk_issues.py | 13 ++-----------
dev/prepare_release_issue.py | 7 ++-----
.../endpoints/test_dag_run_endpoint.py | 6 +++---
tests/jobs/test_local_task_job.py | 4 +---
tests/models/test_taskinstance.py | 4 ++--
.../elasticsearch/log/test_es_task_handler.py | 6 ++----
9 files changed, 23 insertions(+), 44 deletions(-)
diff --git a/dev/assign_cherry_picked_prs_with_milestone.py b/dev/assign_cherry_picked_prs_with_milestone.py
index c00e5d0373..0ffb57d92d 100755
--- a/dev/assign_cherry_picked_prs_with_milestone.py
+++ b/dev/assign_cherry_picked_prs_with_milestone.py
@@ -264,7 +264,6 @@ def assign_prs(
):
changes = get_changes(verbose, previous_release, current_release)
changes = [change for change in changes if change.pr is not None]
- prs = [change.pr for change in changes]
g = Github(github_token)
repo = g.get_repo("apache/airflow")
@@ -276,9 +275,7 @@ def assign_prs(
console.print("\n[yellow]Implying --skip-assigned as summary report is enabled[/]\n")
skip_assigned = True
milestone = repo.get_milestone(milestone_number)
- count_prs = len(prs)
- if limit_pr_count:
- count_prs = limit_pr_count
+ count_prs = limit_pr_count or len(changes)
console.print(f"\n[green]Applying Milestone: {milestone.title} to {count_prs} merged PRs[/]\n")
if dry_run:
console.print("[yellow]Dry run mode![/]\n")
@@ -291,8 +288,8 @@ def assign_prs(
changelog_changes: list[Change] = []
doc_only_changes: list[Change] = []
excluded_changes: list[Change] = []
- for i in range(count_prs):
- pr_number = prs[i]
+ for change in changes[:count_prs]:
+ pr_number = change.pr
if pr_number is None:
# Should not happen but MyPy is not happy
continue
@@ -322,15 +319,15 @@ def assign_prs(
if TYPE_DOC_ONLY_LABEL in label_names:
console.print("[yellow]It will be classified as doc-only change[/]\n")
if skip_assigned:
- doc_only_changes.append(changes[i])
+ doc_only_changes.append(change)
elif CHANGELOG_SKIP_LABEL in label_names:
console.print("[yellow]It will be excluded from changelog[/]\n")
if skip_assigned:
- excluded_changes.append(changes[i])
+ excluded_changes.append(change)
else:
console.print("[green]The change will be included in changelog[/]\n")
if skip_assigned:
- changelog_changes.append(changes[i])
+ changelog_changes.append(change)
if skip_assigned:
continue
elif already_assigned_milestone_number is not None:
@@ -350,21 +347,21 @@ def assign_prs(
if not dry_run:
update_milestone(repo, pr, milestone)
if skip_assigned:
- changelog_changes.append(changes[i])
+ changelog_changes.append(change)
elif chosen_option in ("doc", "d"):
console.print(f"Applying the label {doc_only_label} the PR #{pr_number}")
if not dry_run:
pr.add_to_labels(doc_only_label)
update_milestone(repo, pr, milestone)
if skip_assigned:
- doc_only_changes.append(changes[i])
+ doc_only_changes.append(change)
elif chosen_option in ("exclude", "e"):
console.print(f"Applying the label {changelog_skip_label} the PR #{pr_number}")
if not dry_run:
pr.add_to_labels(changelog_skip_label)
update_milestone(repo, pr, milestone)
if skip_assigned:
- excluded_changes.append(changes[i])
+ excluded_changes.append(change)
elif chosen_option in ("skip", "s"):
console.print(f"Skipping the PR #{pr_number}")
elif chosen_option in ("quit", "q"):
diff --git a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
index 2e642a49a2..533c63c857 100644
--- a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
@@ -1234,9 +1234,7 @@ def generate_issue_content_providers(
pull_requests: dict[int, PullRequest.PullRequest | Issue.Issue] = {}
with Progress(console=get_console(), disable=disable_progress) as progress:
task = progress.add_task(f"Retrieving {len(all_prs)} PRs ", total=len(all_prs))
- pr_list = list(all_prs)
- for i in range(len(pr_list)):
- pr_number = pr_list[i]
+ for pr_number in all_prs:
progress.console.print(
f"Retrieving PR#{pr_number}: https://github.com/apache/airflow/pull/{pr_number}"
)
diff --git a/dev/breeze/src/airflow_breeze/utils/parallel.py b/dev/breeze/src/airflow_breeze/utils/parallel.py
index 48fc6ac756..ea4ef06030 100644
--- a/dev/breeze/src/airflow_breeze/utils/parallel.py
+++ b/dev/breeze/src/airflow_breeze/utils/parallel.py
@@ -66,7 +66,7 @@ def get_temp_file_name() -> str:
def get_output_files(titles: list[str]) -> list[Output]:
- outputs = [Output(title=titles[i], file_name=get_temp_file_name()) for i in range(len(titles))]
+ outputs = [Output(title=title, file_name=get_temp_file_name()) for title in titles]
for out in outputs:
get_console().print(f"[info]Capturing output of {out.escaped_title}:[/] {out.file_name}")
return outputs
diff --git a/dev/prepare_bulk_issues.py b/dev/prepare_bulk_issues.py
index d5b4a04fd4..405cf4ca0b 100755
--- a/dev/prepare_bulk_issues.py
+++ b/dev/prepare_bulk_issues.py
@@ -204,16 +204,12 @@ def prepare_bulk_issues(
total_issues = len(names)
processed_issues = 0
if dry_run:
- for name in names:
+ for name in names[:max_issues]:
issue_content, issue_title = get_issue_details(issues, name, template_file, title)
console.print(f"[yellow]### {issue_title} #####[/]")
console.print(issue_content)
console.print()
processed_issues += 1
- if max_issues is not None:
- max_issues -= 1
- if max_issues == 0:
- break
console.print()
console.print(f"Displayed {processed_issues} issue(s).")
else:
@@ -224,17 +220,12 @@ def prepare_bulk_issues(
g = Github(github_token)
repo = g.get_repo(repository)
try:
- for i in range(total_issues):
- name = names[i]
+ for name in names[:max_issues]:
issue_content, issue_title = get_issue_details(issues, name, template_file, title)
repo.create_issue(title=issue_title, body=issue_content, labels=labels_list)
progress.advance(task)
processed_issues += 1
sleep(2) # avoid secondary rate limit!
- if max_issues is not None:
- max_issues -= 1
- if max_issues == 0:
- break
except GithubException as e:
console.print(f"[red]Error!: {e}[/]")
console.print(
diff --git a/dev/prepare_release_issue.py b/dev/prepare_release_issue.py
index 43832ea284..2a7f855ed7 100755
--- a/dev/prepare_release_issue.py
+++ b/dev/prepare_release_issue.py
@@ -266,13 +266,10 @@ def generate_issue_content(
pull_requests: dict[int, PullRequestOrIssue] = {}
linked_issues: dict[int, list[Issue.Issue]] = defaultdict(lambda: [])
users: dict[int, set[str]] = defaultdict(lambda: set())
- count_prs = len(prs)
- if limit_pr_count:
- count_prs = limit_pr_count
+ count_prs = limit_pr_count or len(prs)
with Progress(console=console) as progress:
task = progress.add_task(f"Retrieving {count_prs} PRs ", total=count_prs)
- for i in range(count_prs):
- pr_number = prs[i]
+ for pr_number in prs[:count_prs]:
progress.console.print(
f"Retrieving PR#{pr_number}: https://github.com/apache/airflow/pull/{pr_number}"
)
diff --git a/tests/api_connexion/endpoints/test_dag_run_endpoint.py b/tests/api_connexion/endpoints/test_dag_run_endpoint.py
index e66139a289..ea07701c86 100644
--- a/tests/api_connexion/endpoints/test_dag_run_endpoint.py
+++ b/tests/api_connexion/endpoints/test_dag_run_endpoint.py
@@ -958,12 +958,12 @@ class TestGetDagRunBatchDateFilters(TestDagRunEndpoint):
dag_id="TEST_DAG_ID",
run_id=f"TEST_START_EXEC_DAY_1{i}",
run_type=DagRunType.MANUAL,
- execution_date=timezone.parse(dates[i]),
- start_date=timezone.parse(dates[i]),
+ execution_date=timezone.parse(date),
+ start_date=timezone.parse(date),
external_trigger=True,
state="success",
)
- for i in range(len(dates))
+ for i, date in enumerate(dates)
]
with create_session() as session:
session.add_all(dag_runs)
diff --git a/tests/jobs/test_local_task_job.py b/tests/jobs/test_local_task_job.py
index 33c431e97c..88dd014c0d 100644
--- a/tests/jobs/test_local_task_job.py
+++ b/tests/jobs/test_local_task_job.py
@@ -315,9 +315,7 @@ class TestLocalTaskJob:
job_runner.heartbeat_callback = lambda session: heartbeat_records.append(job.latest_heartbeat)
run_job(job=job, execute_callable=job_runner._execute)
assert len(heartbeat_records) > 2
- for i in range(1, len(heartbeat_records)):
- time1 = heartbeat_records[i - 1]
- time2 = heartbeat_records[i]
+ for time1, time2 in zip(heartbeat_records, heartbeat_records[1:]):
# Assert that difference small enough
delta = (time2 - time1).total_seconds()
assert abs(delta - job.heartrate) < 0.8
diff --git a/tests/models/test_taskinstance.py b/tests/models/test_taskinstance.py
index be0cd69967..c414d501c1 100644
--- a/tests/models/test_taskinstance.py
+++ b/tests/models/test_taskinstance.py
@@ -2814,13 +2814,13 @@ class TestTaskInstance:
states = [State.RUNNING, State.FAILED, State.QUEUED, State.SCHEDULED, State.DEFERRED]
tasks = []
- for i in range(len(states)):
+ for i, state in enumerate(states):
op = EmptyOperator(
task_id=f"reg_Task{i}",
dag=dag,
)
ti = TI(task=op, run_id=dr.run_id)
- ti.state = states[i]
+ ti.state = state
session.add(ti)
tasks.append(ti)
diff --git a/tests/providers/elasticsearch/log/test_es_task_handler.py b/tests/providers/elasticsearch/log/test_es_task_handler.py
index 3d3bf4cc43..f720429825 100644
--- a/tests/providers/elasticsearch/log/test_es_task_handler.py
+++ b/tests/providers/elasticsearch/log/test_es_task_handler.py
@@ -120,10 +120,8 @@ class TestElasticsearchTaskHandler:
logs_by_host = self.es_task_handler._group_logs_by_host(es_response)
def concat_logs(lines):
- log_range = (
- (len(lines) - 1) if lines[-1].message == self.es_task_handler.end_of_log_mark else len(lines)
- )
- return "\n".join(self.es_task_handler._format_msg(lines[i]) for i in range(log_range))
+ log_range = -1 if lines[-1].message == self.es_task_handler.end_of_log_mark else None
+ return "\n".join(self.es_task_handler._format_msg(line) for line in lines[:log_range])
for hosted_log in logs_by_host.values():
message = concat_logs(hosted_log)