This is an automated email from the ASF dual-hosted git repository. ephraimanierobi pushed a commit to branch v2-7-test in repository https://gitbox.apache.org/repos/asf/airflow.git
commit be31ac27526166ea1bf0c39b23686f4bca01efa4 Author: Miroslav Šedivý <[email protected]> AuthorDate: Thu Aug 3 23:10:20 2023 +0000 Get rid of Python2 numeric relics (#33050) (cherry picked from commit e3d82c6be0e0e1468ade053c37690aa1e0e4882d) --- airflow/example_dags/example_python_operator.py | 2 +- airflow/models/taskinstance.py | 2 +- airflow/providers/celery/executors/celery_executor.py | 2 +- airflow/providers/celery/executors/celery_executor_utils.py | 2 +- airflow/providers/common/sql/operators/sql.py | 4 ++-- airflow/providers/docker/operators/docker.py | 2 +- airflow/providers/google/cloud/hooks/bigquery.py | 4 ++-- airflow/providers/google/cloud/hooks/gcs.py | 4 ++-- airflow/www/views.py | 4 ++-- dev/breeze/src/airflow_breeze/utils/parallel.py | 2 +- dev/stats/get_important_pr_candidates.py | 5 ++--- tests/test_utils/perf/perf_kit/repeat_and_time.py | 4 ++-- 12 files changed, 18 insertions(+), 19 deletions(-) diff --git a/airflow/example_dags/example_python_operator.py b/airflow/example_dags/example_python_operator.py index 4f891abe60..30e447840a 100644 --- a/airflow/example_dags/example_python_operator.py +++ b/airflow/example_dags/example_python_operator.py @@ -81,7 +81,7 @@ with DAG( """This is a function that will run within the DAG execution""" time.sleep(random_base) - sleeping_task = my_sleeping_function(random_base=float(i) / 10) + sleeping_task = my_sleeping_function(random_base=i / 10) run_this >> log_the_sql >> sleeping_task # [END howto_operator_python_kwargs] diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py index 4012b09d2c..72105c6a9c 100644 --- a/airflow/models/taskinstance.py +++ b/airflow/models/taskinstance.py @@ -1195,7 +1195,7 @@ class TaskInstance(Base, LoggingMixin): # If the min_backoff calculation is below 1, it will be converted to 0 via int. Thus, # we must round up prior to converting to an int, otherwise a divide by zero error # will occur in the modded_hash calculation. 
- min_backoff = int(math.ceil(delay.total_seconds() * (2 ** (self.try_number - 2)))) + min_backoff = math.ceil(delay.total_seconds() * (2 ** (self.try_number - 2))) # In the case when delay.total_seconds() is 0, min_backoff will not be rounded up to 1. # To address this, we impose a lower bound of 1 on min_backoff. This effectively makes diff --git a/airflow/providers/celery/executors/celery_executor.py b/airflow/providers/celery/executors/celery_executor.py index 571485b9f3..51287f3c1b 100644 --- a/airflow/providers/celery/executors/celery_executor.py +++ b/airflow/providers/celery/executors/celery_executor.py @@ -264,7 +264,7 @@ class CeleryExecutor(BaseExecutor): :return: Number of tasks that should be sent per process """ - return max(1, int(math.ceil(1.0 * to_send_count / self._sync_parallelism))) + return max(1, math.ceil(to_send_count / self._sync_parallelism)) def _process_tasks(self, task_tuples: list[TaskTuple]) -> None: from airflow.providers.celery.executors.celery_executor_utils import execute_command diff --git a/airflow/providers/celery/executors/celery_executor_utils.py b/airflow/providers/celery/executors/celery_executor_utils.py index c414a88b0c..2e739f239a 100644 --- a/airflow/providers/celery/executors/celery_executor_utils.py +++ b/airflow/providers/celery/executors/celery_executor_utils.py @@ -298,7 +298,7 @@ class BulkStateFetcher(LoggingMixin): num_process = min(len(async_results), self._sync_parallelism) with ProcessPoolExecutor(max_workers=num_process) as sync_pool: - chunksize = max(1, math.floor(math.ceil(1.0 * len(async_results) / self._sync_parallelism))) + chunksize = max(1, math.ceil(len(async_results) / self._sync_parallelism)) task_id_to_states_and_info = list( sync_pool.map(fetch_celery_task_state, async_results, chunksize=chunksize) diff --git a/airflow/providers/common/sql/operators/sql.py b/airflow/providers/common/sql/operators/sql.py index bf2b38f055..b0e70680ea 100644 --- a/airflow/providers/common/sql/operators/sql.py +++ 
b/airflow/providers/common/sql/operators/sql.py @@ -903,8 +903,8 @@ class SQLIntervalCheckOperator(BaseSQLOperator): ui_color = "#fff7e6" ratio_formulas = { - "max_over_min": lambda cur, ref: float(max(cur, ref)) / min(cur, ref), - "relative_diff": lambda cur, ref: float(abs(cur - ref)) / ref, + "max_over_min": lambda cur, ref: max(cur, ref) / min(cur, ref), + "relative_diff": lambda cur, ref: abs(cur - ref) / ref, } def __init__( diff --git a/airflow/providers/docker/operators/docker.py b/airflow/providers/docker/operators/docker.py index 0a20b28dc9..7683530a4c 100644 --- a/airflow/providers/docker/operators/docker.py +++ b/airflow/providers/docker/operators/docker.py @@ -374,7 +374,7 @@ class DockerOperator(BaseOperator): shm_size=self.shm_size, dns=self.dns, dns_search=self.dns_search, - cpu_shares=int(round(self.cpus * 1024)), + cpu_shares=round(self.cpus * 1024), port_bindings=self.port_bindings, mem_limit=self.mem_limit, cap_add=self.cap_add, diff --git a/airflow/providers/google/cloud/hooks/bigquery.py b/airflow/providers/google/cloud/hooks/bigquery.py index 12c084eba5..bfaef219ad 100644 --- a/airflow/providers/google/cloud/hooks/bigquery.py +++ b/airflow/providers/google/cloud/hooks/bigquery.py @@ -3261,8 +3261,8 @@ class BigQueryAsyncHook(GoogleBaseAsyncHook): raise AirflowException("The first SQL query returned None") ratio_formulas = { - "max_over_min": lambda cur, ref: float(max(cur, ref)) / min(cur, ref), - "relative_diff": lambda cur, ref: float(abs(cur - ref)) / ref, + "max_over_min": lambda cur, ref: max(cur, ref) / min(cur, ref), + "relative_diff": lambda cur, ref: abs(cur - ref) / ref, } metrics_sorted = sorted(metrics_thresholds.keys()) diff --git a/airflow/providers/google/cloud/hooks/gcs.py b/airflow/providers/google/cloud/hooks/gcs.py index a01bf72259..f489c9200b 100644 --- a/airflow/providers/google/cloud/hooks/gcs.py +++ b/airflow/providers/google/cloud/hooks/gcs.py @@ -356,7 +356,7 @@ class GCSHook(GoogleBaseHook): raise # Wait with 
exponential backoff scheme before retrying. - timeout_seconds = 1.0 * 2 ** (num_file_attempts - 1) + timeout_seconds = 2 ** (num_file_attempts - 1) time.sleep(timeout_seconds) continue @@ -524,7 +524,7 @@ class GCSHook(GoogleBaseHook): raise e # Wait with exponential backoff scheme before retrying. - timeout_seconds = 1.0 * 2 ** (num_file_attempts - 1) + timeout_seconds = 2 ** (num_file_attempts - 1) time.sleep(timeout_seconds) continue diff --git a/airflow/www/views.py b/airflow/www/views.py index f69634f7cf..18e062a487 100644 --- a/airflow/www/views.py +++ b/airflow/www/views.py @@ -939,7 +939,7 @@ class Airflow(AirflowBaseView): "error", ) - num_of_pages = int(math.ceil(num_of_all_dags / float(dags_per_page))) + num_of_pages = math.ceil(num_of_all_dags / dags_per_page) state_color_mapping = State.state_color.copy() state_color_mapping["null"] = state_color_mapping.pop(None) @@ -4003,7 +4003,7 @@ class Airflow(AirflowBaseView): logs_per_page = PAGE_SIZE audit_logs_count = get_query_count(query, session=session) - num_of_pages = int(math.ceil(audit_logs_count / float(logs_per_page))) + num_of_pages = math.ceil(audit_logs_count / logs_per_page) start = current_page * logs_per_page end = start + logs_per_page diff --git a/dev/breeze/src/airflow_breeze/utils/parallel.py b/dev/breeze/src/airflow_breeze/utils/parallel.py index 629d757e74..70f5ec737e 100644 --- a/dev/breeze/src/airflow_breeze/utils/parallel.py +++ b/dev/breeze/src/airflow_breeze/utils/parallel.py @@ -209,7 +209,7 @@ def bytes2human(n): prefix[s] = 1 << (i + 1) * 10 for s in reversed(symbols): if n >= prefix[s]: - value = float(n) / prefix[s] + value = n / prefix[s] return f"{value:.1f}{s}" return f"{n}B" diff --git a/dev/stats/get_important_pr_candidates.py b/dev/stats/get_important_pr_candidates.py index 738eed53c2..b54139de02 100755 --- a/dev/stats/get_important_pr_candidates.py +++ b/dev/stats/get_important_pr_candidates.py @@ -243,12 +243,11 @@ class PrStat: self.adjust_interaction_score() return 
round( - 1.0 - * self.interaction_score + self.interaction_score * self.label_score * self.length_score * self.change_score - / (math.log10(self.num_changed_files) if self.num_changed_files > 20 else 1.0), + / (math.log10(self.num_changed_files) if self.num_changed_files > 20 else 1), 3, ) diff --git a/tests/test_utils/perf/perf_kit/repeat_and_time.py b/tests/test_utils/perf/perf_kit/repeat_and_time.py index d9c7bbb511..67e9dbdbc3 100644 --- a/tests/test_utils/perf/perf_kit/repeat_and_time.py +++ b/tests/test_utils/perf/perf_kit/repeat_and_time.py @@ -120,10 +120,10 @@ if __name__ == "__main__": for _ in range(0, total): x_val = random.random() ** 2 y_val = random.random() ** 2 - if math.sqrt(x_val + y_val) < 1.0: + if math.sqrt(x_val + y_val) < 1: inside += 1 - return (float(inside) / total) * 4 + return (inside / total) * 4 # Example 1: with timeout(1):
