This is an automated email from the ASF dual-hosted git repository. ephraimanierobi pushed a commit to branch v2-8-test in repository https://gitbox.apache.org/repos/asf/airflow.git
commit 503c0c539f576083983e779d5e76babb3870dbcb Author: Jarek Potiuk <[email protected]> AuthorDate: Thu Feb 22 22:11:29 2024 +0100 Fix few small release issues found during 2.8.2 preparation (#37633) * when deleting the whole .build folder, the www folder could not be created during asset compilation * -source packages were treated as sdist and we attempted to build wheel files with them * when building airflow packages, default setting (for security reasons) is to build everything in docker container, but building wheel for sdist was done outside. With this PR we only use sdist check when we use local hatch build - and sdist wheel check happens in docker container when sdists are built. * tarball was prepared before the pypi packages and it has been deleted by local hatch build (cherry picked from commit 254d7eb898b021f012373c33b219cbe12b5760a8) --- .../commands/release_candidate_command.py | 10 ++++---- .../commands/release_management_commands.py | 19 +++++++------- .../ci/pre_commit/pre_commit_compile_www_assets.py | 2 +- .../in_container/run_prepare_airflow_packages.py | 29 ++++++++++++++++++++++ 4 files changed, 45 insertions(+), 15 deletions(-) diff --git a/dev/breeze/src/airflow_breeze/commands/release_candidate_command.py b/dev/breeze/src/airflow_breeze/commands/release_candidate_command.py index 16f3dd18a9..eba0f08e41 100644 --- a/dev/breeze/src/airflow_breeze/commands/release_candidate_command.py +++ b/dev/breeze/src/airflow_breeze/commands/release_candidate_command.py @@ -380,16 +380,16 @@ def publish_release_candidate(version, previous_version, github_token): git_clean() source_date_epoch = get_source_date_epoch(AIRFLOW_SOURCES_ROOT / "airflow") shutil.rmtree(DIST_DIR, ignore_errors=True) - if confirm_action("Create tarball?"): - # Create the tarball - tarball_release( - version=version, version_without_rc=version_without_rc, source_date_epoch=source_date_epoch - ) # Create the artifacts if confirm_action("Use docker to create artifacts?"): 
create_artifacts_with_docker() elif confirm_action("Use hatch to create artifacts?"): create_artifacts_with_hatch(source_date_epoch) + if confirm_action("Create tarball?"): + # Create the tarball + tarball_release( + version=version, version_without_rc=version_without_rc, source_date_epoch=source_date_epoch + ) # Sign the release sign_the_release(airflow_repo_root) # Tag and push constraints diff --git a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py index 5a966b25d4..29b16aa304 100644 --- a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py @@ -324,7 +324,7 @@ class DistributionPackageInfo(NamedTuple): dists_info = [] if package_format in ["sdist", "both"]: for file in dist_directory.glob(f"{default_glob_pattern}*tar.gz"): - if not file.is_file(): + if not file.is_file() or "-source.tar.gz" in file.name: continue dists_info.append(cls.from_sdist(filepath=file)) if package_format in ["wheel", "both"]: @@ -479,7 +479,7 @@ def _check_sdist_to_wheel(dist_info: DistributionPackageInfo, pip_command: tuple "--no-cache", "--no-binary", dist_info.package, - dist_info.filepath.__fspath__(), + dist_info.filepath.as_posix(), ], check=False, # We should run `pip wheel` outside of Project directory for avoid the case @@ -522,6 +522,14 @@ def prepare_airflow_packages( source_date_epoch=source_date_epoch, version_suffix_for_pypi=version_suffix_for_pypi, ) + get_console().print("[info]Checking if sdist packages can be built into wheels[/]") + packages = DistributionPackageInfo.dist_packages( + package_format=package_format, dist_directory=DIST_DIR, build_type="airflow" + ) + for dist_info in packages: + get_console().print(str(dist_info)) + get_console().print() + _check_sdist_to_wheel_dists(packages) else: _build_airflow_packages_with_docker( package_format=package_format, @@ -529,13 +537,6 @@ 
def prepare_airflow_packages( version_suffix_for_pypi=version_suffix_for_pypi, ) get_console().print("[success]Successfully prepared Airflow packages:") - packages = DistributionPackageInfo.dist_packages( - package_format=package_format, dist_directory=DIST_DIR, build_type="airflow" - ) - for dist_info in packages: - get_console().print(str(dist_info)) - get_console().print() - _check_sdist_to_wheel_dists(packages) def provider_action_summary(description: str, message_type: MessageType, packages: list[str]): diff --git a/scripts/ci/pre_commit/pre_commit_compile_www_assets.py b/scripts/ci/pre_commit/pre_commit_compile_www_assets.py index d3529f1b13..bf2664685e 100755 --- a/scripts/ci/pre_commit/pre_commit_compile_www_assets.py +++ b/scripts/ci/pre_commit/pre_commit_compile_www_assets.py @@ -56,7 +56,7 @@ if __name__ == "__main__": www_directory = AIRFLOW_SOURCES_PATH / "airflow" / "www" node_modules_directory = www_directory / "node_modules" dist_directory = www_directory / "static" / "dist" - WWW_HASH_FILE.parent.mkdir(exist_ok=True) + WWW_HASH_FILE.parent.mkdir(exist_ok=True, parents=True) if node_modules_directory.exists() and dist_directory.exists(): old_hash = WWW_HASH_FILE.read_text() if WWW_HASH_FILE.exists() else "" new_hash = get_directory_hash(www_directory, skip_path_regexp=r".*node_modules.*") diff --git a/scripts/in_container/run_prepare_airflow_packages.py b/scripts/in_container/run_prepare_airflow_packages.py index 9305d0dbd4..3e27d4c4b3 100755 --- a/scripts/in_container/run_prepare_airflow_packages.py +++ b/scripts/in_container/run_prepare_airflow_packages.py @@ -25,6 +25,7 @@ import sys from contextlib import contextmanager from pathlib import Path from shutil import rmtree +from tempfile import mkdtemp import yaml from rich.console import Console @@ -98,6 +99,34 @@ def build_airflow_packages(package_format: str): console.print("[red]Error building Airflow packages") sys.exit(build_process.returncode) else: + if package_format in ["both", "sdist"]: 
+ console.print("[bright_blue]Checking if sdist packages can be built into wheels") + for file in (AIRFLOW_SOURCES_ROOT / "dist").glob("apache_airflow-[0-9]*.tar.gz"): + console.print(f"[bright_blue]Validate build wheel from sdist: {file.name}") + if "-sources.tar.gz" not in file.name: + # no need to delete - we are in temporary container + tmpdir = mkdtemp() + result = subprocess.run( + [ + sys.executable, + "-m", + "pip", + "wheel", + "--wheel-dir", + tmpdir, + "--no-deps", + "--no-cache", + "--no-binary", + ":all:", + file.as_posix(), + ], + check=False, + ) + if result.returncode != 0: + console.print(f"[red]Error installing {file.name}") + sys.exit(result.returncode) + console.print(f"[green]Sdist package {file.name} can be built into wheels") + console.print("[green]Sdist package is installed successfully.") console.print("[green]Airflow packages built successfully")
