This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new ffacf3e enter the shell breeze2 environment (#21145)
ffacf3e is described below
commit ffacf3e008bac1d65f5eaa6a718a5920144ecc4a
Author: Bowrna <[email protected]>
AuthorDate: Thu Mar 3 16:53:55 2022 +0530
enter the shell breeze2 environment (#21145)
---
dev/breeze/setup.cfg | 2 +
dev/breeze/src/airflow_breeze/breeze.py | 140 ++++++++++--
dev/breeze/src/airflow_breeze/cache.py | 30 ++-
dev/breeze/src/airflow_breeze/ci/build_image.py | 17 +-
dev/breeze/src/airflow_breeze/ci/build_params.py | 28 ++-
.../docs_generator/build_documentation.py | 6 +-
dev/breeze/src/airflow_breeze/global_constants.py | 136 ++++++++++--
.../airflow_breeze/shell/__init__.py} | 16 --
dev/breeze/src/airflow_breeze/shell/enter_shell.py | 240 +++++++++++++++++++++
.../src/airflow_breeze/shell/shell_builder.py | 231 ++++++++++++++++++++
.../airflow_breeze/utils/docker_command_utils.py | 117 +++++++++-
.../src/airflow_breeze/utils/host_info_utils.py | 62 ++++++
dev/breeze/src/airflow_breeze/utils/path_utils.py | 23 +-
dev/breeze/src/airflow_breeze/utils/run_utils.py | 154 ++++++++++++-
dev/breeze/src/airflow_breeze/visuals/__init__.py | 4 +-
dev/breeze/tests/test_cache.py | 32 ++-
dev/breeze/tests/test_commands.py | 9 +-
dev/breeze/tests/test_docker_command_utils.py | 180 ++++++++++++++++
.../{test_commands.py => test_host_info_utils.py} | 17 +-
dev/breeze/tests/test_run_utils.py | 53 +++++
20 files changed, 1393 insertions(+), 104 deletions(-)
diff --git a/dev/breeze/setup.cfg b/dev/breeze/setup.cfg
index 7928469..18ffcdc 100644
--- a/dev/breeze/setup.cfg
+++ b/dev/breeze/setup.cfg
@@ -61,6 +61,8 @@ install_requires =
rich_click
click_completion
requests
+ psutil
+ inputimeout
[options.packages.find]
where=src
diff --git a/dev/breeze/src/airflow_breeze/breeze.py
b/dev/breeze/src/airflow_breeze/breeze.py
index f85c55d..e6e0e55 100755
--- a/dev/breeze/src/airflow_breeze/breeze.py
+++ b/dev/breeze/src/airflow_breeze/breeze.py
@@ -32,19 +32,27 @@ from airflow_breeze.docs_generator import
build_documentation
from airflow_breeze.docs_generator.doc_builder import DocBuilder
from airflow_breeze.global_constants import (
ALLOWED_BACKENDS,
- ALLOWED_PYTHON_MAJOR_MINOR_VERSION,
+ ALLOWED_DEBIAN_VERSIONS,
+ ALLOWED_EXECUTORS,
+ ALLOWED_INSTALL_AIRFLOW_VERSIONS,
+ ALLOWED_INTEGRATIONS,
+ ALLOWED_MSSQL_VERSIONS,
+ ALLOWED_MYSQL_VERSIONS,
+ ALLOWED_POSTGRES_VERSIONS,
+ ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS,
get_available_packages,
)
from airflow_breeze.pre_commit_ids import PRE_COMMIT_LIST
+from airflow_breeze.shell.enter_shell import build_shell
from airflow_breeze.utils.docker_command_utils import check_docker_resources
from airflow_breeze.utils.path_utils import (
__AIRFLOW_SOURCES_ROOT,
- BUILD_CACHE_DIR,
+ create_directories,
find_airflow_sources_root,
get_airflow_sources_root,
)
from airflow_breeze.utils.run_utils import check_package_installed, run_command
-from airflow_breeze.visuals import ASCIIART, ASCIIART_STYLE, CHEATSHEET,
CHEATSHEET_STYLE
+from airflow_breeze.visuals import ASCIIART, ASCIIART_STYLE
AIRFLOW_SOURCES_DIR =
Path(__file__).resolve().parent.parent.parent.parent.parent
@@ -61,11 +69,39 @@ def main():
option_verbose = click.option(
+ "-v",
"--verbose",
is_flag=True,
help="Print verbose information about performed steps",
)
+option_python_version = click.option(
+ '-p',
+ '--python',
+ type=click.Choice(ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS),
+ help='Choose your python version',
+)
+
+option_backend = click.option(
+ '-b',
+ '--backend',
+ type=click.Choice(ALLOWED_BACKENDS),
+ help='Choose your backend database',
+)
+
+option_github_repository = click.option(
+ '-g', '--github-repository', help='GitHub repository used to pull, push
images. Default: apache/airflow.'
+)
+
+option_github_image_id = click.option(
+ '-s',
+ '--github-image-id',
+ help='Commit SHA of the image. \
+ Breeze can automatically pull the commit SHA id specified. Default: latest',
+)
+
+option_image_tag = click.option('--image-tag', help='Additional tag in the
image.')
+
@main.command()
def version():
@@ -75,19 +111,75 @@ def version():
@option_verbose
[email protected]()
-def shell(verbose: bool):
[email protected](
+ context_settings=dict(
+ ignore_unknown_options=True,
+ allow_extra_args=True,
+ ),
+)
+@option_python_version
+@option_backend
[email protected]('--integration', type=click.Choice(ALLOWED_INTEGRATIONS),
multiple=True)
[email protected]('-L', '--build-cache-local', is_flag=True)
[email protected]('-U', '--build-cache-pulled', is_flag=True)
[email protected]('-X', '--build-cache-disabled', is_flag=True)
[email protected]('--postgres-version',
type=click.Choice(ALLOWED_POSTGRES_VERSIONS))
[email protected]('--mysql-version', type=click.Choice(ALLOWED_MYSQL_VERSIONS))
[email protected]('--mssql-version', type=click.Choice(ALLOWED_MSSQL_VERSIONS))
[email protected](
+ '--executor',
+ type=click.Choice(ALLOWED_EXECUTORS),
+ help='Executor to use in a kubernetes cluster. Default is
KubernetesExecutor',
+)
[email protected]('-f', '--forward-credentials', is_flag=True)
[email protected]('-l', '--skip-mounting-local-sources', is_flag=True)
[email protected]('--use-airflow-version',
type=click.Choice(ALLOWED_INSTALL_AIRFLOW_VERSIONS))
[email protected]('--use-packages-from-dist', is_flag=True)
[email protected]('--force-build', is_flag=True)
[email protected]('extra-args', nargs=-1, type=click.UNPROCESSED)
+def shell(
+ verbose: bool,
+ python: str,
+ backend: str,
+ integration: Tuple[str],
+ build_cache_local: bool,
+ build_cache_pulled: bool,
+ build_cache_disabled: bool,
+ postgres_version: str,
+ mysql_version: str,
+ mssql_version: str,
+ executor: str,
+ forward_credentials: bool,
+ skip_mounting_local_sources: bool,
+ use_airflow_version: str,
+ use_packages_from_dist: bool,
+ force_build: bool,
+ extra_args: Tuple,
+):
    """Enters breeze.py environment. This is the default command used when no
other is selected."""
- from airflow_breeze.cache import read_from_cache_file
if verbose:
console.print("\n[green]Welcome to breeze.py[/]\n")
console.print(f"\n[green]Root of Airflow Sources =
{__AIRFLOW_SOURCES_ROOT}[/]\n")
- if read_from_cache_file('suppress_asciiart') is None:
- console.print(ASCIIART, style=ASCIIART_STYLE)
- if read_from_cache_file('suppress_cheatsheet') is None:
- console.print(CHEATSHEET, style=CHEATSHEET_STYLE)
- raise ClickException("\nPlease implement entering breeze.py\n")
+ build_shell(
+ verbose,
+ python_version=python,
+ backend=backend,
+ integration=integration,
+ build_cache_local=build_cache_local,
+ build_cache_disabled=build_cache_disabled,
+ build_cache_pulled=build_cache_pulled,
+ postgres_version=postgres_version,
+ mysql_version=mysql_version,
+ mssql_version=mssql_version,
+ executor=executor,
+ forward_credentials=str(forward_credentials),
+ skip_mounting_local_sources=skip_mounting_local_sources,
+ use_airflow_version=use_airflow_version,
+ use_packages_from_dist=use_packages_from_dist,
+ force_build=force_build,
+ extra_args=extra_args,
+ )
@option_verbose
@@ -96,7 +188,7 @@ def shell(verbose: bool):
'--additional-extras',
help='This installs additional extra package while installing airflow in
the image.',
)
[email protected]('-p', '--python', help='Choose your python version')
+@option_python_version
@click.option(
'--additional-dev-apt-deps', help='Additional apt dev dependencies to use
when building the images.'
)
@@ -135,11 +227,18 @@ def shell(verbose: bool):
)
@click.option('--github-repository', help='Choose repository to push/pull
image.')
@click.option('--build-cache', help='Cache option')
[email protected]('--platform', help='Builds image for the platform specified.')
[email protected](
+ '-d',
+ '--debian-version',
+ help='Debian version used for the image.',
+ type=click.Choice(ALLOWED_DEBIAN_VERSIONS),
+)
@click.option('--upgrade-to-newer-dependencies', is_flag=True)
def build_ci_image(
verbose: bool,
additional_extras: Optional[str],
- python: Optional[float],
+ python: str,
additional_dev_apt_deps: Optional[str],
additional_runtime_apt_deps: Optional[str],
additional_python_deps: Optional[str],
@@ -153,6 +252,8 @@ def build_ci_image(
runtime_apt_deps: Optional[str],
github_repository: Optional[str],
build_cache: Optional[str],
+ platform: Optional[str],
+ debian_version: Optional[str],
upgrade_to_newer_dependencies: bool,
):
"""Builds docker CI image without entering the container."""
@@ -179,6 +280,8 @@ def build_ci_image(
runtime_apt_deps=runtime_apt_deps,
github_repository=github_repository,
docker_cache=build_cache,
+ platform=platform,
+ debian_version=debian_version,
upgrade_to_newer_dependencies=str(upgrade_to_newer_dependencies).lower(),
)
@@ -265,8 +368,8 @@ def setup_autocomplete():
@main.command(name='config')
[email protected]('--python',
type=click.Choice(ALLOWED_PYTHON_MAJOR_MINOR_VERSION))
[email protected]('--backend', type=click.Choice(ALLOWED_BACKENDS))
+@option_python_version
+@option_backend
@click.option('--cheatsheet/--no-cheatsheet', default=None)
@click.option('--asciiart/--no-asciiart', default=None)
def change_config(python, backend, cheatsheet, asciiart):
@@ -306,13 +409,12 @@ def build_docs(verbose: bool, docs_only: bool,
spellcheck_only: bool, package_fi
"""
params = BuildParams()
airflow_sources = str(get_airflow_sources_root())
- mount_all_flag = False
ci_image_name = params.airflow_ci_image_name
- check_docker_resources(verbose, mount_all_flag, airflow_sources,
ci_image_name)
+ check_docker_resources(verbose, airflow_sources, ci_image_name)
doc_builder = DocBuilder(
package_filter=package_filter, docs_only=docs_only,
spellcheck_only=spellcheck_only
)
- build_documentation.build(verbose, mount_all_flag, airflow_sources,
ci_image_name, doc_builder)
+ build_documentation.build(verbose, airflow_sources, ci_image_name,
doc_builder)
@option_verbose
@@ -356,5 +458,5 @@ def static_check(
if __name__ == '__main__':
- BUILD_CACHE_DIR.mkdir(parents=True, exist_ok=True)
+ create_directories()
main()
diff --git a/dev/breeze/src/airflow_breeze/cache.py
b/dev/breeze/src/airflow_breeze/cache.py
index 82eb41c..c9ae31e 100644
--- a/dev/breeze/src/airflow_breeze/cache.py
+++ b/dev/breeze/src/airflow_breeze/cache.py
@@ -36,8 +36,8 @@ def read_from_cache_file(param_name: str) -> Optional[str]:
return None
-def touch_cache_file(param_name: str):
- (Path(BUILD_CACHE_DIR) / f".{param_name}").touch()
+def touch_cache_file(param_name: str, root_dir: Path = BUILD_CACHE_DIR):
+ (Path(root_dir) / f".{param_name}").touch()
def write_to_cache_file(param_name: str, param_value: str,
check_allowed_values: bool = True) -> None:
@@ -47,7 +47,8 @@ def write_to_cache_file(param_name: str, param_value: str,
check_allowed_values:
allowed, allowed_values = check_if_values_allowed(param_name,
param_value)
if allowed or not check_allowed_values:
print('BUILD CACHE DIR:', BUILD_CACHE_DIR)
- Path(BUILD_CACHE_DIR, f".{param_name}").write_text(param_value)
+ cache_file = Path(BUILD_CACHE_DIR, f".{param_name}").open("w+")
+ cache_file.write(param_value)
else:
console.print(f'[cyan]You have sent the {param_value} for
{param_name}')
console.print(f'[cyan]Allowed value for the {param_name} are
{allowed_values}')
@@ -76,7 +77,7 @@ def check_cache_and_write_if_not_cached(
def check_if_values_allowed(param_name: str, param_value: str) -> Tuple[bool,
List[Any]]:
allowed = False
allowed_values: List[Any] = []
- allowed_values = getattr(global_constants, f'ALLOWED_{param_name.upper()}')
+ allowed_values = getattr(global_constants,
f'ALLOWED_{param_name.upper()}S')
if param_value in allowed_values:
allowed = True
return allowed, allowed_values
@@ -88,3 +89,24 @@ def delete_cache(param_name: str) -> bool:
(Path(BUILD_CACHE_DIR) / f".{param_name}").unlink()
deleted = True
return deleted
+
+
+def update_md5checksum_in_cache(file_content: str, cache_file_name: Path) ->
bool:
+ modified = False
+ if cache_file_name.exists():
+ old_md5_checksum_content = Path(cache_file_name).read_text()
+ if old_md5_checksum_content.strip() != file_content.strip():
+ Path(cache_file_name).write_text(file_content)
+ modified = True
+ else:
+ Path(cache_file_name).write_text(file_content)
+ modified = True
+ return modified
+
+
+def write_env_in_cache(env_variables) -> Path:
+ shell_path = Path(BUILD_CACHE_DIR, "shell_command.env")
+ with open(shell_path, 'w') as shell_env_file:
+ for env_variable in env_variables:
+ shell_env_file.write(env_variable + '\n')
+ return shell_path
diff --git a/dev/breeze/src/airflow_breeze/ci/build_image.py
b/dev/breeze/src/airflow_breeze/ci/build_image.py
index ab5a109..16a9e23 100644
--- a/dev/breeze/src/airflow_breeze/ci/build_image.py
+++ b/dev/breeze/src/airflow_breeze/ci/build_image.py
@@ -17,10 +17,10 @@
from pathlib import Path
from typing import Dict, List
-from airflow_breeze.cache import check_cache_and_write_if_not_cached,
write_to_cache_file
+from airflow_breeze.cache import check_cache_and_write_if_not_cached,
touch_cache_file, write_to_cache_file
from airflow_breeze.ci.build_params import BuildParams
from airflow_breeze.console import console
-from airflow_breeze.utils.path_utils import get_airflow_sources_root
+from airflow_breeze.utils.path_utils import AIRFLOW_SOURCE, BUILD_CACHE_DIR
from airflow_breeze.utils.run_utils import filter_out_none, run_command
PARAMS_CI_IMAGE = [
@@ -74,18 +74,25 @@ def construct_arguments_docker_command(ci_image:
BuildParams) -> List[str]:
def construct_docker_command(ci_image: BuildParams) -> List[str]:
arguments = construct_arguments_docker_command(ci_image)
final_command = []
- final_command.extend(["docker", "build"])
+ final_command.extend(["docker", "buildx", "build", "--builder", "default",
"--progress=tty", "--pull"])
final_command.extend(arguments)
final_command.extend(["-t", ci_image.airflow_ci_image_name, "--target",
"main", "."])
- final_command.extend(["-f", str(Path(get_airflow_sources_root(),
'Dockerfile.ci').resolve())])
+ final_command.extend(["-f", 'Dockerfile.ci'])
+ final_command.extend(["--platform", ci_image.platform])
return final_command
def build_image(verbose, **kwargs):
parameters_passed = filter_out_none(**kwargs)
ci_image_params = get_image_build_params(parameters_passed)
+ ci_image_cache_dir = Path(BUILD_CACHE_DIR, ci_image_params.airflow_branch)
+ ci_image_cache_dir.mkdir(parents=True, exist_ok=True)
+ touch_cache_file(
+ f"built_{ci_image_params.python_version}",
+ root_dir=ci_image_cache_dir,
+ )
cmd = construct_docker_command(ci_image_params)
- output = run_command(cmd, verbose=verbose, text=True)
+ output = run_command(cmd, verbose=verbose, cwd=AIRFLOW_SOURCE, text=True)
console.print(f"[blue]{output}")
diff --git a/dev/breeze/src/airflow_breeze/ci/build_params.py
b/dev/breeze/src/airflow_breeze/ci/build_params.py
index 0e19632..493d86c 100644
--- a/dev/breeze/src/airflow_breeze/ci/build_params.py
+++ b/dev/breeze/src/airflow_breeze/ci/build_params.py
@@ -17,11 +17,10 @@
from dataclasses import dataclass
from datetime import datetime
-from pathlib import Path
from typing import List, Optional
from airflow_breeze.branch_defaults import AIRFLOW_BRANCH,
DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH
-from airflow_breeze.utils.path_utils import get_airflow_sources_root
+from airflow_breeze.global_constants import get_airflow_version
from airflow_breeze.utils.run_utils import run_command
@@ -57,6 +56,8 @@ class BuildParams:
additional_runtime_apt_command: str = ""
additional_runtime_apt_deps: str = ""
additional_runtime_apt_env: str = ""
+ platform: str = "linux/amd64"
+ debian_version: str = "bullseye"
upgrade_to_newer_dependencies: str = "true"
@property
@@ -68,6 +69,18 @@ class BuildParams:
def airflow_ci_image_name(self):
"""Construct CI image link"""
image =
f'{self.airflow_image_name}/{self.airflow_branch}/ci/python{self.python_version}'
+ return image
+
+ @property
+ def airflow_ci_image_name_with_cache(self):
+ """Construct CI image link"""
+ image =
f'{self.airflow_image_name}/{self.airflow_branch}/ci/python{self.python_version}:cache'
+ return image
+
+ @property
+ def airflow_ci_image_name_with_tag(self):
+ """Construct CI image link"""
+ image =
f'{self.airflow_image_name}/{self.airflow_branch}/ci/python{self.python_version}'
return image if not self.tag else image + f":{self.tag}"
@property
@@ -77,8 +90,7 @@ class BuildParams:
@property
def python_base_image(self):
"""Construct Python Base Image"""
- # ghcr.io/apache/airflow/main/python:3.8-slim-bullseye
- return
f'{self.airflow_image_name}/{self.airflow_branch}/python:{self.python_version}-slim-bullseye'
+ return f'python:{self.python_version}-slim-{self.debian_version}'
@property
def airflow_ci_local_manifest_image(self):
@@ -107,7 +119,7 @@ class BuildParams:
docker_cache_ci_directive = []
if self.docker_cache == "pulled":
docker_cache_ci_directive.append("--cache-from")
- docker_cache_ci_directive.append(self.airflow_ci_image_name)
+
docker_cache_ci_directive.append(self.airflow_ci_image_name_with_cache)
elif self.docker_cache == "disabled":
docker_cache_ci_directive.append("--no-cache")
else:
@@ -116,8 +128,4 @@ class BuildParams:
@property
def airflow_version(self):
- airflow_setup_file = Path(get_airflow_sources_root()) / 'setup.py'
- with open(airflow_setup_file) as setup_file:
- for line in setup_file.readlines():
- if "version =" in line:
- return line.split()[2][1:-1]
+ return get_airflow_version()
diff --git
a/dev/breeze/src/airflow_breeze/docs_generator/build_documentation.py
b/dev/breeze/src/airflow_breeze/docs_generator/build_documentation.py
index 8c77419..91f6f46 100644
--- a/dev/breeze/src/airflow_breeze/docs_generator/build_documentation.py
+++ b/dev/breeze/src/airflow_breeze/docs_generator/build_documentation.py
@@ -15,18 +15,20 @@
# specific language governing permissions and limitations
# under the License.
from airflow_breeze.docs_generator.doc_builder import DocBuilder
+from airflow_breeze.global_constants import MOUNT_ALL_LOCAL_SOURCES,
MOUNT_SELECTED_LOCAL_SOURCES
from airflow_breeze.utils.docker_command_utils import get_extra_docker_flags
from airflow_breeze.utils.run_utils import run_command
def build(
verbose: bool,
- mount_all_flag: bool,
airflow_sources: str,
airflow_ci_image_name: str,
doc_builder: DocBuilder,
):
- extra_docker_flags = get_extra_docker_flags(mount_all_flag,
airflow_sources)
+ extra_docker_flags = get_extra_docker_flags(
+ MOUNT_ALL_LOCAL_SOURCES, MOUNT_SELECTED_LOCAL_SOURCES, airflow_sources
+ )
cmd = []
cmd.extend(["docker", "run"])
cmd.extend(extra_docker_flags)
diff --git a/dev/breeze/src/airflow_breeze/global_constants.py
b/dev/breeze/src/airflow_breeze/global_constants.py
index ea8ae9f..48c8f5d 100644
--- a/dev/breeze/src/airflow_breeze/global_constants.py
+++ b/dev/breeze/src/airflow_breeze/global_constants.py
@@ -19,19 +19,19 @@ from typing import List
from airflow_breeze.utils.path_utils import get_airflow_sources_root
-AIRFLOW_SOURCES = ""
-
-FORCE_PULL_IMAGES = False
-CHECK_IF_BASE_PYTHON_IMAGE_UPDATED = False
+# Commented this out as we are using buildkit and these vars became irrelevant
+# FORCE_PULL_IMAGES = False
+# CHECK_IF_BASE_PYTHON_IMAGE_UPDATED = False
FORCE_BUILD_IMAGES = False
FORCE_ANSWER_TO_QUESTION = ""
SKIP_CHECK_REMOTE_IMAGE = False
-PUSH_PYTHON_BASE_IMAGE = False
+# PUSH_PYTHON_BASE_IMAGE = False
DEFAULT_PYTHON_MAJOR_MINOR_VERSION = '3.7'
DEFAULT_BACKEND = 'sqlite'
-ALLOWED_PYTHON_MAJOR_MINOR_VERSION = ['3.6', '3.7', '3.8', '3.9']
+# Checked before putting in build cache
+ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS = ['3.6', '3.7', '3.8', '3.9']
ALLOWED_BACKENDS = ['sqlite', 'mysql', 'postgres', 'mssql']
ALLOWED_STATIC_CHECKS = [
"all",
@@ -153,7 +153,8 @@ ALLOWED_TEST_TYPES = [
'Quarantined',
]
ALLOWED_PACKAGE_FORMATS = ['both', 'sdist', 'wheel']
-ALLOWED_USE_AIRFLOW_VERSION = ['.', 'apache-airflow']
+ALLOWED_USE_AIRFLOW_VERSIONS = ['.', 'apache-airflow']
+ALLOWED_DEBIAN_VERSIONS = ['buster', 'bullseye']
PARAM_NAME_DESCRIPTION = {
"BACKEND": "backend",
@@ -179,15 +180,6 @@ PARAM_NAME_FLAG = {
"MSSQL_VERSION": "--mssql-version",
}
-
-SSH_PORT = "12322"
-WEBSERVER_HOST_PORT = "28080"
-POSTGRES_HOST_PORT = "25433"
-MYSQL_HOST_PORT = "23306"
-MSSQL_HOST_PORT = "21433"
-FLOWER_HOST_PORT = "25555"
-REDIS_HOST_PORT = "26379"
-
EXCLUDE_DOCS_PACKAGE_FOLDER = [
'exts',
'integration-logos',
@@ -204,4 +196,114 @@ def get_available_packages() -> List[str]:
return list(set(available_packages) - set(EXCLUDE_DOCS_PACKAGE_FOLDER))
-EXTRA_STATIC_CHECK_OPTIONS = "--show-diff-on-failure"
+# Initialise base variables
+DOCKER_DEFAULT_PLATFORM = "linux/amd64"
+DOCKER_BUILDKIT = 1
+
+SSH_PORT = "12322"
+WEBSERVER_HOST_PORT = "28080"
+POSTGRES_HOST_PORT = "25433"
+MYSQL_HOST_PORT = "23306"
+MSSQL_HOST_PORT = "21433"
+FLOWER_HOST_PORT = "25555"
+REDIS_HOST_PORT = "26379"
+
+SQLITE_URL = "sqlite:////root/airflow/airflow.db"
+PYTHONDONTWRITEBYTECODE = True
+
+PRODUCTION_IMAGE = False
+ALL_PYTHON_MAJOR_MINOR_VERSIONS = ['3.6', '3.7', '3.8', '3.9']
+CURRENT_PYTHON_MAJOR_MINOR_VERSIONS = ['3.6', '3.7', '3.8', '3.9']
+CURRENT_POSTGRES_VERSIONS = ['10', '11', '12', '13']
+CURRENT_MYSQL_VERSIONS = ['5.7', '8']
+CURRENT_MSSQL_VERSIONS = ['2017-latest', '2019-latest']
+POSTGRES_VERSION = CURRENT_POSTGRES_VERSIONS[0]
+MYSQL_VERSION = CURRENT_MYSQL_VERSIONS[0]
+MSSQL_VERSION = CURRENT_MSSQL_VERSIONS[0]
+DB_RESET = False
+START_AIRFLOW = "false"
+LOAD_EXAMPLES = False
+LOAD_DEFAULT_CONNECTIONS = False
+PRESERVE_VOLUMES = False
+CLEANUP_DOCKER_CONTEXT_FILES = False
+INIT_SCRIPT_FILE = ""
+DRY_RUN_DOCKER = False
+INSTALL_AIRFLOW_VERSION = ""
+SQLITE_URL = "sqlite:////root/airflow/airflow.db"
+
+
+def get_airflow_version():
+ airflow_setup_file = Path(get_airflow_sources_root()) / 'setup.py'
+ with open(airflow_setup_file) as setup_file:
+ for line in setup_file.readlines():
+ if "version =" in line:
+ return line.split()[2][1:-1]
+
+
+# Initialize integrations
+AVAILABLE_INTEGRATIONS = [
+ 'cassandra',
+ 'kerberos',
+ 'mongo',
+ 'openldap',
+ 'pinot',
+ 'rabbitmq',
+ 'redis',
+ 'statsd',
+ 'trino',
+]
+ENABLED_INTEGRATIONS = ""
+# Initialize files for rebuild check
+FILES_FOR_REBUILD_CHECK = [
+ 'setup.py',
+ 'setup.cfg',
+ 'Dockerfile.ci',
+ '.dockerignore',
+ 'scripts/docker/compile_www_assets.sh',
+ 'scripts/docker/common.sh',
+ 'scripts/docker/install_additional_dependencies.sh',
+ 'scripts/docker/install_airflow.sh',
+ 'scripts/docker/install_airflow_dependencies_from_branch_tip.sh',
+ 'scripts/docker/install_from_docker_context_files.sh',
+ 'scripts/docker/install_mysql.sh',
+ 'airflow/www/package.json',
+ 'airflow/www/yarn.lock',
+ 'airflow/www/webpack.config.js',
+ 'airflow/ui/package.json',
+ 'airflow/ui/yarn.lock',
+]
+
+# Initialize mount variables
+MOUNT_SELECTED_LOCAL_SOURCES = True
+MOUNT_ALL_LOCAL_SOURCES = False
+
+ENABLED_SYSTEMS = ""
+
+
+CURRENT_KUBERNETES_MODES = ['image']
+CURRENT_KUBERNETES_VERSIONS = ['v1.21.1', 'v1.20.2']
+CURRENT_KIND_VERSIONS = ['v0.11.1']
+CURRENT_HELM_VERSIONS = ['v3.6.3']
+CURRENT_EXECUTORS = ['KubernetesExecutor']
+
+DEFAULT_KUBERNETES_MODES = CURRENT_KUBERNETES_MODES[0]
+DEFAULT_KUBERNETES_VERSIONS = CURRENT_KUBERNETES_VERSIONS[0]
+DEFAULT_KIND_VERSIONS = CURRENT_KIND_VERSIONS[0]
+DEFAULT_HELM_VERSIONS = CURRENT_HELM_VERSIONS[0]
+DEFAULT_EXECUTOR = CURRENT_EXECUTORS[0]
+
+# Initialize image build variables - Have to check if this has to go to ci
dataclass
+SKIP_TWINE_CHECK = ""
+USE_AIRFLOW_VERSION = ""
+GITHUB_ACTIONS = ""
+
+ISSUE_ID = ""
+NUM_RUNS = ""
+
+# Initialize package variables
+PACKAGE_FORMAT = "wheel"
+VERSION_SUFFIX_FOR_SVN = ""
+VERSION_SUFFIX_FOR_PYPI = ""
+
+MIN_DOCKER_VERSION = "20.10.0"
+MIN_DOCKER_COMPOSE_VERSION = "1.29.0"
diff --git a/dev/breeze/tests/test_commands.py
b/dev/breeze/src/airflow_breeze/shell/__init__.py
similarity index 60%
copy from dev/breeze/tests/test_commands.py
copy to dev/breeze/src/airflow_breeze/shell/__init__.py
index e1aaa12..13a8339 100644
--- a/dev/breeze/tests/test_commands.py
+++ b/dev/breeze/src/airflow_breeze/shell/__init__.py
@@ -14,19 +14,3 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-
-from airflow_breeze.utils.docker_command_utils import get_extra_docker_flags
-from airflow_breeze.utils.path_utils import get_airflow_sources_root
-from airflow_breeze.visuals import ASCIIART
-
-
-def test_visuals():
- assert 2051 == len(ASCIIART)
-
-
-def test_get_extra_docker_flags():
- airflow_sources = get_airflow_sources_root()
- all = True
- assert len(get_extra_docker_flags(all, str(airflow_sources))) < 10
- all = False
- assert len(get_extra_docker_flags(all, str(airflow_sources))) > 60
diff --git a/dev/breeze/src/airflow_breeze/shell/enter_shell.py
b/dev/breeze/src/airflow_breeze/shell/enter_shell.py
new file mode 100644
index 0000000..a4d8a61
--- /dev/null
+++ b/dev/breeze/src/airflow_breeze/shell/enter_shell.py
@@ -0,0 +1,240 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import sys
+from pathlib import Path
+from typing import Dict
+
+import click
+from inputimeout import TimeoutOccurred, inputimeout
+
+from airflow_breeze import global_constants
+from airflow_breeze.cache import (
+ check_cache_and_write_if_not_cached,
+ read_from_cache_file,
+ write_to_cache_file,
+)
+from airflow_breeze.ci.build_image import build_image
+from airflow_breeze.console import console
+from airflow_breeze.global_constants import (
+ FLOWER_HOST_PORT,
+ MSSQL_HOST_PORT,
+ MSSQL_VERSION,
+ MYSQL_HOST_PORT,
+ MYSQL_VERSION,
+ POSTGRES_HOST_PORT,
+ POSTGRES_VERSION,
+ REDIS_HOST_PORT,
+ SSH_PORT,
+ WEBSERVER_HOST_PORT,
+)
+from airflow_breeze.shell.shell_builder import ShellBuilder
+from airflow_breeze.utils.docker_command_utils import (
+ check_docker_compose_version,
+ check_docker_resources,
+ check_docker_version,
+)
+from airflow_breeze.utils.path_utils import BUILD_CACHE_DIR
+from airflow_breeze.utils.run_utils import (
+ filter_out_none,
+ fix_group_permissions,
+ get_latest_sha,
+ instruct_build_image,
+ instruct_for_setup,
+ is_repo_rebased,
+ md5sum_check_if_build_is_needed,
+ run_command,
+)
+from airflow_breeze.visuals import ASCIIART, ASCIIART_STYLE, CHEATSHEET,
CHEATSHEET_STYLE
+
+PARAMS_TO_ENTER_SHELL = {
+ "HOST_USER_ID": "host_user_id",
+ "HOST_GROUP_ID": "host_group_id",
+ "COMPOSE_FILE": "compose_files",
+ "PYTHON_MAJOR_MINOR_VERSION": "python_version",
+ "BACKEND": "backend",
+ "AIRFLOW_VERSION": "airflow_version",
+ "INSTALL_AIRFLOW_VERSION": "install_airflow_version",
+ "AIRFLOW_SOURCES": "airflow_sources",
+ "AIRFLOW_CI_IMAGE": "airflow_ci_image_name",
+ "AIRFLOW_CI_IMAGE_WITH_TAG": "airflow_ci_image_name_with_tag",
+ "AIRFLOW_PROD_IMAGE": "airflow_prod_image_name",
+ "AIRFLOW_IMAGE_KUBERNETES": "airflow_image_kubernetes",
+ "SQLITE_URL": "sqlite_url",
+ "USE_AIRFLOW_VERSION": "use_airflow_version",
+ "SKIP_TWINE_CHECK": "skip_twine_check",
+ "USE_PACKAGES_FROM_DIST": "use_packages_from_dist",
+ "EXECUTOR": "executor",
+ "START_AIRFLOW": "start_airflow",
+ "ENABLED_INTEGRATIONS": "enabled_integrations",
+ "GITHUB_ACTIONS": "github_actions",
+ "ISSUE_ID": "issue_id",
+ "NUM_RUNS": "num_runs",
+ "VERSION_SUFFIX_FOR_SVN": "version_suffix_for_svn",
+ "VERSION_SUFFIX_FOR_PYPI": "version_suffix_for_pypi",
+}
+
+PARAMS_FOR_SHELL_CONSTANTS = {
+ "SSH_PORT": SSH_PORT,
+ "WEBSERVER_HOST_PORT": WEBSERVER_HOST_PORT,
+ "FLOWER_HOST_PORT": FLOWER_HOST_PORT,
+ "REDIS_HOST_PORT": REDIS_HOST_PORT,
+ "MYSQL_HOST_PORT": MYSQL_HOST_PORT,
+ "MYSQL_VERSION": MYSQL_VERSION,
+ "MSSQL_HOST_PORT": MSSQL_HOST_PORT,
+ "MSSQL_VERSION": MSSQL_VERSION,
+ "POSTGRES_HOST_PORT": POSTGRES_HOST_PORT,
+ "POSTGRES_VERSION": POSTGRES_VERSION,
+}
+
+PARAMS_IN_CACHE = {
+ 'python_version': 'PYTHON_MAJOR_MINOR_VERSION',
+ 'backend': 'BACKEND',
+ 'executor': 'EXECUTOR',
+ 'postgres_version': 'POSTGRES_VERSION',
+ 'mysql_version': 'MYSQL_VERSION',
+ 'mssql_version': 'MSSQL_VERSION',
+}
+
+DEFAULT_VALUES_FOR_PARAM = {
+ 'python_version': 'DEFAULT_PYTHON_MAJOR_MINOR_VERSION',
+ 'backend': 'DEFAULT_BACKEND',
+ 'executor': 'DEFAULT_EXECUTOR',
+ 'postgres_version': 'POSTGRES_VERSION',
+ 'mysql_version': 'MYSQL_VERSION',
+ 'mssql_version': 'MSSQL_VERSION',
+}
+
+
+def construct_env_variables_docker_compose_command(shell_params: ShellBuilder)
-> Dict[str, str]:
+ env_variables: Dict[str, str] = {}
+ for param_name in PARAMS_TO_ENTER_SHELL:
+ param_value = PARAMS_TO_ENTER_SHELL[param_name]
+ env_variables[param_name] = str(getattr(shell_params, param_value))
+ for constant_param_name in PARAMS_FOR_SHELL_CONSTANTS:
+ constant_param_value = PARAMS_FOR_SHELL_CONSTANTS[constant_param_name]
+ env_variables[constant_param_name] = str(constant_param_value)
+ return env_variables
+
+
+def build_image_if_needed_steps(verbose: bool, shell_params: ShellBuilder):
+ build_needed =
md5sum_check_if_build_is_needed(shell_params.md5sum_cache_dir,
shell_params.the_image_type)
+ if build_needed:
+ try:
+ user_status = inputimeout(
+ prompt='\nDo you want to build image? Press y/n/q in 5
seconds\n',
+ timeout=5,
+ )
+ if user_status == 'y':
+ latest_sha = get_latest_sha(shell_params.github_repository,
shell_params.airflow_branch)
+ if is_repo_rebased(latest_sha):
+ build_image(
+ verbose,
+ python_version=shell_params.python_version,
+ upgrade_to_newer_dependencies="false",
+ )
+ else:
+ if click.confirm(
+ "\nThis might take a lot of time, we think you should
rebase first. \
+ But if you really, really want - you can do it\n"
+ ):
+ build_image(
+ verbose,
+ python_version=shell_params.python_version,
+ upgrade_to_newer_dependencies="false",
+ )
+ else:
+ console.print(
+ '\nPlease rebase your code before continuing.\
+ Check this link to know more \
+
https://github.com/apache/airflow/blob/main/CONTRIBUTING.rst#id15\n'
+ )
+ console.print('Exiting the process')
+ sys.exit()
+ elif user_status == 'n':
+ instruct_build_image(shell_params.the_image_type,
shell_params.python_version)
+ elif user_status == 'q':
+ console.print('\nQuitting the process')
+ sys.exit()
+ else:
+ console.print('\nYou have given a wrong choice:', user_status,
' Quitting the process')
+ sys.exit()
+ except TimeoutOccurred:
+ console.print('\nTimeout. Considering your response as No\n')
+ instruct_build_image(shell_params.the_image_type,
shell_params.python_version)
+ except Exception:
+ console.print('\nTerminating the process')
+ sys.exit()
+
+
+def build_image_checks(verbose: bool, shell_params: ShellBuilder):
+ fix_group_permissions()
+ build_ci_image_check_cache = Path(
+ BUILD_CACHE_DIR, shell_params.airflow_branch,
f".built_{shell_params.python_version}"
+ )
+ if build_ci_image_check_cache.exists():
+ console.print(f'{shell_params.the_image_type} image already built
locally.')
+ else:
+ console.print(f'{shell_params.the_image_type} image not built locally')
+
+ if not shell_params.force_build:
+ build_image_if_needed_steps(verbose, shell_params)
+ else:
+ build_image(
+ verbose,
+ python_version=shell_params.python_version,
+ upgrade_to_newer_dependencies="false",
+ )
+
+ instruct_for_setup()
+ check_docker_resources(verbose, str(shell_params.airflow_sources),
shell_params.airflow_ci_image_name)
+ cmd = ['docker-compose', 'run', '--service-ports', '--rm', 'airflow']
+ cmd_added = shell_params.command_passed
+ env_variables =
construct_env_variables_docker_compose_command(shell_params)
+ if cmd_added is not None:
+ cmd.extend(['-c', cmd_added])
+ if verbose:
+ shell_params.print_badge_info()
+ output = run_command(cmd, verbose=verbose, env=env_variables, text=True)
+ if verbose:
+ console.print(f"[blue]{output}[/]")
+
+
+def get_cached_params(user_params) -> Dict:
+ updated_params = dict(user_params)
+ for param in PARAMS_IN_CACHE:
+ if param in user_params:
+ param_name = PARAMS_IN_CACHE[param]
+ user_param_value = user_params[param]
+ if user_param_value is not None:
+ write_to_cache_file(param_name, user_param_value)
+ else:
+ param_value = getattr(global_constants,
DEFAULT_VALUES_FOR_PARAM[param])
+ _, user_param_value =
check_cache_and_write_if_not_cached(param_name, param_value)
+ updated_params[param] = user_param_value
+ return updated_params
+
+
+def build_shell(verbose, **kwargs):
+ check_docker_version(verbose)
+ check_docker_compose_version(verbose)
+ updated_kwargs = get_cached_params(kwargs)
+ if read_from_cache_file('suppress_asciiart') is None:
+ console.print(ASCIIART, style=ASCIIART_STYLE)
+ if read_from_cache_file('suppress_cheatsheet') is None:
+ console.print(CHEATSHEET, style=CHEATSHEET_STYLE)
+ enter_shell_params = ShellBuilder(**filter_out_none(**updated_kwargs))
+ build_image_checks(verbose, enter_shell_params)
diff --git a/dev/breeze/src/airflow_breeze/shell/shell_builder.py
b/dev/breeze/src/airflow_breeze/shell/shell_builder.py
new file mode 100644
index 0000000..051b2d3
--- /dev/null
+++ b/dev/breeze/src/airflow_breeze/shell/shell_builder.py
@@ -0,0 +1,231 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Tuple
+
+from airflow_breeze.branch_defaults import AIRFLOW_BRANCH
+from airflow_breeze.console import console
+from airflow_breeze.global_constants import AVAILABLE_INTEGRATIONS,
get_airflow_version
+from airflow_breeze.utils.host_info_utils import get_host_group_id,
get_host_user_id, get_stat_bin
+from airflow_breeze.utils.path_utils import AIRFLOW_SOURCE, BUILD_CACHE_DIR,
SCRIPTS_CI_DIR
+from airflow_breeze.utils.run_utils import get_filesystem_type, run_command
+
+
@dataclass
class ShellBuilder:
    """Parameters and derived values needed to enter the Breeze CI shell.

    Field defaults mirror the CLI defaults. Fields marked "check in cache"
    are resolved against the Breeze value cache before this object is built.
    """

    python_version: str  # check in cache
    build_cache_local: bool
    build_cache_pulled: bool
    build_cache_disabled: bool
    backend: str  # check in cache
    integration: Tuple[str, ...]  # check in cache (was Tuple[str]: that means 1-tuple)
    postgres_version: str  # check in cache
    mssql_version: str  # check in cache
    mysql_version: str  # check in cache
    force_build: bool
    extra_args: Tuple
    use_airflow_version: str = ""
    install_airflow_version: str = ""
    tag: str = "latest"
    github_repository: str = "apache/airflow"
    skip_mounting_local_sources: bool = False
    mount_all_local_sources: bool = False
    forward_credentials: str = "false"
    airflow_branch: str = AIRFLOW_BRANCH
    executor: str = "KubernetesExecutor"  # check in cache
    start_airflow: str = "false"
    skip_twine_check: str = ""
    use_packages_from_dist: str = "false"
    github_actions: str = ""
    issue_id: str = ""
    num_runs: str = ""
    version_suffix_for_pypi: str = ""
    version_suffix_for_svn: str = ""

    @property
    def airflow_version(self):
        """Airflow version as read from the local sources."""
        return get_airflow_version()

    @property
    def airflow_version_for_production_image(self):
        """Ask the PROD image itself which AIRFLOW_VERSION it carries."""
        cmd = ['docker', 'run', '--entrypoint', '/bin/bash', f'{self.airflow_prod_image_name}']
        cmd.extend(['-c', 'echo "${AIRFLOW_VERSION}"'])
        output = run_command(cmd, capture_output=True, text=True)
        return output.stdout.strip()

    @property
    def host_user_id(self):
        """Numeric uid of the host user (empty string on Windows)."""
        return get_host_user_id()

    @property
    def host_group_id(self):
        """Numeric gid of the host user (empty string on Windows)."""
        return get_host_group_id()

    @property
    def airflow_image_name(self) -> str:
        """Base ghcr.io image prefix for the configured repository."""
        return f'ghcr.io/{self.github_repository.lower()}'

    @property
    def airflow_ci_image_name(self) -> str:
        """Construct CI image link"""
        return f'{self.airflow_image_name}/{self.airflow_branch}/ci/python{self.python_version}'

    @property
    def airflow_ci_image_name_with_tag(self) -> str:
        """CI image name, with ':tag' appended when a tag is set."""
        image = self.airflow_ci_image_name
        return image if not self.tag else image + f":{self.tag}"

    @property
    def airflow_prod_image_name(self) -> str:
        """Production image link for this branch/python version."""
        return f'{self.airflow_image_name}/{self.airflow_branch}/prod/python{self.python_version}'

    @property
    def airflow_image_kubernetes(self) -> str:
        """Kubernetes test image link for this branch/python version."""
        return f'{self.airflow_image_name}/{self.airflow_branch}/kubernetes/python{self.python_version}'

    @property
    def airflow_sources(self):
        """Root of the Airflow source tree."""
        return AIRFLOW_SOURCE

    @property
    def docker_cache(self) -> str:
        """Cache strategy: 'local' and 'disabled' flags win over default 'pulled'."""
        if self.build_cache_local:
            return "local"
        if self.build_cache_disabled:
            return "disabled"
        return "pulled"

    @property
    def mount_selected_local_sources(self) -> bool:
        """True unless the user asked to mount everything or nothing."""
        return not (self.mount_all_local_sources or self.skip_mounting_local_sources)

    @property
    def enabled_integrations(self) -> str:
        """Space-separated list of integrations to start ('all' expands)."""
        if "all" in self.integration:
            return " ".join(AVAILABLE_INTEGRATIONS)
        if len(self.integration) > 0:
            return " ".join(self.integration)
        return ""

    @property
    def the_image_type(self) -> str:
        """This builder always targets the CI image."""
        return 'CI'

    @property
    def image_description(self) -> str:
        """Human-readable image description."""
        return 'Airflow CI'

    @property
    def md5sum_cache_dir(self) -> Path:
        """Directory holding the md5 checksums used for rebuild detection."""
        return Path(BUILD_CACHE_DIR, self.airflow_branch, self.python_version, self.the_image_type)

    @property
    def backend_version(self) -> str:
        """Version string of the selected database backend ('' for sqlite)."""
        if self.backend == 'postgres':
            return self.postgres_version
        if self.backend == 'mysql':
            return self.mysql_version
        if self.backend == 'mssql':
            return self.mssql_version
        return ''

    @property
    def sqlite_url(self) -> str:
        """Connection URL used for the sqlite backend inside the container."""
        return "sqlite:////root/airflow/airflow.db"

    def print_badge_info(self):
        """Print a summary of the selected shell configuration."""
        console.print(f'Use {self.the_image_type} image')
        console.print(f'Branch Name: {self.airflow_branch}')
        console.print(f'Docker Image: {self.airflow_ci_image_name_with_tag}')
        console.print(f'Airflow source version:{self.airflow_version}')
        console.print(f'Python Version: {self.python_version}')
        console.print(f'Backend: {self.backend} {self.backend_version}')
        console.print(f'Airflow used at runtime: {self.use_airflow_version}')

    @property
    def compose_files(self):
        """Colon-separated docker-compose files matching the selected configuration."""
        compose_ci_file = []
        main_ci_docker_compose_file = f"{str(SCRIPTS_CI_DIR)}/docker-compose/base.yml"
        backend_docker_compose_file = f"{str(SCRIPTS_CI_DIR)}/docker-compose/backend-{self.backend}.yml"
        backend_port_docker_compose_file = f"{str(SCRIPTS_CI_DIR)}/docker-compose/backend-{self.backend}-port.yml"
        local_docker_compose_file = f"{str(SCRIPTS_CI_DIR)}/docker-compose/local.yml"
        local_all_sources_docker_compose_file = f"{str(SCRIPTS_CI_DIR)}/docker-compose/local-all-sources.yml"
        files_docker_compose_file = f"{str(SCRIPTS_CI_DIR)}/docker-compose/files.yml"
        remove_sources_docker_compose_file = f"{str(SCRIPTS_CI_DIR)}/docker-compose/remove-sources.yml"
        forward_credentials_docker_compose_file = (
            f"{str(SCRIPTS_CI_DIR)}/docker-compose/forward-credentials.yml"
        )
        # mssql needs a different volume setup on tmpfs filesystems.
        if self.backend == 'mssql':
            docker_filesystem = get_filesystem_type('.')
            if docker_filesystem == 'tmpfs':
                compose_ci_file.append(f"{str(SCRIPTS_CI_DIR)}/docker-compose/backend-mssql-bind-volume.yml")
            else:
                compose_ci_file.append(
                    f"{str(SCRIPTS_CI_DIR)}/docker-compose/backend-mssql-docker-volume.yml"
                )
        compose_ci_file.extend(
            [main_ci_docker_compose_file, backend_docker_compose_file, files_docker_compose_file]
        )

        if self.mount_selected_local_sources:
            compose_ci_file.extend([local_docker_compose_file, backend_port_docker_compose_file])
        if self.mount_all_local_sources:
            compose_ci_file.extend([local_all_sources_docker_compose_file, backend_port_docker_compose_file])
        # BUG FIX: the original tested `if self.forward_credentials:`, but the field is
        # the *string* "false"/"true" (see the default above) and any non-empty string is
        # truthy, so credentials were forwarded unconditionally. Compare explicitly.
        # NOTE(review): assumes callers serialize the flag as "true"/"false" - confirm.
        if self.forward_credentials == "true":
            compose_ci_file.append(forward_credentials_docker_compose_file)
        if len(self.use_airflow_version) > 0:
            compose_ci_file.append(remove_sources_docker_compose_file)
        if "all" in self.integration:
            integrations = AVAILABLE_INTEGRATIONS
        else:
            integrations = self.integration
        for integration in integrations:
            compose_ci_file.append(f"{str(SCRIPTS_CI_DIR)}/docker-compose/integration-{integration}.yml")
        return ':'.join(compose_ci_file)

    @property
    def command_passed(self):
        """First extra CLI argument, to be run with `bash -c` in the container (or None)."""
        return str(self.extra_args[0]) if len(self.extra_args) > 0 else None

    @property
    def get_stat_bin(self):
        """Name of the (GNU) stat binary appropriate for the host OS."""
        return get_stat_bin()
diff --git a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py
b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py
index 24d5c4a..bff8d2c 100644
--- a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py
+++ b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py
@@ -14,9 +14,19 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-
+import re
+import subprocess
from typing import List
+from packaging import version
+
+from airflow_breeze.console import console
+from airflow_breeze.global_constants import (
+ MIN_DOCKER_COMPOSE_VERSION,
+ MIN_DOCKER_VERSION,
+ MOUNT_ALL_LOCAL_SOURCES,
+ MOUNT_SELECTED_LOCAL_SOURCES,
+)
from airflow_breeze.utils.run_utils import run_command
NECESSARY_HOST_VOLUMES = [
@@ -53,14 +63,17 @@ NECESSARY_HOST_VOLUMES = [
]
-def get_extra_docker_flags(all: bool, airflow_sources: str) -> List:
+def get_extra_docker_flags(all: bool, selected: bool, airflow_sources: str) ->
List:
# get_extra_docker_flags(False, str(airflow_source))
+ # add verbosity
EXTRA_DOCKER_FLAGS = []
if all:
EXTRA_DOCKER_FLAGS.extend(["-v",
f"{airflow_sources}:/opt/airflow/:cached"])
- else:
+ elif selected:
for flag in NECESSARY_HOST_VOLUMES:
EXTRA_DOCKER_FLAGS.extend(["-v", airflow_sources + flag])
+ else:
+ console.print('Skip mounting host volumes to Docker')
EXTRA_DOCKER_FLAGS.extend(["-v", f"{airflow_sources}/files:/files"])
EXTRA_DOCKER_FLAGS.extend(["-v", f"{airflow_sources}/dist:/dist"])
EXTRA_DOCKER_FLAGS.extend(["--rm"])
@@ -68,13 +81,103 @@ def get_extra_docker_flags(all: bool, airflow_sources:
str) -> List:
return EXTRA_DOCKER_FLAGS
-def check_docker_resources(
- verbose: bool, mount_all_flag: bool, airflow_sources: str,
airflow_ci_image_name: str
-):
- extra_docker_flags = get_extra_docker_flags(mount_all_flag,
airflow_sources)
def check_docker_resources(verbose: bool, airflow_sources: str, airflow_ci_image_name: str):
    """Run the in-container resource check script inside the CI image."""
    mount_flags = get_extra_docker_flags(
        MOUNT_ALL_LOCAL_SOURCES, MOUNT_SELECTED_LOCAL_SOURCES, airflow_sources
    )
    cmd = (
        ["docker", "run", "-t"]
        + mount_flags
        + ["--entrypoint", "/bin/bash", airflow_ci_image_name]
        + ["-c", "python /opt/airflow/scripts/in_container/run_resource_check.py"]
    )
    run_command(cmd, verbose=verbose, text=True)
+
+
def check_docker_permission(verbose) -> bool:
    """Return True when `docker info` fails with a permission-denied error.

    When permission is denied, print advice on adding the user to the
    `docker` group.
    """
    permission_denied = False
    docker_permission_command = ["docker", "info"]
    try:
        _ = run_command(
            docker_permission_command,
            verbose=verbose,
            suppress_console_print=True,
            capture_output=True,
            text=True,
        )
    except subprocess.CalledProcessError as ex:
        permission_denied = True
        if ex.stdout and 'Got permission denied while trying to connect' in ex.stdout:
            console.print('ERROR: You have `permission denied` error when trying to communicate with docker.')
            # Adjacent string literals instead of backslash continuation: the original
            # embedded a run of indentation spaces into the printed message and split
            # the URL ("docs.docker.com/ engine/..."), producing a broken link.
            console.print(
                'Most likely you need to add your user to `docker` group: '
                'https://docs.docker.com/engine/install/linux-postinstall/ .'
            )
    return permission_denied
+
+
def compare_version(current_version: str, min_version: str) -> bool:
    """Return True when *current_version* is at least *min_version* (PEP 440 aware)."""
    return version.parse(min_version) <= version.parse(current_version)
+
+
def check_docker_version(verbose: bool):
    """Warn when the docker client is missing, of unknown version, or older than MIN_DOCKER_VERSION."""
    permission_denied = check_docker_permission(verbose)
    if not permission_denied:
        docker_version_command = ['docker', 'version', '--format', '{{.Client.Version}}']
        docker_version = ''
        docker_version_output = run_command(
            docker_version_command,
            verbose=verbose,
            suppress_console_print=True,
            capture_output=True,
            text=True,
        )
        if docker_version_output.returncode == 0:
            docker_version = docker_version_output.stdout.strip()
        if docker_version == '':
            # Adjacent literals instead of backslash continuation: the original embedded
            # the source indentation into the message. The unit tests in this change
            # (test_check_docker_version_unknown) expect the single-space form.
            console.print(
                f'Your version of docker is unknown. If the scripts fail, please make sure to'
                f' install docker at least: {MIN_DOCKER_VERSION} version.'
            )
        else:
            good_version = compare_version(docker_version, MIN_DOCKER_VERSION)
            if good_version:
                console.print(f'Good version of Docker: {docker_version}.')
            else:
                console.print(
                    f'Your version of docker is too old:{docker_version}.'
                    f' Please upgrade to at least {MIN_DOCKER_VERSION}'
                )
+
+
def check_docker_compose_version(verbose: bool):
    """Warn when docker-compose is missing, unparseable, or older than MIN_DOCKER_COMPOSE_VERSION."""
    version_pattern = re.compile(r'(\d+)\.(\d+)\.(\d+)')
    docker_compose_version_command = ["docker-compose", "--version"]
    docker_compose_version_output = run_command(
        docker_compose_version_command,
        verbose=verbose,
        suppress_console_print=True,
        capture_output=True,
        text=True,
    )
    if docker_compose_version_output.returncode == 0:
        docker_compose_version = docker_compose_version_output.stdout
        version_extracted = version_pattern.search(docker_compose_version)
        if version_extracted is not None:
            # Renamed from `version`: that local shadowed the `packaging.version`
            # module imported at the top of this file.
            compose_version = '.'.join(version_extracted.groups())
            good_version = compare_version(compose_version, MIN_DOCKER_COMPOSE_VERSION)
            if good_version:
                console.print(f'Good version of docker-compose: {compose_version}')
            else:
                # NOTE(review): these messages embed source indentation via line
                # continuation; the unit tests pin the exact text, so left unchanged.
                console.print(
                    f'You have too old version of docker-compose: {compose_version}! \
                    At least 1.29 is needed! Please upgrade!'
                )
                console.print(
                    'See https://docs.docker.com/compose/install/ for instructions. \
                    Make sure docker-compose you install is first on the PATH variable of yours.'
                )
    else:
        console.print(
            'Unknown docker-compose version. At least 1.29 is needed! \
        If Breeze fails upgrade to latest available docker-compose version'
        )
diff --git a/dev/breeze/src/airflow_breeze/utils/host_info_utils.py
b/dev/breeze/src/airflow_breeze/utils/host_info_utils.py
new file mode 100644
index 0000000..07a91fe
--- /dev/null
+++ b/dev/breeze/src/airflow_breeze/utils/host_info_utils.py
@@ -0,0 +1,62 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import platform
+
+from airflow_breeze.utils.run_utils import run_command
+
+# DIRECTORIES_TO_FIX=(
+# "/files"
+# "/root/.aws"
+# "/root/.azure"
+# "/root/.config/gcloud"
+# "/root/.docker"
+# "/opt/airflow/logs"
+# "/opt/airflow/docs"
+# "/opt/airflow/dags"
+# "${AIRFLOW_SOURCE}"
+# )
+
+
def get_host_user_id():
    """Numeric user id of the host user on Linux/macOS; '' on other platforms."""
    if get_host_os() in ('Linux', 'Darwin'):
        return run_command(cmd=['id', '-ur'], capture_output=True, text=True).stdout.strip()
    return ''
+
+
def get_host_group_id():
    """Numeric group id of the host user on Linux/macOS; '' on other platforms."""
    if get_host_os() in ('Linux', 'Darwin'):
        return run_command(cmd=['id', '-gr'], capture_output=True, text=True).stdout.strip()
    return ''
+
+
def get_host_os():
    """Host OS name as reported by the platform module: 'Linux', 'Darwin' or 'Windows'."""
    return platform.system()
+
+
def get_stat_bin():
    """Name of the GNU stat binary: 'gstat' (coreutils) on macOS, plain 'stat' elsewhere."""
    return 'gstat' if platform.system() == 'Darwin' else 'stat'
diff --git a/dev/breeze/src/airflow_breeze/utils/path_utils.py
b/dev/breeze/src/airflow_breeze/utils/path_utils.py
index e7d50a3..2d0909b 100644
--- a/dev/breeze/src/airflow_breeze/utils/path_utils.py
+++ b/dev/breeze/src/airflow_breeze/utils/path_utils.py
@@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
import os
+import tempfile
from pathlib import Path
from typing import Optional
@@ -55,4 +56,24 @@ def find_airflow_sources_root():
find_airflow_sources_root()
-BUILD_CACHE_DIR = Path(get_airflow_sources_root(), '.build')
+AIRFLOW_SOURCE = get_airflow_sources_root()
+BUILD_CACHE_DIR = Path(AIRFLOW_SOURCE, '.build')
+FILES_DIR = Path(AIRFLOW_SOURCE, 'files')
+MSSQL_DATA_VOLUME = Path(BUILD_CACHE_DIR, 'tmp_mssql_volume')
+MYPY_CACHE_DIR = Path(AIRFLOW_SOURCE, '.mypy_cache')
+LOGS_DIR = Path(AIRFLOW_SOURCE, 'logs')
+DIST_DIR = Path(AIRFLOW_SOURCE, 'dist')
+SCRIPTS_CI_DIR = Path(AIRFLOW_SOURCE, 'scripts', 'ci')
+
+
def create_directories():
    """Create the writable host directories Breeze mounts or writes into."""
    for directory in (BUILD_CACHE_DIR, FILES_DIR, MSSQL_DATA_VOLUME, MYPY_CACHE_DIR, LOGS_DIR, DIST_DIR):
        directory.mkdir(parents=True, exist_ok=True)
    # BUG FIX: the original did `Path(tempfile.TemporaryDirectory(), 'out.log')`.
    # TemporaryDirectory is not os.PathLike, so Path() raises TypeError - and even
    # with .name the object would be garbage-collected at function exit, deleting
    # the directory again. mkdtemp() returns a plain path that persists.
    cache_tmp_file_dir = tempfile.mkdtemp()
    # TODO(review): original had "add trap to this cache_tmp_file_dir" - consider
    # registering an atexit cleanup for this temporary directory.
    output_log = Path(cache_tmp_file_dir, 'out.log')
    # NOTE(review): this creates a *directory* named out.log, mirroring the original
    # mkdir call - confirm whether a log file was intended instead.
    output_log.mkdir(parents=True, exist_ok=True)
diff --git a/dev/breeze/src/airflow_breeze/utils/run_utils.py
b/dev/breeze/src/airflow_breeze/utils/run_utils.py
index 99b1bcf..cc17f4b 100644
--- a/dev/breeze/src/airflow_breeze/utils/run_utils.py
+++ b/dev/breeze/src/airflow_breeze/utils/run_utils.py
@@ -15,13 +15,24 @@
# specific language governing permissions and limitations
# under the License.
+import contextlib
import hashlib
+import os
import shlex
import shutil
+import stat
import subprocess
-from typing import Dict, List
+from copy import deepcopy
+from pathlib import Path
+from typing import Dict, List, Mapping, Optional
+import psutil
+import requests
+
+from airflow_breeze.cache import update_md5checksum_in_cache
from airflow_breeze.console import console
+from airflow_breeze.global_constants import FILES_FOR_REBUILD_CHECK
+from airflow_breeze.utils.path_utils import AIRFLOW_SOURCE
def run_command(
@@ -31,12 +42,26 @@ def run_command(
verbose: bool = False,
suppress_raise_exception: bool = False,
suppress_console_print: bool = False,
+ env: Optional[Mapping[str, str]] = None,
+ cwd: Optional[Path] = None,
**kwargs,
):
+ workdir: str = str(cwd) if cwd else os.getcwd()
if verbose:
- console.print(f"[blue]$ {' '.join(shlex.quote(c) for c in cmd)}")
+ command_to_print = ' '.join(shlex.quote(c) for c in cmd)
+ # if we pass environment variables to execute, then
+ env_to_print = ' '.join(f'{key}="{val}"' for (key, val) in
env.items()) if env else ''
+ console.print(f"\n[blue]Working directory {workdir} [/]\n")
+ # Soft wrap allows to copy&paste and run resulting output as it has no
hard EOL
+ console.print(f"\n[blue]{env_to_print} {command_to_print}[/]\n",
soft_wrap=True)
+
try:
- return subprocess.run(cmd, check=check, **kwargs)
+ # copy existing environment variables
+ cmd_env = deepcopy(os.environ)
+ if env:
+ # Add environment variables passed as parameters
+ cmd_env.update(env)
+ return subprocess.run(cmd, check=check, env=cmd_env, cwd=workdir,
**kwargs)
except subprocess.CalledProcessError as ex:
if not suppress_console_print:
console.print("========================= OUTPUT start
============================")
@@ -73,3 +98,126 @@ def check_package_installed(package_name: str) -> bool:
Please install using https://pre-commit.com/#install to
continue[/]\n"
)
return is_installed
+
+
def get_filesystem_type(filepath):
    """Best-effort filesystem type for *filepath*.

    Returns the fstype of the first non-root mountpoint that prefixes
    *filepath*; falls back to the root mount's type, or "unknown".
    """
    fallback = "unknown"
    for partition in psutil.disk_partitions():
        if partition.mountpoint == '/':
            # Remember root as the fallback but keep scanning for a closer mount.
            fallback = partition.fstype
        elif filepath.startswith(partition.mountpoint):
            return partition.fstype
    return fallback
+
+
def calculate_md5_checksum_for_files(md5sum_cache_dir: Path):
    """Split FILES_FOR_REBUILD_CHECK into (modified, not_modified) lists.

    A file counts as modified when its current md5 differs from the value
    recorded in *md5sum_cache_dir* (the cache is updated as a side effect).
    """
    modified_files = []
    not_modified_files = []
    for rel_path in FILES_FOR_REBUILD_CHECK:
        tracked_file = Path(AIRFLOW_SOURCE, rel_path)
        checksum = generate_md5(tracked_file)
        # Cache entry is named after the last two path components, e.g. "ci-setup.py.md5sum".
        cache_entry = Path(md5sum_cache_dir, f"{tracked_file.parts[-2]}-{tracked_file.parts[-1]}.md5sum")
        record = f"{checksum} {tracked_file}\n"
        if update_md5checksum_in_cache(record, cache_entry):
            modified_files.append(rel_path)
        else:
            not_modified_files.append(rel_path)
    return modified_files, not_modified_files
+
+
def md5sum_check_if_build_is_needed(md5sum_cache_dir: Path, the_image_type: str) -> bool:
    """Return True when any watched file changed since the md5 cache was written."""
    modified_files, _ = calculate_md5_checksum_for_files(md5sum_cache_dir)
    if not modified_files:
        console.print(
            f'Docker image build is not needed for {the_image_type} build as no important files are changed!'
        )
        return False
    console.print('The following files are modified: ', modified_files)
    console.print(f'Likely {the_image_type} image needs rebuild')
    return True
+
+
def instruct_build_image(the_image_type: str, python_version: str):
    """Tell the user how to rebuild a possibly-outdated CI/PROD image."""
    console.print(f'\nThe {the_image_type} image for python version {python_version} may be outdated\n')
    console.print('Please run this command at earliest convenience:\n')
    subcommand = 'build-ci-image' if the_image_type == 'CI' else 'build-prod-image'
    console.print(f'./Breeze2 {subcommand} --python {python_version}')
    console.print("\nIf you run it via pre-commit as individual hook, you can run 'pre-commit run build'.\n")
+
+
def instruct_for_setup():
    """Print post-entry tips: autocomplete setup and ascii-art/cheatsheet toggles."""
    CMDNAME = 'Breeze2'
    # BUG FIX: the original message had a stray closing quote with no opening one
    # ("... running Breeze2 setup-autocomplete'"); quotes are now balanced.
    console.print(f"\nYou can setup autocomplete by running '{CMDNAME} setup-autocomplete'")
    console.print(" You can toggle ascii/cheatsheet by running:")
    console.print(f" * {CMDNAME} toggle-suppress-cheatsheet")
    console.print(f" * {CMDNAME} toggle-suppress-asciiart\n")
+
+
@contextlib.contextmanager
def working_directory(source_path: Path):
    """Temporarily chdir into *source_path* - the pushd/popd equivalent.

    See https://stackoverflow.com/a/42441759/3101838
    """
    original_cwd = Path.cwd()
    os.chdir(source_path)
    try:
        yield
    finally:
        # Always restore, even if the body raised.
        os.chdir(original_cwd)
+
+
def change_file_permission(file_to_fix: Path):
    """Strip group/other write permission from an existing file (no-op if missing)."""
    if not file_to_fix.exists():
        return
    mode = stat.S_IMODE(os.stat(file_to_fix).st_mode)
    os.chmod(file_to_fix, mode & ~(stat.S_IWGRP | stat.S_IWOTH))
+
+
def change_directory_permission(directory_to_fix: Path):
    """Make an existing directory group/other-listable but not group/other-writable."""
    if not directory_to_fix.exists():
        return
    mode = stat.S_IMODE(os.stat(directory_to_fix).st_mode)
    mode &= ~(stat.S_IWGRP | stat.S_IWOTH)  # drop group/other write
    mode |= stat.S_IXGRP | stat.S_IXOTH  # allow group/other to list the directory
    os.chmod(directory_to_fix, mode)
+
+
@working_directory(AIRFLOW_SOURCE)
def fix_group_permissions():
    """Normalise group/other permissions of all git-tracked files and directories."""
    tracked_files = run_command(['git', 'ls-files', './'], capture_output=True, text=True)
    if tracked_files.returncode == 0:
        for name in tracked_files.stdout.strip().split('\n'):
            change_file_permission(Path(name))
    tracked_dirs = run_command(
        ['git', 'ls-tree', '-r', '-d', '--name-only', 'HEAD'], capture_output=True, text=True
    )
    if tracked_dirs.returncode == 0:
        for name in tracked_dirs.stdout.strip().split('\n'):
            change_directory_permission(Path(name))
+
+
def get_latest_sha(repo: str, branch: str):
    """Fetch the tip commit sha of *branch* in *repo* from the GitHub API."""
    response = requests.get(
        f"https://api.github.com/repos/{repo}/commits/{branch}",
        headers={"Accept": "application/vnd.github.VERSION.sha"},
    )
    return response.text
+
+
def is_repo_rebased(latest_sha: str):
    """Return True when *latest_sha* already appears in the local git history."""
    log_output = run_command(['git', 'log', '--format=format:%H'], capture_output=True, text=True)
    return latest_sha in log_output.stdout.strip().splitlines()
diff --git a/dev/breeze/src/airflow_breeze/visuals/__init__.py
b/dev/breeze/src/airflow_breeze/visuals/__init__.py
index b93cbfb..f13a1af 100644
--- a/dev/breeze/src/airflow_breeze/visuals/__init__.py
+++ b/dev/breeze/src/airflow_breeze/visuals/__init__.py
@@ -16,7 +16,6 @@
# under the License.
from airflow_breeze.global_constants import (
- AIRFLOW_SOURCES,
FLOWER_HOST_PORT,
MSSQL_HOST_PORT,
MYSQL_HOST_PORT,
@@ -25,6 +24,7 @@ from airflow_breeze.global_constants import (
SSH_PORT,
WEBSERVER_HOST_PORT,
)
+from airflow_breeze.utils.path_utils import get_airflow_sources_root
ASCIIART = """
@@ -79,7 +79,7 @@ Adding breeze to your path:
When you exit the environment, you can add sources of Airflow to the path -
you can
run breeze or the scripts above from any directory by calling 'breeze'
commands directly
- \'{AIRFLOW_SOURCES}\' is exported into PATH
+ \'{str(get_airflow_sources_root())}\' is exported into PATH
Port forwarding:
Ports are forwarded to the running docker containers for webserver and
database
diff --git a/dev/breeze/tests/test_cache.py b/dev/breeze/tests/test_cache.py
index e5e290e..60fe517 100644
--- a/dev/breeze/tests/test_cache.py
+++ b/dev/breeze/tests/test_cache.py
@@ -20,7 +20,12 @@ from unittest import mock
import pytest
-from airflow_breeze.cache import check_if_cache_exists,
check_if_values_allowed, read_from_cache_file
+from airflow_breeze.cache import (
+ check_if_cache_exists,
+ check_if_values_allowed,
+ delete_cache,
+ read_from_cache_file,
+)
AIRFLOW_SOURCES = Path(__file__).parent.parent.parent.parent
@@ -28,8 +33,8 @@ AIRFLOW_SOURCES = Path(__file__).parent.parent.parent.parent
@pytest.mark.parametrize(
'parameter, value, result, exception',
[
- ("backends", "mysql", (True, ['sqlite', 'mysql', 'postgres',
'mssql']), None),
- ("backends", "xxx", (False, ['sqlite', 'mysql', 'postgres', 'mssql']),
None),
+ ("backend", "mysql", (True, ['sqlite', 'mysql', 'postgres', 'mssql']),
None),
+ ("backend", "xxx", (False, ['sqlite', 'mysql', 'postgres', 'mssql']),
None),
("python_major_minor_version", "3.8", (True, ['3.6', '3.7', '3.8',
'3.9']), None),
("python_major_minor_version", "3.5", (False, ['3.6', '3.7', '3.8',
'3.9']), None),
("missing", "value", None, AttributeError),
@@ -62,6 +67,25 @@ def test_read_from_cache_file(param):
if param_value is None:
assert None is param_value
else:
- allowed, param_list = check_if_values_allowed(param + 's', param_value)
+ allowed, param_list = check_if_values_allowed(param, param_value)
if allowed:
assert param_value in param_list
+
+
@mock.patch('airflow_breeze.cache.Path')
@mock.patch('airflow_breeze.cache.check_if_cache_exists')
def test_delete_cache_exists(mock_check_if_cache_exists, mock_path):
    # When the cache file exists, delete_cache resolves the .build dir and reports True.
    param = "MYSQL_VERSION"
    mock_check_if_cache_exists.return_value = True
    cache_deleted = delete_cache(param)
    mock_path.assert_called_with(AIRFLOW_SOURCES / ".build")
    assert cache_deleted
+
+
@mock.patch('airflow_breeze.cache.Path')
@mock.patch('airflow_breeze.cache.check_if_cache_exists')
def test_delete_cache_not_exists(mock_check_if_cache_exists, mock_path):
    # When the cache file is absent, delete_cache reports False.
    param = "TEST_PARAM"
    mock_check_if_cache_exists.return_value = False
    cache_deleted = delete_cache(param)
    assert not cache_deleted
diff --git a/dev/breeze/tests/test_commands.py
b/dev/breeze/tests/test_commands.py
index e1aaa12..9d29251 100644
--- a/dev/breeze/tests/test_commands.py
+++ b/dev/breeze/tests/test_commands.py
@@ -27,6 +27,11 @@ def test_visuals():
def test_get_extra_docker_flags():
    # all=True mounts a single big volume (few flags); selected=True adds one -v per
    # NECESSARY_HOST_VOLUME (many flags); neither yields only the fixed flags.
    airflow_sources = get_airflow_sources_root()
    all = True
    selected = False
    assert len(get_extra_docker_flags(all, selected, str(airflow_sources))) < 10
    all = False
    selected = True
    assert len(get_extra_docker_flags(all, selected, str(airflow_sources))) > 60
    all = False
    selected = False
    assert len(get_extra_docker_flags(all, selected, str(airflow_sources))) < 8
diff --git a/dev/breeze/tests/test_docker_command_utils.py
b/dev/breeze/tests/test_docker_command_utils.py
new file mode 100644
index 0000000..53df745
--- /dev/null
+++ b/dev/breeze/tests/test_docker_command_utils.py
@@ -0,0 +1,180 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from unittest import mock
+from unittest.mock import call
+
+from airflow_breeze.utils.docker_command_utils import
check_docker_compose_version, check_docker_version
+
+
@mock.patch('airflow_breeze.utils.docker_command_utils.run_command')
@mock.patch('airflow_breeze.utils.docker_command_utils.console')
def test_check_docker_version_unknown(mock_console, mock_run_command):
    # run_command is fully mocked, so returncode is a MagicMock (!= 0) and the
    # version stays '' -> the "unknown version" message must be printed.
    check_docker_version(verbose=True)
    expected_run_command_calls = [
        call(['docker', 'info'], verbose=True, suppress_console_print=True, capture_output=True, text=True),
        call(
            ['docker', 'version', '--format', '{{.Client.Version}}'],
            verbose=True,
            suppress_console_print=True,
            capture_output=True,
            text=True,
        ),
    ]

    mock_run_command.assert_has_calls(expected_run_command_calls)
    mock_console.print.assert_called_with(
        "Your version of docker is unknown. If the scripts fail, please make sure to"
        " install docker at least: 20.10.0 version."
    )
+
+
@mock.patch('airflow_breeze.utils.docker_command_utils.check_docker_permission')
@mock.patch('airflow_breeze.utils.docker_command_utils.run_command')
@mock.patch('airflow_breeze.utils.docker_command_utils.console')
def test_check_docker_version_too_low(mock_console, mock_run_command, mock_check_docker_permission):
    # A stubbed 0.9 client version is below MIN_DOCKER_VERSION -> "too old" warning.
    mock_check_docker_permission.return_value = False
    mock_run_command.return_value.returncode = 0
    mock_run_command.return_value.stdout = "0.9"
    check_docker_version(verbose=True)
    mock_check_docker_permission.assert_called_with(True)
    mock_run_command.assert_called_with(
        ['docker', 'version', '--format', '{{.Client.Version}}'],
        verbose=True,
        suppress_console_print=True,
        capture_output=True,
        text=True,
    )
    mock_console.print.assert_called_with(
        "Your version of docker is too old:0.9. Please upgrade to at least 20.10.0"
    )
+
+
@mock.patch('airflow_breeze.utils.docker_command_utils.check_docker_permission')
@mock.patch('airflow_breeze.utils.docker_command_utils.run_command')
@mock.patch('airflow_breeze.utils.docker_command_utils.console')
def test_check_docker_version_ok(mock_console, mock_run_command, mock_check_docker_permission):
    # Exactly the minimum supported version -> "good version" message.
    mock_check_docker_permission.return_value = False
    mock_run_command.return_value.returncode = 0
    mock_run_command.return_value.stdout = "20.10.0"
    check_docker_version(verbose=True)
    mock_check_docker_permission.assert_called_with(True)
    mock_run_command.assert_called_with(
        ['docker', 'version', '--format', '{{.Client.Version}}'],
        verbose=True,
        suppress_console_print=True,
        capture_output=True,
        text=True,
    )
    mock_console.print.assert_called_with("Good version of Docker: 20.10.0.")
+
+
@mock.patch('airflow_breeze.utils.docker_command_utils.check_docker_permission')
@mock.patch('airflow_breeze.utils.docker_command_utils.run_command')
@mock.patch('airflow_breeze.utils.docker_command_utils.console')
def test_check_docker_version_higher(mock_console, mock_run_command, mock_check_docker_permission):
    # Above the minimum supported version -> "good version" message.
    mock_check_docker_permission.return_value = False
    mock_run_command.return_value.returncode = 0
    mock_run_command.return_value.stdout = "21.10.0"
    check_docker_version(verbose=True)
    mock_check_docker_permission.assert_called_with(True)
    mock_run_command.assert_called_with(
        ['docker', 'version', '--format', '{{.Client.Version}}'],
        verbose=True,
        suppress_console_print=True,
        capture_output=True,
        text=True,
    )
    mock_console.print.assert_called_with("Good version of Docker: 21.10.0.")
+
+
@mock.patch('airflow_breeze.utils.docker_command_utils.run_command')
@mock.patch('airflow_breeze.utils.docker_command_utils.console')
def test_check_docker_compose_version_unknown(mock_console, mock_run_command):
    # run_command is fully mocked -> returncode is a MagicMock (!= 0) -> the
    # "unknown version" message must be printed.
    check_docker_compose_version(verbose=True)
    expected_run_command_calls = [
        call(
            ["docker-compose", "--version"],
            verbose=True,
            suppress_console_print=True,
            capture_output=True,
            text=True,
        ),
    ]
    mock_run_command.assert_has_calls(expected_run_command_calls)
    mock_console.print.assert_called_with(
        'Unknown docker-compose version. At least 1.29 is needed! \
        If Breeze fails upgrade to latest available docker-compose version'
    )
+
+
@mock.patch('airflow_breeze.utils.docker_command_utils.run_command')
@mock.patch('airflow_breeze.utils.docker_command_utils.console')
def test_check_docker_compose_version_low(mock_console, mock_run_command):
    # 1.28.5 is below the 1.29 minimum -> both warning messages printed in order.
    mock_run_command.return_value.returncode = 0
    mock_run_command.return_value.stdout = "1.28.5"
    check_docker_compose_version(verbose=True)
    mock_run_command.assert_called_with(
        ["docker-compose", "--version"],
        verbose=True,
        suppress_console_print=True,
        capture_output=True,
        text=True,
    )
    expected_print_calls = [
        call(
            'You have too old version of docker-compose: 1.28.5! \
                    At least 1.29 is needed! Please upgrade!'
        ),
        call(
            'See https://docs.docker.com/compose/install/ for instructions. \
                    Make sure docker-compose you install is first on the PATH variable of yours.'
        ),
    ]
    mock_console.print.assert_has_calls(expected_print_calls)
+
+
@mock.patch('airflow_breeze.utils.docker_command_utils.run_command')
@mock.patch('airflow_breeze.utils.docker_command_utils.console')
def test_check_docker_compose_version_ok(mock_console, mock_run_command):
    # Exactly the 1.29 minimum -> "good version" message.
    mock_run_command.return_value.returncode = 0
    mock_run_command.return_value.stdout = "1.29.0"
    check_docker_compose_version(verbose=True)
    mock_run_command.assert_called_with(
        ["docker-compose", "--version"],
        verbose=True,
        suppress_console_print=True,
        capture_output=True,
        text=True,
    )
    mock_console.print.assert_called_with("Good version of docker-compose: 1.29.0")
+
+
@mock.patch('airflow_breeze.utils.docker_command_utils.run_command')
@mock.patch('airflow_breeze.utils.docker_command_utils.console')
def test_check_docker_compose_version_higher(mock_console, mock_run_command):
    # Above the 1.29 minimum -> "good version" message.
    mock_run_command.return_value.returncode = 0
    mock_run_command.return_value.stdout = "1.29.2"
    check_docker_compose_version(verbose=True)
    mock_run_command.assert_called_with(
        ["docker-compose", "--version"],
        verbose=True,
        suppress_console_print=True,
        capture_output=True,
        text=True,
    )
    mock_console.print.assert_called_with("Good version of docker-compose: 1.29.2")
diff --git a/dev/breeze/tests/test_commands.py
b/dev/breeze/tests/test_host_info_utils.py
similarity index 61%
copy from dev/breeze/tests/test_commands.py
copy to dev/breeze/tests/test_host_info_utils.py
index e1aaa12..abe5a1e 100644
--- a/dev/breeze/tests/test_commands.py
+++ b/dev/breeze/tests/test_host_info_utils.py
@@ -15,18 +15,11 @@
# specific language governing permissions and limitations
# under the License.
-from airflow_breeze.utils.docker_command_utils import get_extra_docker_flags
-from airflow_breeze.utils.path_utils import get_airflow_sources_root
-from airflow_breeze.visuals import ASCIIART
+from airflow_breeze.utils import host_info_utils
+SUPPORTED_OS = ['Linux', 'Darwin', 'Windows']
-def test_visuals():
- assert 2051 == len(ASCIIART)
-
-def test_get_extra_docker_flags():
- airflow_sources = get_airflow_sources_root()
- all = True
- assert len(get_extra_docker_flags(all, str(airflow_sources))) < 10
- all = False
- assert len(get_extra_docker_flags(all, str(airflow_sources))) > 60
def test_get_host_os():
    """The detected host OS must be one of the platforms Breeze supports."""
    assert host_info_utils.get_host_os() in SUPPORTED_OS
diff --git a/dev/breeze/tests/test_run_utils.py b/dev/breeze/tests/test_run_utils.py
new file mode 100644
index 0000000..35f20ca
--- /dev/null
+++ b/dev/breeze/tests/test_run_utils.py
@@ -0,0 +1,53 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import os
+import stat
+from pathlib import Path
+from unittest import TestCase
+
+from airflow_breeze.utils.run_utils import (
+ change_directory_permission,
+ change_file_permission,
+ filter_out_none,
+)
+
+
def test_change_file_permission(tmpdir):
    """After change_file_permission the file is not group- or other-writable."""
    config_file = Path(tmpdir, 'test.config')
    config_file.write_text('content')
    change_file_permission(config_file)
    file_mode = os.stat(config_file).st_mode
    # Neither the group-write nor the other-write bit may remain set.
    assert file_mode & (stat.S_IWGRP | stat.S_IWOTH) == 0
+
+
def test_change_directory_permission(tmpdir):
    """After change_directory_permission the directory is group/other
    traversable (x bits set) but not group/other writable."""
    change_directory_permission(tmpdir)
    dir_mode = os.stat(tmpdir).st_mode
    assert not dir_mode & (stat.S_IWGRP | stat.S_IWOTH)
    assert dir_mode & stat.S_IXGRP
    assert dir_mode & stat.S_IXOTH
+
+
def test_filter_out_none():
    """filter_out_none drops every key whose value is None and keeps the rest.

    Uses a plain ``assert`` instead of instantiating ``unittest.TestCase`` just
    for ``assertDictEqual`` — pytest rewrites the assert and reports dict
    differences natively, so the extra TestCase object adds nothing.
    """
    dict_input_with_none = {'sample': None, 'sample1': 'One', 'sample2': 'Two', 'samplen': None}
    expected_dict_output = {'sample1': 'One', 'sample2': 'Two'}
    assert filter_out_none(**dict_input_with_none) == expected_dict_output