This is an automated email from the ASF dual-hosted git repository.
rom pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new a5ffbbda174 Standard provider bash operator (#42252)
a5ffbbda174 is described below
commit a5ffbbda17450a5c99037b292844087119b5676a
Author: GPK <[email protected]>
AuthorDate: Wed Oct 9 12:40:39 2024 +0100
Standard provider bash operator (#42252)
---
.github/workflows/basic-tests.yml | 5 +++++
.pre-commit-config.yaml | 14 +++-----------
airflow/decorators/bash.py | 2 +-
airflow/example_dags/example_assets.py | 2 +-
airflow/example_dags/example_bash_operator.py | 2 +-
airflow/example_dags/example_complex.py | 2 +-
airflow/example_dags/example_inlet_event_extra.py | 2 +-
airflow/example_dags/example_outlet_event_extra.py | 2 +-
.../example_passing_params_via_test_command.py | 2 +-
airflow/example_dags/example_sensors.py | 4 ++--
airflow/example_dags/example_setup_teardown.py | 2 +-
airflow/example_dags/example_task_group.py | 2 +-
airflow/example_dags/example_trigger_target_dag.py | 2 +-
airflow/example_dags/example_xcom.py | 2 +-
airflow/example_dags/example_xcomargs.py | 2 +-
airflow/example_dags/tutorial.py | 2 +-
.../celery/executors/celery_executor_utils.py | 8 ++++++--
.../providers/edge/example_dags/integration_test.py | 6 +++++-
airflow/providers/openlineage/provider.yaml | 3 ++-
airflow/{ => providers/standard}/operators/bash.py | 0
airflow/providers/standard/provider.yaml | 2 ++
airflow/{ => providers/standard}/sensors/bash.py | 0
dev/breeze/tests/test_selective_checks.py | 14 +++++++-------
dev/perf/dags/elastic_dag.py | 2 +-
dev/perf/dags/perf_dag_2.py | 2 +-
.../notifications/chime_notifier_howto_guide.rst | 2 +-
.../notifications/sns.rst | 2 +-
.../notifications/sqs.rst | 2 +-
.../notifications/apprise_notifier_howto_guide.rst | 2 +-
.../notifications/jira-notifier-howto-guide.rst | 2 +-
.../operators/cloud/mlengine.rst | 2 +-
.../operators/cloud/pubsub.rst | 2 +-
.../guides/developer.rst | 2 +-
.../guides/user.rst | 4 ++--
.../notifications/pagerduty_notifier_howto_guide.rst | 2 +-
.../notifications/slack_notifier_howto_guide.rst | 2 +-
.../slackwebhook_notifier_howto_guide.rst | 2 +-
.../notifications/smtp_notifier_howto_guide.rst | 2 +-
.../administration-and-deployment/lineage.rst | 2 +-
docs/apache-airflow/best-practices.rst | 2 +-
docs/apache-airflow/core-concepts/dag-run.rst | 4 ++--
docs/apache-airflow/core-concepts/dags.rst | 2 +-
docs/apache-airflow/core-concepts/operators.rst | 2 +-
docs/apache-airflow/core-concepts/tasks.rst | 2 +-
docs/apache-airflow/howto/notifications.rst | 2 +-
docs/apache-airflow/howto/operator/bash.rst | 4 ++--
docs/apache-airflow/index.rst | 2 +-
docs/apache-airflow/operators-and-hooks-ref.rst | 4 ++--
docs/apache-airflow/tutorial/taskflow.rst | 2 +-
docs/exts/templates/openlineage.rst.jinja2 | 2 +-
generated/provider_dependencies.json | 7 +++++--
kubernetes_tests/test_kubernetes_executor.py | 5 ++++-
kubernetes_tests/test_other_executors.py | 5 ++++-
tests/callbacks/test_callback_requests.py | 6 +++---
tests/cli/commands/test_task_command.py | 2 +-
tests/core/test_core.py | 2 +-
tests/dags/subdir2/test_dont_ignore_this.py | 2 +-
tests/dags/test_assets.py | 2 +-
tests/dags/test_backfill_with_upstream_failed_task.py | 2 +-
tests/dags/test_default_impersonation.py | 2 +-
tests/dags/test_example_bash_operator.py | 2 +-
tests/dags/test_failing.py | 2 +-
tests/dags/test_heartbeat_failed_fast.py | 2 +-
tests/dags/test_impersonation.py | 2 +-
tests/dags/test_miscellaneous.py | 2 +-
tests/dags/test_multiple_dags.py | 2 +-
tests/dags/test_no_impersonation.py | 2 +-
tests/dags/test_on_failure_callback.py | 2 +-
tests/dags/test_retry_handling_job.py | 2 +-
tests/dags/test_sensor.py | 2 +-
tests/decorators/test_setup_teardown.py | 2 +-
tests/integration/executors/test_celery_executor.py | 2 +-
tests/jobs/test_scheduler_job.py | 2 +-
tests/listeners/test_listeners.py | 2 +-
tests/models/test_dag.py | 2 +-
tests/models/test_dagrun.py | 2 +-
tests/models/test_renderedtifields.py | 2 +-
tests/models/test_serialized_dag.py | 2 +-
tests/models/test_taskinstance.py | 2 +-
tests/models/test_xcom_arg.py | 2 +-
.../kubernetes/executors/test_kubernetes_executor.py | 2 +-
.../cncf/kubernetes/test_template_rendering.py | 2 +-
tests/providers/openlineage/extractors/test_bash.py | 2 +-
tests/providers/openlineage/extractors/test_python.py | 2 +-
tests/providers/openlineage/plugins/test_adapter.py | 2 +-
tests/providers/openlineage/plugins/test_facets.py | 2 +-
tests/providers/openlineage/plugins/test_utils.py | 13 +++++++------
tests/providers/openlineage/utils/test_utils.py | 18 +++++++++++-------
tests/{ => providers/standard}/operators/test_bash.py | 2 +-
tests/{ => providers/standard}/sensors/test_bash.py | 2 +-
tests/sensors/test_external_task_sensor.py | 2 +-
tests/serialization/test_dag_serialization.py | 8 ++++----
.../core/example_external_task_child_deferrable.py | 2 +-
tests/system/providers/amazon/aws/example_appflow.py | 2 +-
.../system/providers/amazon/aws/example_http_to_s3.py | 2 +-
tests/system/providers/amazon/aws/utils/k8s.py | 2 +-
.../providers/apache/hive/example_twitter_dag.py | 2 +-
.../system/providers/apache/iceberg/example_iceberg.py | 2 +-
.../providers/cncf/kubernetes/example_kubernetes.py | 2 +-
.../cncf/kubernetes/example_kubernetes_async.py | 2 +-
tests/system/providers/docker/example_docker.py | 2 +-
.../providers/docker/example_docker_copy_data.py | 2 +-
.../google/cloud/bigquery/example_bigquery_dataset.py | 2 +-
.../google/cloud/bigquery/example_bigquery_queries.py | 2 +-
.../cloud/bigquery/example_bigquery_queries_async.py | 2 +-
.../google/cloud/bigquery/example_bigquery_to_mssql.py | 2 +-
.../cloud/bigquery/example_bigquery_to_postgres.py | 2 +-
.../google/cloud/cloud_build/example_cloud_build.py | 2 +-
.../example_cloud_memorystore_memcached.py | 2 +-
.../example_cloud_memorystore_redis.py | 2 +-
.../google/cloud/gcs/example_gcs_copy_delete.py | 2 +-
.../providers/google/cloud/gcs/example_gcs_to_gcs.py | 2 +-
.../providers/google/cloud/gcs/example_mysql_to_gcs.py | 2 +-
.../providers/google/cloud/gcs/example_sftp_to_gcs.py | 2 +-
.../providers/google/cloud/gcs/example_sheets.py | 2 +-
.../kubernetes_engine/example_kubernetes_engine.py | 2 +-
.../example_kubernetes_engine_async.py | 2 +-
.../google/cloud/ml_engine/example_mlengine.py | 2 +-
.../cloud/natural_language/example_natural_language.py | 2 +-
.../providers/google/cloud/pubsub/example_pubsub.py | 2 +-
.../cloud/sql_to_sheets/example_sql_to_sheets.py | 2 +-
.../providers/google/cloud/tasks/example_queue.py | 2 +-
.../google/cloud/transfers/example_postgres_to_gcs.py | 2 +-
.../google/cloud/translate/example_translate.py | 2 +-
.../video_intelligence/example_video_intelligence.py | 2 +-
.../cloud/vision/example_vision_annotate_image.py | 2 +-
.../google/datacatalog/example_datacatalog_entries.py | 2 +-
.../datacatalog/example_datacatalog_search_catalog.py | 2 +-
.../datacatalog/example_datacatalog_tag_templates.py | 2 +-
.../google/datacatalog/example_datacatalog_tags.py | 2 +-
.../providers/opsgenie/example_opsgenie_notifier.py | 2 +-
.../providers/singularity/example_singularity.py | 2 +-
tests/test_utils/compat.py | 13 ++++++++++---
tests/utils/test_dot_renderer.py | 2 +-
tests/utils/test_task_group.py | 2 +-
tests/www/views/test_views_rendered.py | 3 +--
tests/www/views/test_views_tasks.py | 3 +--
137 files changed, 203 insertions(+), 176 deletions(-)
diff --git a/.github/workflows/basic-tests.yml
b/.github/workflows/basic-tests.yml
index 2ccb2394876..49d6a7245bc 100644
--- a/.github/workflows/basic-tests.yml
+++ b/.github/workflows/basic-tests.yml
@@ -200,6 +200,11 @@ jobs:
breeze release-management prepare-provider-packages fab
--package-format wheel --skip-tag-check
- name: "Install Airflow with fab for webserver tests"
run: pip install . dist/apache_airflow_providers_fab-*.whl
+ - name: "Prepare Standard provider packages: wheel"
+ run: >
+ breeze release-management prepare-provider-packages standard
--package-format wheel --skip-tag-check
+ - name: "Install Airflow with standard provider for webserver tests"
+ run: pip install . dist/apache_airflow_providers_standard-*.whl
- name: "Install Python client"
run: pip install ./dist/apache_airflow_client-*.whl
- name: "Initialize Airflow DB and start webserver"
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index a568c79c265..ce557dba431 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -550,17 +550,9 @@ repos:
- id: check-no-providers-in-core-examples
language: pygrep
name: No providers imports in core example DAGs
- description: The core example DAGs have no dependencies other than
core Airflow
- entry: "^\\s*from airflow\\.providers.*"
+ description: The core example DAGs have no dependencies other than
standard provider or core Airflow
+ entry: "^\\s*from airflow\\.providers.(?!standard.)"
pass_filenames: true
- exclude: >
- (?x)
- ^airflow/example_dags/example_branch_datetime_operator.py|
- ^airflow/example_dags/example_branch_day_of_week_operator.py|
- ^airflow/example_dags/example_sensors.py|
- ^airflow/example_dags/example_sensors.py|
- ^airflow/example_dags/example_sensors.py|
- ^airflow/example_dags/example_time_delta_sensor_async.py
files: ^airflow/example_dags/.*\.py$
- id: check-no-airflow-deprecation-in-providers
language: pygrep
@@ -717,7 +709,7 @@ repos:
files: >
(?x)
^airflow/providers/.*\.py$
- exclude: ^.*/.*_vendor/
+ exclude: ^.*/.*_vendor/|airflow/providers/standard/operators/bash.py
- id: check-get-lineage-collector-providers
language: python
name: Check providers import hook lineage code from compat
diff --git a/airflow/decorators/bash.py b/airflow/decorators/bash.py
index 39d3131d28c..44738492da0 100644
--- a/airflow/decorators/bash.py
+++ b/airflow/decorators/bash.py
@@ -21,7 +21,7 @@ import warnings
from typing import Any, Callable, Collection, Mapping, Sequence
from airflow.decorators.base import DecoratedOperator, TaskDecorator,
task_decorator_factory
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils.context import Context, context_merge
from airflow.utils.operator_helpers import determine_kwargs
from airflow.utils.types import NOTSET
diff --git a/airflow/example_dags/example_assets.py
b/airflow/example_dags/example_assets.py
index 66369794ed9..451f17a3a3a 100644
--- a/airflow/example_dags/example_assets.py
+++ b/airflow/example_dags/example_assets.py
@@ -56,7 +56,7 @@ import pendulum
from airflow.assets import Asset
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.timetables.assets import AssetOrTimeSchedule
from airflow.timetables.trigger import CronTriggerTimetable
diff --git a/airflow/example_dags/example_bash_operator.py
b/airflow/example_dags/example_bash_operator.py
index b08d31c9930..27702d4cb5f 100644
--- a/airflow/example_dags/example_bash_operator.py
+++ b/airflow/example_dags/example_bash_operator.py
@@ -24,8 +24,8 @@ import datetime
import pendulum
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.operators.empty import EmptyOperator
+from airflow.providers.standard.operators.bash import BashOperator
with DAG(
dag_id="example_bash_operator",
diff --git a/airflow/example_dags/example_complex.py
b/airflow/example_dags/example_complex.py
index e7eba78eae8..6d7d504f13d 100644
--- a/airflow/example_dags/example_complex.py
+++ b/airflow/example_dags/example_complex.py
@@ -25,7 +25,7 @@ import pendulum
from airflow.models.baseoperator import chain
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
with DAG(
dag_id="example_complex",
diff --git a/airflow/example_dags/example_inlet_event_extra.py
b/airflow/example_dags/example_inlet_event_extra.py
index 974534c295b..9773df7a3f9 100644
--- a/airflow/example_dags/example_inlet_event_extra.py
+++ b/airflow/example_dags/example_inlet_event_extra.py
@@ -28,7 +28,7 @@ import datetime
from airflow.assets import Asset
from airflow.decorators import task
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
asset = Asset("s3://output/1.txt")
diff --git a/airflow/example_dags/example_outlet_event_extra.py
b/airflow/example_dags/example_outlet_event_extra.py
index 893090460b5..0d097eab0ac 100644
--- a/airflow/example_dags/example_outlet_event_extra.py
+++ b/airflow/example_dags/example_outlet_event_extra.py
@@ -29,7 +29,7 @@ from airflow.assets import Asset
from airflow.assets.metadata import Metadata
from airflow.decorators import task
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
ds = Asset("s3://output/1.txt")
diff --git a/airflow/example_dags/example_passing_params_via_test_command.py
b/airflow/example_dags/example_passing_params_via_test_command.py
index 2fcb8e4edab..7dcd963c096 100644
--- a/airflow/example_dags/example_passing_params_via_test_command.py
+++ b/airflow/example_dags/example_passing_params_via_test_command.py
@@ -27,7 +27,7 @@ import pendulum
from airflow.decorators import task
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
@task(task_id="run_this")
diff --git a/airflow/example_dags/example_sensors.py
b/airflow/example_dags/example_sensors.py
index 6fb564e63ae..f6390838581 100644
--- a/airflow/example_dags/example_sensors.py
+++ b/airflow/example_dags/example_sensors.py
@@ -22,11 +22,11 @@ import datetime
import pendulum
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
+from airflow.providers.standard.sensors.bash import BashSensor
from airflow.providers.standard.sensors.time import TimeSensor, TimeSensorAsync
from airflow.providers.standard.sensors.time_delta import TimeDeltaSensor,
TimeDeltaSensorAsync
from airflow.providers.standard.sensors.weekday import DayOfWeekSensor
-from airflow.sensors.bash import BashSensor
from airflow.sensors.filesystem import FileSensor
from airflow.sensors.python import PythonSensor
from airflow.utils.trigger_rule import TriggerRule
diff --git a/airflow/example_dags/example_setup_teardown.py
b/airflow/example_dags/example_setup_teardown.py
index 9fab87df756..81994fabc20 100644
--- a/airflow/example_dags/example_setup_teardown.py
+++ b/airflow/example_dags/example_setup_teardown.py
@@ -22,7 +22,7 @@ from __future__ import annotations
import pendulum
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils.task_group import TaskGroup
with DAG(
diff --git a/airflow/example_dags/example_task_group.py
b/airflow/example_dags/example_task_group.py
index 6435a912cc4..5129ad3cc61 100644
--- a/airflow/example_dags/example_task_group.py
+++ b/airflow/example_dags/example_task_group.py
@@ -22,8 +22,8 @@ from __future__ import annotations
import pendulum
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.operators.empty import EmptyOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils.task_group import TaskGroup
# [START howto_task_group]
diff --git a/airflow/example_dags/example_trigger_target_dag.py
b/airflow/example_dags/example_trigger_target_dag.py
index 7a009b8dcc6..3af68a25607 100644
--- a/airflow/example_dags/example_trigger_target_dag.py
+++ b/airflow/example_dags/example_trigger_target_dag.py
@@ -27,7 +27,7 @@ import pendulum
from airflow.decorators import task
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
@task(task_id="run_this")
diff --git a/airflow/example_dags/example_xcom.py
b/airflow/example_dags/example_xcom.py
index fa99b918346..2563eda77ee 100644
--- a/airflow/example_dags/example_xcom.py
+++ b/airflow/example_dags/example_xcom.py
@@ -24,7 +24,7 @@ import pendulum
from airflow.decorators import task
from airflow.models.dag import DAG
from airflow.models.xcom_arg import XComArg
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
value_1 = [1, 2, 3]
value_2 = {"a": "b"}
diff --git a/airflow/example_dags/example_xcomargs.py
b/airflow/example_dags/example_xcomargs.py
index d9d0c94f4ea..a7103dc1911 100644
--- a/airflow/example_dags/example_xcomargs.py
+++ b/airflow/example_dags/example_xcomargs.py
@@ -25,7 +25,7 @@ import pendulum
from airflow.decorators import task
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
log = logging.getLogger(__name__)
diff --git a/airflow/example_dags/tutorial.py b/airflow/example_dags/tutorial.py
index 0e31775c7a9..6e27bbcd2e5 100644
--- a/airflow/example_dags/tutorial.py
+++ b/airflow/example_dags/tutorial.py
@@ -32,7 +32,7 @@ from datetime import datetime, timedelta
from airflow.models.dag import DAG
# Operators; we need this to operate!
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
# [END import_module]
diff --git a/airflow/providers/celery/executors/celery_executor_utils.py
b/airflow/providers/celery/executors/celery_executor_utils.py
index 8f25f040c90..a7aa6a87ea2 100644
--- a/airflow/providers/celery/executors/celery_executor_utils.py
+++ b/airflow/providers/celery/executors/celery_executor_utils.py
@@ -111,8 +111,12 @@ def on_celery_import_modules(*args, **kwargs):
import airflow.jobs.local_task_job_runner
import airflow.macros
- import airflow.operators.bash
- import airflow.operators.python # noqa: F401
+ import airflow.operators.python
+
+ try:
+ import airflow.providers.standard.operators.bash
+ except ImportError:
+ import airflow.operators.bash # noqa: F401
with contextlib.suppress(ImportError):
import numpy # noqa: F401
diff --git a/airflow/providers/edge/example_dags/integration_test.py
b/airflow/providers/edge/example_dags/integration_test.py
index d6074abd30d..0aad61d354c 100644
--- a/airflow/providers/edge/example_dags/integration_test.py
+++ b/airflow/providers/edge/example_dags/integration_test.py
@@ -32,10 +32,14 @@ from airflow.hooks.base import BaseHook
from airflow.models.dag import DAG
from airflow.models.param import Param
from airflow.models.variable import Variable
-from airflow.operators.bash import BashOperator
from airflow.operators.empty import EmptyOperator
from airflow.operators.python import PythonOperator
+try:
+ from airflow.providers.standard.operators.bash import BashOperator
+except ImportError:
+ from airflow.operators.bash import BashOperator # type:
ignore[no-redef,attr-defined]
+
with DAG(
dag_id="integration_test",
dag_display_name="Integration Test",
diff --git a/airflow/providers/openlineage/provider.yaml
b/airflow/providers/openlineage/provider.yaml
index b249ff46c85..5b08ac6a77a 100644
--- a/airflow/providers/openlineage/provider.yaml
+++ b/airflow/providers/openlineage/provider.yaml
@@ -84,7 +84,8 @@ config:
Exclude some Operators from emitting OpenLineage events by passing a
string of semicolon separated
full import paths of Operators to disable.
type: string
- example:
"airflow.operators.bash.BashOperator;airflow.operators.python.PythonOperator"
+ example: "airflow.providers.standard.operators.bash.BashOperator;
+ airflow.operators.python.PythonOperator"
default: ""
version_added: 1.1.0
selective_enable:
diff --git a/airflow/operators/bash.py
b/airflow/providers/standard/operators/bash.py
similarity index 100%
rename from airflow/operators/bash.py
rename to airflow/providers/standard/operators/bash.py
diff --git a/airflow/providers/standard/provider.yaml
b/airflow/providers/standard/provider.yaml
index 83d8acf0a68..2d4c4f29bef 100644
--- a/airflow/providers/standard/provider.yaml
+++ b/airflow/providers/standard/provider.yaml
@@ -42,6 +42,7 @@ operators:
python-modules:
- airflow.providers.standard.operators.datetime
- airflow.providers.standard.operators.weekday
+ - airflow.providers.standard.operators.bash
sensors:
- integration-name: Standard
@@ -50,3 +51,4 @@ sensors:
- airflow.providers.standard.sensors.time_delta
- airflow.providers.standard.sensors.time
- airflow.providers.standard.sensors.weekday
+ - airflow.providers.standard.sensors.bash
diff --git a/airflow/sensors/bash.py
b/airflow/providers/standard/sensors/bash.py
similarity index 100%
rename from airflow/sensors/bash.py
rename to airflow/providers/standard/sensors/bash.py
diff --git a/dev/breeze/tests/test_selective_checks.py
b/dev/breeze/tests/test_selective_checks.py
index 4b28c7a0a63..3b58a45ae1b 100644
--- a/dev/breeze/tests/test_selective_checks.py
+++ b/dev/breeze/tests/test_selective_checks.py
@@ -703,9 +703,9 @@ def assert_outputs_are_printed(expected_outputs: dict[str,
str], stderr: str):
id="Only Always and common providers tests should run when only
common.io and tests/always changed",
),
pytest.param(
- ("airflow/operators/bash.py",),
+ ("airflow/providers/standard/operators/bash.py",),
{
- "affected-providers-list-as-string": None,
+ "affected-providers-list-as-string": "celery edge standard",
"all-python-versions": "['3.9']",
"all-python-versions-list-as-string": "3.9",
"python-versions": "['3.9']",
@@ -717,14 +717,14 @@ def assert_outputs_are_printed(expected_outputs:
dict[str, str], stderr: str):
"run-amazon-tests": "false",
"docs-build": "true",
"run-kubernetes-tests": "false",
- "skip-pre-commits":
"check-provider-yaml-valid,identity,lint-helm-chart,mypy-airflow,mypy-dev,mypy-docs,mypy-providers,"
+ "skip-pre-commits":
"identity,lint-helm-chart,mypy-airflow,mypy-dev,mypy-docs,mypy-providers,"
"ts-compile-format-lint-ui,ts-compile-format-lint-www",
"upgrade-to-newer-dependencies": "false",
- "parallel-test-types-list-as-string": "Always Core Operators
Serialization",
+ "parallel-test-types-list-as-string": "Always Core
Providers[celery,edge,standard] Serialization",
"needs-mypy": "true",
- "mypy-folders": "['airflow']",
+ "mypy-folders": "['providers']",
},
- id="Force Core and Serialization tests to run when airflow bash.py
changed",
+ id="Providers standard tests and Serialization tests to run when
airflow bash.py changed",
),
pytest.param(
("tests/operators/bash.py",),
@@ -1720,7 +1720,7 @@ def test_upgrade_to_newer_dependencies(
),
pytest.param(
("airflow/providers/celery/file.py",),
- {"docs-list-as-string": "apache-airflow celery cncf.kubernetes"},
+ {"docs-list-as-string": "apache-airflow celery cncf.kubernetes
standard"},
id="Celery python files changed",
),
pytest.param(
diff --git a/dev/perf/dags/elastic_dag.py b/dev/perf/dags/elastic_dag.py
index e0adcdf5caf..30bfc9acf99 100644
--- a/dev/perf/dags/elastic_dag.py
+++ b/dev/perf/dags/elastic_dag.py
@@ -24,7 +24,7 @@ from enum import Enum
from airflow.models.baseoperator import chain
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
# DAG File used in performance tests. Its shape can be configured by
environment variables.
RE_TIME_DELTA = re.compile(
diff --git a/dev/perf/dags/perf_dag_2.py b/dev/perf/dags/perf_dag_2.py
index 641bb7565c8..592bbe60878 100644
--- a/dev/perf/dags/perf_dag_2.py
+++ b/dev/perf/dags/perf_dag_2.py
@@ -24,7 +24,7 @@ from __future__ import annotations
import datetime
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
args = {
"owner": "airflow",
diff --git
a/docs/apache-airflow-providers-amazon/notifications/chime_notifier_howto_guide.rst
b/docs/apache-airflow-providers-amazon/notifications/chime_notifier_howto_guide.rst
index a52540fe782..e15c3a8c0c8 100644
---
a/docs/apache-airflow-providers-amazon/notifications/chime_notifier_howto_guide.rst
+++
b/docs/apache-airflow-providers-amazon/notifications/chime_notifier_howto_guide.rst
@@ -31,7 +31,7 @@ Example Code:
from datetime import datetime
from airflow import DAG
- from airflow.operators.bash import BashOperator
+ from airflow.providers.standard.operators.bash import BashOperator
from airflow.providers.amazon.aws.notifications.chime import
send_chime_notification
with DAG(
diff --git a/docs/apache-airflow-providers-amazon/notifications/sns.rst
b/docs/apache-airflow-providers-amazon/notifications/sns.rst
index bbaad4f8147..262cd966ae4 100644
--- a/docs/apache-airflow-providers-amazon/notifications/sns.rst
+++ b/docs/apache-airflow-providers-amazon/notifications/sns.rst
@@ -33,7 +33,7 @@ Example Code:
from datetime import datetime
from airflow import DAG
- from airflow.operators.bash import BashOperator
+ from airflow.providers.standard.operators.bash import BashOperator
from airflow.providers.amazon.aws.notifications.sns import
send_sns_notification
dag_failure_sns_notification = send_sns_notification(
diff --git a/docs/apache-airflow-providers-amazon/notifications/sqs.rst
b/docs/apache-airflow-providers-amazon/notifications/sqs.rst
index 6951caa9fdd..d74a2477d62 100644
--- a/docs/apache-airflow-providers-amazon/notifications/sqs.rst
+++ b/docs/apache-airflow-providers-amazon/notifications/sqs.rst
@@ -33,7 +33,7 @@ Example Code:
from datetime import datetime, timezone
from airflow import DAG
- from airflow.operators.bash import BashOperator
+ from airflow.providers.standard.operators.bash import BashOperator
from airflow.providers.amazon.aws.notifications.sqs import
send_sqs_notification
dag_failure_sqs_notification = send_sqs_notification(
diff --git
a/docs/apache-airflow-providers-apprise/notifications/apprise_notifier_howto_guide.rst
b/docs/apache-airflow-providers-apprise/notifications/apprise_notifier_howto_guide.rst
index 777a3d46a3b..2a0aeaaa107 100644
---
a/docs/apache-airflow-providers-apprise/notifications/apprise_notifier_howto_guide.rst
+++
b/docs/apache-airflow-providers-apprise/notifications/apprise_notifier_howto_guide.rst
@@ -30,7 +30,7 @@ Example Code:
from datetime import datetime
from airflow import DAG
- from airflow.operators.bash import BashOperator
+ from airflow.providers.standard.operators.bash import BashOperator
from airflow.providers.apprise.notifications.apprise import
send_apprise_notification
from apprise import NotifyType
diff --git
a/docs/apache-airflow-providers-atlassian-jira/notifications/jira-notifier-howto-guide.rst
b/docs/apache-airflow-providers-atlassian-jira/notifications/jira-notifier-howto-guide.rst
index e0ed1255814..a5617b9035d 100644
---
a/docs/apache-airflow-providers-atlassian-jira/notifications/jira-notifier-howto-guide.rst
+++
b/docs/apache-airflow-providers-atlassian-jira/notifications/jira-notifier-howto-guide.rst
@@ -31,7 +31,7 @@ Example Code
from datetime import datetime
from airflow import DAG
- from airflow.operators.bash import BashOperator
+ from airflow.providers.standard.operators.bash import BashOperator
from airflow.providers.atlassian.jira.notifications.jira import
send_jira_notification
with DAG(
diff --git a/docs/apache-airflow-providers-google/operators/cloud/mlengine.rst
b/docs/apache-airflow-providers-google/operators/cloud/mlengine.rst
index 9c71e885d8d..f64705e1c26 100644
--- a/docs/apache-airflow-providers-google/operators/cloud/mlengine.rst
+++ b/docs/apache-airflow-providers-google/operators/cloud/mlengine.rst
@@ -96,7 +96,7 @@ instead.
You can use :ref:`Jinja templating <concepts:jinja-templating>` with the
``project_id`` and ``model``
fields to dynamically determine their values. The result are saved to
:ref:`XCom <concepts:xcom>`,
allowing them to be used by other operators. In this case, the
-:class:`~airflow.operators.bash.BashOperator` is used to print the model
information.
+:class:`~airflow.providers.standard.operators.bash.BashOperator` is used to
print the model information.
.. exampleinclude::
/../../tests/system/providers/google/cloud/ml_engine/example_mlengine.py
:language: python
diff --git a/docs/apache-airflow-providers-google/operators/cloud/pubsub.rst
b/docs/apache-airflow-providers-google/operators/cloud/pubsub.rst
index 7d9a6dd5ff2..8fb497a14f0 100644
--- a/docs/apache-airflow-providers-google/operators/cloud/pubsub.rst
+++ b/docs/apache-airflow-providers-google/operators/cloud/pubsub.rst
@@ -101,7 +101,7 @@ Also for this action you can use sensor in the deferrable
mode:
:start-after: [START howto_operator_gcp_pubsub_pull_message_with_operator]
:end-before: [END howto_operator_gcp_pubsub_pull_message_with_operator]
-To pull messages from XCom use the
:class:`~airflow.operators.bash.BashOperator`.
+To pull messages from XCom use the
:class:`~airflow.providers.standard.operators.bash.BashOperator`.
.. exampleinclude::
/../../tests/system/providers/google/cloud/pubsub/example_pubsub.py
:language: python
diff --git a/docs/apache-airflow-providers-openlineage/guides/developer.rst
b/docs/apache-airflow-providers-openlineage/guides/developer.rst
index c2a5ffdc8fd..ccab215fc18 100644
--- a/docs/apache-airflow-providers-openlineage/guides/developer.rst
+++ b/docs/apache-airflow-providers-openlineage/guides/developer.rst
@@ -390,7 +390,7 @@ An Operator inside the Airflow DAG can be annotated with
inlets and outlets like
import pendulum
from airflow import DAG
- from airflow.operators.bash import BashOperator
+ from airflow.providers.standard.operators.bash import BashOperator
from airflow.lineage.entities import Table, File, Column, User
diff --git a/docs/apache-airflow-providers-openlineage/guides/user.rst
b/docs/apache-airflow-providers-openlineage/guides/user.rst
index 2c299b8c6d3..4f95253e1f2 100644
--- a/docs/apache-airflow-providers-openlineage/guides/user.rst
+++ b/docs/apache-airflow-providers-openlineage/guides/user.rst
@@ -257,13 +257,13 @@ full import paths of Airflow Operators to disable as
``disabled_for_operators``
[openlineage]
transport = {"type": "http", "url": "http://example.com:5000", "endpoint":
"api/v1/lineage"}
- disabled_for_operators =
'airflow.operators.bash.BashOperator;airflow.operators.python.PythonOperator'
+ disabled_for_operators =
'airflow.providers.standard.operators.bash.BashOperator;airflow.operators.python.PythonOperator'
``AIRFLOW__OPENLINEAGE__DISABLED_FOR_OPERATORS`` environment variable is an
equivalent.
.. code-block:: ini
-
AIRFLOW__OPENLINEAGE__DISABLED_FOR_OPERATORS='airflow.operators.bash.BashOperator;airflow.operators.python.PythonOperator'
+
AIRFLOW__OPENLINEAGE__DISABLED_FOR_OPERATORS='airflow.providers.standard.operators.bash.BashOperator;airflow.operators.python.PythonOperator'
Full Task Info
^^^^^^^^^^^^^^
diff --git
a/docs/apache-airflow-providers-pagerduty/notifications/pagerduty_notifier_howto_guide.rst
b/docs/apache-airflow-providers-pagerduty/notifications/pagerduty_notifier_howto_guide.rst
index d16f9b2b9e4..658054bd0a5 100644
---
a/docs/apache-airflow-providers-pagerduty/notifications/pagerduty_notifier_howto_guide.rst
+++
b/docs/apache-airflow-providers-pagerduty/notifications/pagerduty_notifier_howto_guide.rst
@@ -31,7 +31,7 @@ Example Code:
from datetime import datetime
from airflow import DAG
- from airflow.operators.bash import BashOperator
+ from airflow.providers.standard.operators.bash import BashOperator
from airflow.providers.pagerduty.notifications.pagerduty import
send_pagerduty_notification
with DAG(
diff --git
a/docs/apache-airflow-providers-slack/notifications/slack_notifier_howto_guide.rst
b/docs/apache-airflow-providers-slack/notifications/slack_notifier_howto_guide.rst
index a4f891f8a57..3b6a1e78799 100644
---
a/docs/apache-airflow-providers-slack/notifications/slack_notifier_howto_guide.rst
+++
b/docs/apache-airflow-providers-slack/notifications/slack_notifier_howto_guide.rst
@@ -31,7 +31,7 @@ Example Code:
from datetime import datetime
from airflow import DAG
- from airflow.operators.bash import BashOperator
+ from airflow.providers.standard.operators.bash import BashOperator
from airflow.providers.slack.notifications.slack import
send_slack_notification
with DAG(
diff --git
a/docs/apache-airflow-providers-slack/notifications/slackwebhook_notifier_howto_guide.rst
b/docs/apache-airflow-providers-slack/notifications/slackwebhook_notifier_howto_guide.rst
index 66ced818a7d..e6ef3ab4140 100644
---
a/docs/apache-airflow-providers-slack/notifications/slackwebhook_notifier_howto_guide.rst
+++
b/docs/apache-airflow-providers-slack/notifications/slackwebhook_notifier_howto_guide.rst
@@ -32,7 +32,7 @@ Example Code:
from datetime import datetime, timezone
from airflow import DAG
- from airflow.operators.bash import BashOperator
+ from airflow.providers.standard.operators.bash import BashOperator
from airflow.providers.slack.notifications.slack_webhook import
send_slack_webhook_notification
dag_failure_slack_webhook_notification = send_slack_webhook_notification(
diff --git
a/docs/apache-airflow-providers-smtp/notifications/smtp_notifier_howto_guide.rst
b/docs/apache-airflow-providers-smtp/notifications/smtp_notifier_howto_guide.rst
index 4cb1bf310e0..e47f9e340c9 100644
---
a/docs/apache-airflow-providers-smtp/notifications/smtp_notifier_howto_guide.rst
+++
b/docs/apache-airflow-providers-smtp/notifications/smtp_notifier_howto_guide.rst
@@ -31,7 +31,7 @@ Example Code:
from datetime import datetime
from airflow import DAG
- from airflow.operators.bash import BashOperator
+ from airflow.providers.standard.operators.bash import BashOperator
from airflow.providers.smtp.notifications.smtp import
send_smtp_notification
with DAG(
diff --git a/docs/apache-airflow/administration-and-deployment/lineage.rst
b/docs/apache-airflow/administration-and-deployment/lineage.rst
index b274809175c..3740e8b56f9 100644
--- a/docs/apache-airflow/administration-and-deployment/lineage.rst
+++ b/docs/apache-airflow/administration-and-deployment/lineage.rst
@@ -36,7 +36,7 @@ works.
from airflow.lineage import AUTO
from airflow.lineage.entities import File
from airflow.models import DAG
- from airflow.operators.bash import BashOperator
+ from airflow.providers.standard.operators.bash import BashOperator
from airflow.operators.empty import EmptyOperator
FILE_CATEGORIES = ["CAT1", "CAT2", "CAT3"]
diff --git a/docs/apache-airflow/best-practices.rst
b/docs/apache-airflow/best-practices.rst
index 80a5996f367..466f546ff71 100644
--- a/docs/apache-airflow/best-practices.rst
+++ b/docs/apache-airflow/best-practices.rst
@@ -480,7 +480,7 @@ It's easier to grab the concept with an example. Let's say
that we have the foll
from airflow import DAG
from airflow.decorators import task
from airflow.exceptions import AirflowException
- from airflow.operators.bash import BashOperator
+ from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils.trigger_rule import TriggerRule
diff --git a/docs/apache-airflow/core-concepts/dag-run.rst
b/docs/apache-airflow/core-concepts/dag-run.rst
index 0621d3f771e..97fd4e28a7b 100644
--- a/docs/apache-airflow/core-concepts/dag-run.rst
+++ b/docs/apache-airflow/core-concepts/dag-run.rst
@@ -101,7 +101,7 @@ in the configuration file. When turned off, the scheduler
creates a DAG run only
https://github.com/apache/airflow/blob/main/airflow/example_dags/tutorial.py
"""
from airflow.models.dag import DAG
- from airflow.operators.bash import BashOperator
+ from airflow.providers.standard.operators.bash import BashOperator
import datetime
import pendulum
@@ -241,7 +241,7 @@ Example of a parameterized DAG:
import pendulum
from airflow import DAG
- from airflow.operators.bash import BashOperator
+ from airflow.providers.standard.operators.bash import BashOperator
dag = DAG(
"example_parameterized_dag",
diff --git a/docs/apache-airflow/core-concepts/dags.rst
b/docs/apache-airflow/core-concepts/dags.rst
index f9dc7d64c72..64726e08010 100644
--- a/docs/apache-airflow/core-concepts/dags.rst
+++ b/docs/apache-airflow/core-concepts/dags.rst
@@ -574,7 +574,7 @@ TaskGroup also supports ``default_args`` like DAG, it will
overwrite the ``defau
from airflow import DAG
from airflow.decorators import task_group
- from airflow.operators.bash import BashOperator
+ from airflow.providers.standard.operators.bash import BashOperator
from airflow.operators.empty import EmptyOperator
with DAG(
diff --git a/docs/apache-airflow/core-concepts/operators.rst
b/docs/apache-airflow/core-concepts/operators.rst
index 354697c3085..6a0825df287 100644
--- a/docs/apache-airflow/core-concepts/operators.rst
+++ b/docs/apache-airflow/core-concepts/operators.rst
@@ -28,7 +28,7 @@ An Operator is conceptually a template for a predefined
:doc:`Task <tasks>`, tha
Airflow has a very extensive set of operators available, with some built-in to
the core or pre-installed providers. Some popular operators from core include:
-- :class:`~airflow.operators.bash.BashOperator` - executes a bash command
+- :class:`~airflow.providers.standard.operators.bash.BashOperator` - executes
a bash command
- :class:`~airflow.operators.python.PythonOperator` - calls an arbitrary
Python function
- :class:`~airflow.operators.email.EmailOperator` - sends an email
- Use the ``@task`` decorator to execute an arbitrary Python function. It
doesn't support rendering jinja templates passed as arguments.
diff --git a/docs/apache-airflow/core-concepts/tasks.rst
b/docs/apache-airflow/core-concepts/tasks.rst
index ad03283ef77..5adfe8be460 100644
--- a/docs/apache-airflow/core-concepts/tasks.rst
+++ b/docs/apache-airflow/core-concepts/tasks.rst
@@ -236,7 +236,7 @@ If you'd like to reproduce zombie tasks for
development/testing processes, follo
.. code-block:: python
from airflow.decorators import dag
- from airflow.operators.bash import BashOperator
+ from airflow.providers.standard.operators.bash import BashOperator
from datetime import datetime
diff --git a/docs/apache-airflow/howto/notifications.rst
b/docs/apache-airflow/howto/notifications.rst
index c477ec1d3c1..993a36b3894 100644
--- a/docs/apache-airflow/howto/notifications.rst
+++ b/docs/apache-airflow/howto/notifications.rst
@@ -59,7 +59,7 @@ Here's an example of using the above notifier:
from datetime import datetime
from airflow.models.dag import DAG
- from airflow.operators.bash import BashOperator
+ from airflow.providers.standard.operators.bash import BashOperator
from myprovider.notifier import MyNotifier
diff --git a/docs/apache-airflow/howto/operator/bash.rst
b/docs/apache-airflow/howto/operator/bash.rst
index daf430fa14c..e4af9bcad6b 100644
--- a/docs/apache-airflow/howto/operator/bash.rst
+++ b/docs/apache-airflow/howto/operator/bash.rst
@@ -22,7 +22,7 @@
BashOperator
============
-Use the :class:`~airflow.operators.bash.BashOperator` to execute
+Use the :class:`~airflow.providers.standard.operators.bash.BashOperator` to
execute
commands in a `Bash <https://www.gnu.org/software/bash/>`__ shell. The Bash
command or script to execute is
determined by:
@@ -390,7 +390,7 @@ There are numerous possibilities with this type of
pre-execution enrichment.
BashSensor
==========
-Use the :class:`~airflow.sensors.bash.BashSensor` to use arbitrary command for
sensing. The command
+Use the :class:`~airflow.providers.standard.sensors.bash.BashSensor` to use
arbitrary command for sensing. The command
should return 0 when it succeeds, any other value otherwise.
.. exampleinclude:: /../../airflow/example_dags/example_sensors.py
diff --git a/docs/apache-airflow/index.rst b/docs/apache-airflow/index.rst
index 44dcd9a3bd3..38d62ecd04a 100644
--- a/docs/apache-airflow/index.rst
+++ b/docs/apache-airflow/index.rst
@@ -41,7 +41,7 @@ Take a look at the following snippet of code:
from airflow import DAG
from airflow.decorators import task
- from airflow.operators.bash import BashOperator
+ from airflow.providers.standard.operators.bash import BashOperator
# A DAG represents a workflow, a collection of tasks
with DAG(dag_id="demo", start_date=datetime(2022, 1, 1), schedule="0 0 * *
*") as dag:
diff --git a/docs/apache-airflow/operators-and-hooks-ref.rst
b/docs/apache-airflow/operators-and-hooks-ref.rst
index 16b74305a95..c82a4f3a66d 100644
--- a/docs/apache-airflow/operators-and-hooks-ref.rst
+++ b/docs/apache-airflow/operators-and-hooks-ref.rst
@@ -50,7 +50,7 @@ For details see:
:doc:`apache-airflow-providers:operators-and-hooks-ref/index`.
* - Operators
- Guides
- * - :mod:`airflow.operators.bash`
+ * - :mod:`airflow.providers.standard.operators.bash`
- :doc:`How to use <howto/operator/bash>`
* - :mod:`airflow.operators.branch`
@@ -82,7 +82,7 @@ For details see:
:doc:`apache-airflow-providers:operators-and-hooks-ref/index`.
* - Sensors
- Guides
- * - :mod:`airflow.sensors.bash`
+ * - :mod:`airflow.providers.standard.sensors.bash`
- :ref:`How to use <howto/operator:BashSensor>`
* - :mod:`airflow.sensors.external_task`
diff --git a/docs/apache-airflow/tutorial/taskflow.rst
b/docs/apache-airflow/tutorial/taskflow.rst
index c77debab8f3..aac04f9b534 100644
--- a/docs/apache-airflow/tutorial/taskflow.rst
+++ b/docs/apache-airflow/tutorial/taskflow.rst
@@ -437,7 +437,7 @@ the parameter value is used.
Adding dependencies between decorated and traditional tasks
-----------------------------------------------------------
The above tutorial shows how to create dependencies between TaskFlow
functions. However, dependencies can also
-be set between traditional tasks (such as
:class:`~airflow.operators.bash.BashOperator`
+be set between traditional tasks (such as
:class:`~airflow.providers.standard.operators.bash.BashOperator`
or :class:`~airflow.sensors.filesystem.FileSensor`) and TaskFlow functions.
Building this dependency is shown in the code below:
diff --git a/docs/exts/templates/openlineage.rst.jinja2
b/docs/exts/templates/openlineage.rst.jinja2
index 7dffc175f84..dfac543cbe7 100644
--- a/docs/exts/templates/openlineage.rst.jinja2
+++ b/docs/exts/templates/openlineage.rst.jinja2
@@ -22,7 +22,7 @@ At the moment, two core operators supports OpenLineage. These
operators function
capable of running any code, which might limit the extent of lineage
extraction.
- :class:`~airflow.operators.python.PythonOperator` (via
:class:`airflow.providers.openlineage.extractors.python.PythonExtractor`)
-- :class:`~airflow.operators.bash.BashOperator` (via
:class:`airflow.providers.openlineage.extractors.bash.BashExtractor`)
+- :class:`~airflow.providers.standard.operators.bash.BashOperator` (via
:class:`airflow.providers.openlineage.extractors.bash.BashExtractor`)
:class:`~airflow.providers.common.sql.operators.sql.SQLExecuteQueryOperator`
diff --git a/generated/provider_dependencies.json
b/generated/provider_dependencies.json
index b1ff0f7dcfa..57bca636ed6 100644
--- a/generated/provider_dependencies.json
+++ b/generated/provider_dependencies.json
@@ -344,7 +344,8 @@
"devel-deps": [],
"plugins": [],
"cross-providers-deps": [
- "cncf.kubernetes"
+ "cncf.kubernetes",
+ "standard"
],
"excluded-python-versions": [],
"state": "ready"
@@ -532,7 +533,9 @@
"plugin-class":
"airflow.providers.edge.plugins.edge_executor_plugin.EdgeExecutorPlugin"
}
],
- "cross-providers-deps": [],
+ "cross-providers-deps": [
+ "standard"
+ ],
"excluded-python-versions": [],
"state": "not-ready"
},
diff --git a/kubernetes_tests/test_kubernetes_executor.py
b/kubernetes_tests/test_kubernetes_executor.py
index a270243bfac..42b181b4430 100644
--- a/kubernetes_tests/test_kubernetes_executor.py
+++ b/kubernetes_tests/test_kubernetes_executor.py
@@ -20,7 +20,10 @@ import time
import pytest
-from kubernetes_tests.test_base import EXECUTOR, BaseK8STest # isort:skip
(needed to workaround isort bug)
+from kubernetes_tests.test_base import (
+ EXECUTOR,
+ BaseK8STest, # isort:skip (needed to workaround isort bug)
+)
@pytest.mark.skipif(EXECUTOR != "KubernetesExecutor", reason="Only runs on
KubernetesExecutor")
diff --git a/kubernetes_tests/test_other_executors.py
b/kubernetes_tests/test_other_executors.py
index 97b7e3df728..3a1aea16f12 100644
--- a/kubernetes_tests/test_other_executors.py
+++ b/kubernetes_tests/test_other_executors.py
@@ -20,7 +20,10 @@ import time
import pytest
-from kubernetes_tests.test_base import EXECUTOR, BaseK8STest # isort:skip
(needed to workaround isort bug)
+from kubernetes_tests.test_base import (
+ EXECUTOR,
+ BaseK8STest, # isort:skip (needed to workaround isort bug)
+)
# These tests are here because only KubernetesExecutor can run the tests in
diff --git a/tests/callbacks/test_callback_requests.py
b/tests/callbacks/test_callback_requests.py
index 5992ee6fbbf..7bbe4138775 100644
--- a/tests/callbacks/test_callback_requests.py
+++ b/tests/callbacks/test_callback_requests.py
@@ -27,7 +27,7 @@ from airflow.callbacks.callback_requests import (
)
from airflow.models.dag import DAG
from airflow.models.taskinstance import SimpleTaskInstance, TaskInstance
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils import timezone
from airflow.utils.state import State
from airflow.utils.types import DagRunType
@@ -100,7 +100,7 @@ class TestCallbackRequest:
from airflow.callbacks.callback_requests import TaskCallbackRequest
from airflow.models import TaskInstance
from airflow.models.taskinstance import SimpleTaskInstance
- from airflow.operators.bash import BashOperator
+ from airflow.providers.standard.operators.bash import BashOperator
test_pod = k8s.V1Pod(metadata=k8s.V1ObjectMeta(name="hello",
namespace="ns"))
op = BashOperator(task_id="hi", executor_config={"pod_override":
test_pod}, bash_command="hi")
@@ -115,7 +115,7 @@ class TestCallbackRequest:
from airflow.callbacks.callback_requests import TaskCallbackRequest
from airflow.models import TaskInstance
from airflow.models.taskinstance import SimpleTaskInstance
- from airflow.operators.bash import BashOperator
+ from airflow.providers.standard.operators.bash import BashOperator
with dag_maker(schedule=timedelta(weeks=1), serialized=True):
op = BashOperator(task_id="hi", bash_command="hi")
diff --git a/tests/cli/commands/test_task_command.py
b/tests/cli/commands/test_task_command.py
index 36cbcc85a72..3397005b80e 100644
--- a/tests/cli/commands/test_task_command.py
+++ b/tests/cli/commands/test_task_command.py
@@ -45,8 +45,8 @@ from airflow.exceptions import AirflowException,
DagRunNotFound
from airflow.executors.local_executor import LocalExecutor
from airflow.models import DagBag, DagRun, Pool, TaskInstance
from airflow.models.serialized_dag import SerializedDagModel
-from airflow.operators.bash import BashOperator
from airflow.operators.empty import EmptyOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils import timezone
from airflow.utils.session import create_session
from airflow.utils.state import State, TaskInstanceState
diff --git a/tests/core/test_core.py b/tests/core/test_core.py
index a75428b33a1..d44235f9552 100644
--- a/tests/core/test_core.py
+++ b/tests/core/test_core.py
@@ -27,9 +27,9 @@ from airflow import settings
from airflow.exceptions import AirflowTaskTimeout
from airflow.models import DagRun, TaskFail, TaskInstance
from airflow.models.baseoperator import BaseOperator
-from airflow.operators.bash import BashOperator
from airflow.operators.empty import EmptyOperator
from airflow.operators.python import PythonOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils.timezone import datetime
from airflow.utils.types import DagRunType
from tests.test_utils.db import clear_db_dags, clear_db_runs,
clear_db_task_fail
diff --git a/tests/dags/subdir2/test_dont_ignore_this.py
b/tests/dags/subdir2/test_dont_ignore_this.py
index 72c1796a424..07f04293d7d 100644
--- a/tests/dags/subdir2/test_dont_ignore_this.py
+++ b/tests/dags/subdir2/test_dont_ignore_this.py
@@ -20,7 +20,7 @@ from __future__ import annotations
from datetime import datetime
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
DEFAULT_DATE = datetime(2019, 12, 1)
diff --git a/tests/dags/test_assets.py b/tests/dags/test_assets.py
index a4ecd6aad4a..014ae6fd0ca 100644
--- a/tests/dags/test_assets.py
+++ b/tests/dags/test_assets.py
@@ -22,8 +22,8 @@ from datetime import datetime
from airflow.assets import Asset
from airflow.exceptions import AirflowFailException, AirflowSkipException
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.operators.python import PythonOperator
+from airflow.providers.standard.operators.bash import BashOperator
skip_task_dag_dataset = Asset("s3://dag_with_skip_task/output_1.txt",
extra={"hi": "bye"})
fail_task_dag_dataset = Asset("s3://dag_with_fail_task/output_1.txt",
extra={"hi": "bye"})
diff --git a/tests/dags/test_backfill_with_upstream_failed_task.py
b/tests/dags/test_backfill_with_upstream_failed_task.py
index d2cb6353bfa..865b0da4ff4 100644
--- a/tests/dags/test_backfill_with_upstream_failed_task.py
+++ b/tests/dags/test_backfill_with_upstream_failed_task.py
@@ -20,7 +20,7 @@ from __future__ import annotations
import datetime
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
dag = DAG(
dag_id="test_backfill_with_upstream_failed_task",
diff --git a/tests/dags/test_default_impersonation.py
b/tests/dags/test_default_impersonation.py
index 468b7dce072..4bee30457b7 100644
--- a/tests/dags/test_default_impersonation.py
+++ b/tests/dags/test_default_impersonation.py
@@ -21,7 +21,7 @@ import textwrap
from datetime import datetime
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
DEFAULT_DATE = datetime(2016, 1, 1)
diff --git a/tests/dags/test_example_bash_operator.py
b/tests/dags/test_example_bash_operator.py
index eb472b8011e..52126f0e102 100644
--- a/tests/dags/test_example_bash_operator.py
+++ b/tests/dags/test_example_bash_operator.py
@@ -20,8 +20,8 @@ from __future__ import annotations
import datetime
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.operators.empty import EmptyOperator
+from airflow.providers.standard.operators.bash import BashOperator
dag = DAG(
dag_id="test_example_bash_operator",
diff --git a/tests/dags/test_failing.py b/tests/dags/test_failing.py
index 28e2fb58819..646665a8802 100644
--- a/tests/dags/test_failing.py
+++ b/tests/dags/test_failing.py
@@ -20,7 +20,7 @@ from __future__ import annotations
import datetime
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
dag = DAG(
dag_id="test_failing_bash_operator",
diff --git a/tests/dags/test_heartbeat_failed_fast.py
b/tests/dags/test_heartbeat_failed_fast.py
index aee7a670305..890756ef201 100644
--- a/tests/dags/test_heartbeat_failed_fast.py
+++ b/tests/dags/test_heartbeat_failed_fast.py
@@ -20,7 +20,7 @@ from __future__ import annotations
from datetime import datetime
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
DEFAULT_DATE = datetime(2016, 1, 1)
diff --git a/tests/dags/test_impersonation.py b/tests/dags/test_impersonation.py
index 33a3c89d328..6c2ca2d8100 100644
--- a/tests/dags/test_impersonation.py
+++ b/tests/dags/test_impersonation.py
@@ -21,7 +21,7 @@ import textwrap
from datetime import datetime
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
DEFAULT_DATE = datetime(2016, 1, 1)
diff --git a/tests/dags/test_miscellaneous.py b/tests/dags/test_miscellaneous.py
index c19277a6175..4a2c6b56a36 100644
--- a/tests/dags/test_miscellaneous.py
+++ b/tests/dags/test_miscellaneous.py
@@ -22,8 +22,8 @@ from __future__ import annotations
import datetime
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.operators.empty import EmptyOperator
+from tests.test_utils.compat import BashOperator
args = {
"owner": "airflow",
diff --git a/tests/dags/test_multiple_dags.py b/tests/dags/test_multiple_dags.py
index 5801084fab7..27f159bfb12 100644
--- a/tests/dags/test_multiple_dags.py
+++ b/tests/dags/test_multiple_dags.py
@@ -20,7 +20,7 @@ from __future__ import annotations
import datetime
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
args = {"owner": "airflow", "retries": 3, "start_date":
datetime.datetime(2022, 1, 1)}
diff --git a/tests/dags/test_no_impersonation.py
b/tests/dags/test_no_impersonation.py
index 2a75d532147..22b47fcc878 100644
--- a/tests/dags/test_no_impersonation.py
+++ b/tests/dags/test_no_impersonation.py
@@ -21,7 +21,7 @@ import textwrap
from datetime import datetime
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
DEFAULT_DATE = datetime(2016, 1, 1)
diff --git a/tests/dags/test_on_failure_callback.py
b/tests/dags/test_on_failure_callback.py
index e2f4ab9027a..f6765a36980 100644
--- a/tests/dags/test_on_failure_callback.py
+++ b/tests/dags/test_on_failure_callback.py
@@ -21,8 +21,8 @@ from datetime import datetime
from airflow.exceptions import AirflowFailException
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.operators.python import PythonOperator
+from airflow.providers.standard.operators.bash import BashOperator
DEFAULT_DATE = datetime(2016, 1, 1)
diff --git a/tests/dags/test_retry_handling_job.py
b/tests/dags/test_retry_handling_job.py
index 7040e8c8756..ede9c4c6ace 100644
--- a/tests/dags/test_retry_handling_job.py
+++ b/tests/dags/test_retry_handling_job.py
@@ -20,7 +20,7 @@ from __future__ import annotations
from datetime import datetime, timedelta
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
default_args = {
"owner": "airflow",
diff --git a/tests/dags/test_sensor.py b/tests/dags/test_sensor.py
index d023949e31a..07c9cc7efdf 100644
--- a/tests/dags/test_sensor.py
+++ b/tests/dags/test_sensor.py
@@ -20,8 +20,8 @@ import datetime
from airflow.decorators import task
from airflow.models.dag import DAG
-from airflow.providers.standard.sensors.date_time import DateTimeSensor
from airflow.utils import timezone
+from tests.test_utils.compat import DateTimeSensor
with DAG(
dag_id="test_sensor", start_date=datetime.datetime(2022, 1, 1),
catchup=False, schedule="@once"
diff --git a/tests/decorators/test_setup_teardown.py
b/tests/decorators/test_setup_teardown.py
index 13451ba379e..1f2a3dcdbc6 100644
--- a/tests/decorators/test_setup_teardown.py
+++ b/tests/decorators/test_setup_teardown.py
@@ -22,7 +22,7 @@ import pytest
from airflow.decorators import setup, task, task_group, teardown
from airflow.decorators.setup_teardown import context_wrapper
from airflow.exceptions import AirflowException
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
pytestmark = pytest.mark.db_test
diff --git a/tests/integration/executors/test_celery_executor.py
b/tests/integration/executors/test_celery_executor.py
index 9c7fe96ff18..4ec1cc458c3 100644
--- a/tests/integration/executors/test_celery_executor.py
+++ b/tests/integration/executors/test_celery_executor.py
@@ -43,7 +43,7 @@ from airflow.executors import base_executor
from airflow.models.dag import DAG
from airflow.models.taskinstance import SimpleTaskInstance, TaskInstance
from airflow.models.taskinstancekey import TaskInstanceKey
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils.state import State, TaskInstanceState
from tests.test_utils import db
diff --git a/tests/jobs/test_scheduler_job.py b/tests/jobs/test_scheduler_job.py
index b4e4c10cff4..d26369f5d72 100644
--- a/tests/jobs/test_scheduler_job.py
+++ b/tests/jobs/test_scheduler_job.py
@@ -60,8 +60,8 @@ from airflow.models.db_callback_request import
DbCallbackRequest
from airflow.models.pool import Pool
from airflow.models.serialized_dag import SerializedDagModel
from airflow.models.taskinstance import SimpleTaskInstance, TaskInstance,
TaskInstanceKey
-from airflow.operators.bash import BashOperator
from airflow.operators.empty import EmptyOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.serialization.serialized_objects import SerializedDAG
from airflow.utils import timezone
from airflow.utils.file import list_py_file_paths
diff --git a/tests/listeners/test_listeners.py
b/tests/listeners/test_listeners.py
index 3c34ab0ff8a..29ec25a9a8d 100644
--- a/tests/listeners/test_listeners.py
+++ b/tests/listeners/test_listeners.py
@@ -25,7 +25,7 @@ import pytest
from airflow.exceptions import AirflowException
from airflow.jobs.job import Job, run_job
from airflow.listeners.listener import get_listener_manager
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils import timezone
from airflow.utils.session import provide_session
from airflow.utils.state import DagRunState, TaskInstanceState
diff --git a/tests/models/test_dag.py b/tests/models/test_dag.py
index ab67c3778c2..c79ca24e03a 100644
--- a/tests/models/test_dag.py
+++ b/tests/models/test_dag.py
@@ -74,9 +74,9 @@ from airflow.models.param import DagParam, Param, ParamsDict
from airflow.models.serialized_dag import SerializedDagModel
from airflow.models.taskfail import TaskFail
from airflow.models.taskinstance import TaskInstance as TI
-from airflow.operators.bash import BashOperator
from airflow.operators.empty import EmptyOperator
from airflow.operators.python import PythonOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.security import permissions
from airflow.templates import NativeEnvironment, SandboxedEnvironment
from airflow.timetables.base import DagRunInfo, DataInterval, TimeRestriction,
Timetable
diff --git a/tests/models/test_dagrun.py b/tests/models/test_dagrun.py
index c7dacaeb291..9184f561b3d 100644
--- a/tests/models/test_dagrun.py
+++ b/tests/models/test_dagrun.py
@@ -36,9 +36,9 @@ from airflow.models.dagrun import DagRun, DagRunNote
from airflow.models.taskinstance import TaskInstance, TaskInstanceNote,
clear_task_instances
from airflow.models.taskmap import TaskMap
from airflow.models.taskreschedule import TaskReschedule
-from airflow.operators.bash import BashOperator
from airflow.operators.empty import EmptyOperator
from airflow.operators.python import ShortCircuitOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.serialization.serialized_objects import SerializedDAG
from airflow.stats import Stats
from airflow.triggers.base import StartTriggerArgs
diff --git a/tests/models/test_renderedtifields.py
b/tests/models/test_renderedtifields.py
index b8c45193814..1de83954ee4 100644
--- a/tests/models/test_renderedtifields.py
+++ b/tests/models/test_renderedtifields.py
@@ -31,7 +31,7 @@ from airflow.configuration import conf
from airflow.decorators import task as task_decorator
from airflow.models import Variable
from airflow.models.renderedtifields import RenderedTaskInstanceFields as RTIF
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils.task_instance_session import
set_current_task_instance_session
from airflow.utils.timezone import datetime
from tests.test_utils.asserts import assert_queries_count
diff --git a/tests/models/test_serialized_dag.py
b/tests/models/test_serialized_dag.py
index d9a77e55eda..93845e95832 100644
--- a/tests/models/test_serialized_dag.py
+++ b/tests/models/test_serialized_dag.py
@@ -31,7 +31,7 @@ from airflow.models.dag import DAG
from airflow.models.dagbag import DagBag
from airflow.models.dagcode import DagCode
from airflow.models.serialized_dag import SerializedDagModel as SDM
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.serialization.serialized_objects import SerializedDAG
from airflow.settings import json
from airflow.utils.hashlib_wrapper import md5
diff --git a/tests/models/test_taskinstance.py
b/tests/models/test_taskinstance.py
index 8c334366f04..c09d3575d1e 100644
--- a/tests/models/test_taskinstance.py
+++ b/tests/models/test_taskinstance.py
@@ -76,9 +76,9 @@ from airflow.models.taskreschedule import TaskReschedule
from airflow.models.variable import Variable
from airflow.models.xcom import LazyXComSelectSequence, XCom
from airflow.notifications.basenotifier import BaseNotifier
-from airflow.operators.bash import BashOperator
from airflow.operators.empty import EmptyOperator
from airflow.operators.python import PythonOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.sensors.base import BaseSensorOperator
from airflow.sensors.python import PythonSensor
from airflow.serialization.serialized_objects import SerializedBaseOperator,
SerializedDAG
diff --git a/tests/models/test_xcom_arg.py b/tests/models/test_xcom_arg.py
index 6108c5e8193..fcc2e546009 100644
--- a/tests/models/test_xcom_arg.py
+++ b/tests/models/test_xcom_arg.py
@@ -19,8 +19,8 @@ from __future__ import annotations
import pytest
from airflow.models.xcom_arg import XComArg
-from airflow.operators.bash import BashOperator
from airflow.operators.python import PythonOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils.types import NOTSET
from tests.test_utils.config import conf_vars
from tests.test_utils.db import clear_db_dags, clear_db_runs
diff --git
a/tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py
b/tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py
index 4622d31b575..12435426dd8 100644
--- a/tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py
+++ b/tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py
@@ -30,7 +30,6 @@ from urllib3 import HTTPResponse
from airflow.exceptions import AirflowException,
AirflowProviderDeprecationWarning
from airflow.models.taskinstancekey import TaskInstanceKey
-from airflow.operators.bash import BashOperator
from airflow.operators.empty import EmptyOperator
from airflow.providers.cncf.kubernetes import pod_generator
from airflow.providers.cncf.kubernetes.executors.kubernetes_executor import (
@@ -55,6 +54,7 @@ from
airflow.providers.cncf.kubernetes.kubernetes_helper_functions import (
from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
from airflow.utils import timezone
from airflow.utils.state import State, TaskInstanceState
+from tests.test_utils.compat import BashOperator
from tests.test_utils.config import conf_vars
pytestmark = pytest.mark.skip_if_database_isolation_mode
diff --git a/tests/providers/cncf/kubernetes/test_template_rendering.py
b/tests/providers/cncf/kubernetes/test_template_rendering.py
index ab2820284d5..4c087d6040e 100644
--- a/tests/providers/cncf/kubernetes/test_template_rendering.py
+++ b/tests/providers/cncf/kubernetes/test_template_rendering.py
@@ -24,11 +24,11 @@ from sqlalchemy.orm import make_transient
from airflow.configuration import TEST_DAGS_FOLDER
from airflow.models.renderedtifields import RenderedTaskInstanceFields,
RenderedTaskInstanceFields as RTIF
-from airflow.operators.bash import BashOperator
from airflow.providers.cncf.kubernetes.template_rendering import
get_rendered_k8s_spec, render_k8s_pod_yaml
from airflow.utils.session import create_session
from airflow.version import version
from tests.models import DEFAULT_DATE
+from tests.test_utils.compat import BashOperator
pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode]
diff --git a/tests/providers/openlineage/extractors/test_bash.py
b/tests/providers/openlineage/extractors/test_bash.py
index de65a1d176d..fc862e5ee30 100644
--- a/tests/providers/openlineage/extractors/test_bash.py
+++ b/tests/providers/openlineage/extractors/test_bash.py
@@ -26,8 +26,8 @@ from openlineage.client.facet_v2 import source_code_job
from airflow import DAG
from airflow.exceptions import AirflowProviderDeprecationWarning
-from airflow.operators.bash import BashOperator
from airflow.providers.openlineage.extractors.bash import BashExtractor
+from tests.test_utils.compat import BashOperator
pytestmark = pytest.mark.db_test
diff --git a/tests/providers/openlineage/extractors/test_python.py
b/tests/providers/openlineage/extractors/test_python.py
index 81284383d86..44c5503b712 100644
--- a/tests/providers/openlineage/extractors/test_python.py
+++ b/tests/providers/openlineage/extractors/test_python.py
@@ -28,9 +28,9 @@ from openlineage.client.facet_v2 import source_code_job
from airflow import DAG
from airflow.exceptions import AirflowProviderDeprecationWarning
-from airflow.operators.bash import BashOperator
from airflow.operators.python import PythonOperator
from airflow.providers.openlineage.extractors.python import PythonExtractor
+from tests.test_utils.compat import BashOperator
pytestmark = pytest.mark.db_test
diff --git a/tests/providers/openlineage/plugins/test_adapter.py
b/tests/providers/openlineage/plugins/test_adapter.py
index 26088347087..b01fe46fdca 100644
--- a/tests/providers/openlineage/plugins/test_adapter.py
+++ b/tests/providers/openlineage/plugins/test_adapter.py
@@ -40,7 +40,6 @@ from openlineage.client.facet_v2 import (
from airflow import DAG
from airflow.models.dagrun import DagRun, DagRunState
from airflow.models.taskinstance import TaskInstance, TaskInstanceState
-from airflow.operators.bash import BashOperator
from airflow.operators.empty import EmptyOperator
from airflow.providers.openlineage.conf import namespace
from airflow.providers.openlineage.extractors import OperatorLineage
@@ -52,6 +51,7 @@ from airflow.providers.openlineage.plugins.facets import (
)
from airflow.providers.openlineage.utils.utils import get_airflow_job_facet
from airflow.utils.task_group import TaskGroup
+from tests.test_utils.compat import BashOperator
from tests.test_utils.config import conf_vars
pytestmark = pytest.mark.db_test
diff --git a/tests/providers/openlineage/plugins/test_facets.py
b/tests/providers/openlineage/plugins/test_facets.py
index 0ed5b4bf7c4..d46cadc9d69 100644
--- a/tests/providers/openlineage/plugins/test_facets.py
+++ b/tests/providers/openlineage/plugins/test_facets.py
@@ -81,7 +81,7 @@ def test_airflow_dag_run_facet():
},
tasks={
"task_0": {
- "operator": "airflow.operators.bash.BashOperator",
+ "operator":
"airflow.providers.standard.operators.bash.BashOperator",
"task_group": None,
"emits_ol_events": True,
"ui_color": "#f0ede4",
diff --git a/tests/providers/openlineage/plugins/test_utils.py
b/tests/providers/openlineage/plugins/test_utils.py
index 5335739a8ff..65874a5eceb 100644
--- a/tests/providers/openlineage/plugins/test_utils.py
+++ b/tests/providers/openlineage/plugins/test_utils.py
@@ -29,7 +29,6 @@ from openlineage.client.utils import RedactMixin
from pkg_resources import parse_version
from airflow.models import DAG as AIRFLOW_DAG, DagModel
-from airflow.operators.bash import BashOperator
from airflow.providers.openlineage.plugins.facets import AirflowDebugRunFacet
from airflow.providers.openlineage.utils.utils import (
InfoJsonEncodable,
@@ -44,11 +43,15 @@ from airflow.providers.openlineage.utils.utils import (
from airflow.utils import timezone
from airflow.utils.log.secrets_masker import _secrets_masker
from airflow.utils.state import State
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from tests.test_utils.compat import AIRFLOW_V_2_10_PLUS, AIRFLOW_V_3_0_PLUS,
BashOperator
if AIRFLOW_V_3_0_PLUS:
from airflow.utils.types import DagRunTriggeredByType
+BASH_OPERATOR_PATH = "airflow.providers.standard.operators.bash"
+if not AIRFLOW_V_2_10_PLUS:
+ BASH_OPERATOR_PATH = "airflow.operators.bash"
+
class SafeStrDict(dict):
def __str__(self):
@@ -262,7 +265,7 @@ def test_get_fully_qualified_class_name():
from airflow.providers.openlineage.plugins.adapter import
OpenLineageAdapter
result = get_fully_qualified_class_name(BashOperator(task_id="test",
bash_command="exit 0;"))
- assert result == "airflow.operators.bash.BashOperator"
+ assert result == f"{BASH_OPERATOR_PATH}.BashOperator"
result = get_fully_qualified_class_name(OpenLineageAdapter())
assert result ==
"airflow.providers.openlineage.plugins.adapter.OpenLineageAdapter"
@@ -278,7 +281,7 @@ def test_is_operator_disabled(mock_disabled_operators):
assert is_operator_disabled(op) is False
mock_disabled_operators.return_value = {
- "airflow.operators.bash.BashOperator",
+ f"{BASH_OPERATOR_PATH}.BashOperator",
"airflow.operators.python.PythonOperator",
}
assert is_operator_disabled(op) is True
@@ -303,8 +306,6 @@ def
test_includes_full_task_info(mock_include_full_task_info):
@patch("airflow.providers.openlineage.conf.include_full_task_info")
def test_does_not_include_full_task_info(mock_include_full_task_info):
- from airflow.operators.bash import BashOperator
-
mock_include_full_task_info.return_value = False
# There should be no 'bash_command' in excludes and it's not in includes -
so
# it's a good choice for checking TaskInfo vs TaskInfoComplete
diff --git a/tests/providers/openlineage/utils/test_utils.py
b/tests/providers/openlineage/utils/test_utils.py
index d97a447e999..20eba76adeb 100644
--- a/tests/providers/openlineage/utils/test_utils.py
+++ b/tests/providers/openlineage/utils/test_utils.py
@@ -27,7 +27,6 @@ from airflow.models.baseoperator import BaseOperator
from airflow.models.dagrun import DagRun
from airflow.models.mappedoperator import MappedOperator
from airflow.models.taskinstance import TaskInstance, TaskInstanceState
-from airflow.operators.bash import BashOperator
from airflow.operators.empty import EmptyOperator
from airflow.operators.python import PythonOperator
from airflow.providers.openlineage.plugins.facets import AirflowDagRunFacet,
AirflowJobFacet
@@ -44,8 +43,13 @@ from airflow.providers.openlineage.utils.utils import (
from airflow.serialization.serialized_objects import SerializedBaseOperator
from airflow.utils.task_group import TaskGroup
from airflow.utils.types import DagRunType
+from tests.test_utils.compat import AIRFLOW_V_2_10_PLUS, BashOperator
from tests.test_utils.mock_operators import MockOperator
+BASH_OPERATOR_PATH = "airflow.providers.standard.operators.bash"
+if not AIRFLOW_V_2_10_PLUS:
+ BASH_OPERATOR_PATH = "airflow.operators.bash"
+
class CustomOperatorForTest(BashOperator):
pass
@@ -82,7 +86,7 @@ def test_get_airflow_job_facet():
},
tasks={
"task_0": {
- "operator": "airflow.operators.bash.BashOperator",
+ "operator": f"{BASH_OPERATOR_PATH}.BashOperator",
"task_group": None,
"emits_ol_events": True,
"ui_color": "#f0ede4",
@@ -166,7 +170,7 @@ def test_get_airflow_dag_run_facet():
def test_get_fully_qualified_class_name_serialized_operator():
- op_module_path = "airflow.operators.bash"
+ op_module_path = BASH_OPERATOR_PATH
op_name = "BashOperator"
op = BashOperator(task_id="test", bash_command="echo 1")
@@ -191,7 +195,7 @@ def test_get_fully_qualified_class_name_mapped_operator():
def test_get_fully_qualified_class_name_bash_operator():
result = get_fully_qualified_class_name(BashOperator(task_id="test",
bash_command="echo 0;"))
- expected_result = "airflow.operators.bash.BashOperator"
+ expected_result = f"{BASH_OPERATOR_PATH}.BashOperator"
assert result == expected_result
@@ -319,7 +323,7 @@ def test_get_tasks_details():
],
},
"task_0": {
- "operator": "airflow.operators.bash.BashOperator",
+ "operator": f"{BASH_OPERATOR_PATH}.BashOperator",
"task_group": None,
"emits_ol_events": True,
"ui_color": BashOperator.ui_color,
@@ -360,7 +364,7 @@ def test_get_tasks_details():
],
},
"task_3": {
- "operator": "airflow.operators.bash.BashOperator",
+ "operator": f"{BASH_OPERATOR_PATH}.BashOperator",
"task_group": None,
"emits_ol_events": True,
"ui_color": BashOperator.ui_color,
@@ -388,7 +392,7 @@ def test_get_tasks_details():
],
},
"task_5": {
- "operator": "airflow.operators.bash.BashOperator",
+ "operator": f"{BASH_OPERATOR_PATH}.BashOperator",
"task_group": None,
"emits_ol_events": True,
"ui_color": BashOperator.ui_color,
diff --git a/tests/operators/test_bash.py
b/tests/providers/standard/operators/test_bash.py
similarity index 99%
rename from tests/operators/test_bash.py
rename to tests/providers/standard/operators/test_bash.py
index 8aacb3b7c77..2c29a0b96dc 100644
--- a/tests/operators/test_bash.py
+++ b/tests/providers/standard/operators/test_bash.py
@@ -28,7 +28,7 @@ from unittest import mock
import pytest
from airflow.exceptions import AirflowException, AirflowSkipException,
AirflowTaskTimeout
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils import timezone
from airflow.utils.state import State
from airflow.utils.types import DagRunType
diff --git a/tests/sensors/test_bash.py
b/tests/providers/standard/sensors/test_bash.py
similarity index 97%
rename from tests/sensors/test_bash.py
rename to tests/providers/standard/sensors/test_bash.py
index 3282f6b9712..d51db033be3 100644
--- a/tests/sensors/test_bash.py
+++ b/tests/providers/standard/sensors/test_bash.py
@@ -23,7 +23,7 @@ import pytest
from airflow.exceptions import AirflowFailException, AirflowSensorTimeout
from airflow.models.dag import DAG
-from airflow.sensors.bash import BashSensor
+from airflow.providers.standard.sensors.bash import BashSensor
class TestBashSensor:
diff --git a/tests/sensors/test_external_task_sensor.py
b/tests/sensors/test_external_task_sensor.py
index 3d6268834dc..9947a197a03 100644
--- a/tests/sensors/test_external_task_sensor.py
+++ b/tests/sensors/test_external_task_sensor.py
@@ -35,9 +35,9 @@ from airflow.models import DagBag, DagRun, TaskInstance
from airflow.models.dag import DAG
from airflow.models.serialized_dag import SerializedDagModel
from airflow.models.xcom_arg import XComArg
-from airflow.operators.bash import BashOperator
from airflow.operators.empty import EmptyOperator
from airflow.operators.python import PythonOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.providers.standard.sensors.time import TimeSensor
from airflow.sensors.external_task import (
ExternalTaskMarker,
diff --git a/tests/serialization/test_dag_serialization.py
b/tests/serialization/test_dag_serialization.py
index d063b3e7803..f0f51704231 100644
--- a/tests/serialization/test_dag_serialization.py
+++ b/tests/serialization/test_dag_serialization.py
@@ -60,11 +60,11 @@ from airflow.models.expandinput import EXPAND_INPUT_EMPTY
from airflow.models.mappedoperator import MappedOperator
from airflow.models.param import Param, ParamsDict
from airflow.models.xcom import XCom
-from airflow.operators.bash import BashOperator
from airflow.operators.empty import EmptyOperator
from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
+from airflow.providers.standard.operators.bash import BashOperator
+from airflow.providers.standard.sensors.bash import BashSensor
from airflow.security import permissions
-from airflow.sensors.bash import BashSensor
from airflow.serialization.enums import Encoding
from airflow.serialization.json_schema import load_dag_schema_dict
from airflow.serialization.serialized_objects import (
@@ -154,7 +154,7 @@ serialized_simple_dag_ground_truth = {
"template_fields_renderers": {"bash_command": "bash",
"env": "json"},
"bash_command": "echo {{ task.task_id }}",
"_task_type": "BashOperator",
- "_task_module": "airflow.operators.bash",
+ "_task_module":
"airflow.providers.standard.operators.bash",
"pool": "default_pool",
"is_setup": False,
"is_teardown": False,
@@ -2284,7 +2284,7 @@ def test_operator_expand_serde():
"_is_empty": False,
"_is_mapped": True,
"_needs_expansion": True,
- "_task_module": "airflow.operators.bash",
+ "_task_module": "airflow.providers.standard.operators.bash",
"_task_type": "BashOperator",
"start_trigger_args": None,
"start_from_trigger": False,
diff --git a/tests/system/core/example_external_task_child_deferrable.py
b/tests/system/core/example_external_task_child_deferrable.py
index 9af83b7699a..781ad4ea5ef 100644
--- a/tests/system/core/example_external_task_child_deferrable.py
+++ b/tests/system/core/example_external_task_child_deferrable.py
@@ -19,7 +19,7 @@ from __future__ import annotations
from datetime import datetime
from airflow import DAG
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
with DAG(
dag_id="child_dag",
diff --git a/tests/system/providers/amazon/aws/example_appflow.py
b/tests/system/providers/amazon/aws/example_appflow.py
index 0fb2764c0b7..5ba38533b02 100644
--- a/tests/system/providers/amazon/aws/example_appflow.py
+++ b/tests/system/providers/amazon/aws/example_appflow.py
@@ -20,7 +20,6 @@ from datetime import datetime
from airflow.models.baseoperator import chain
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.providers.amazon.aws.operators.appflow import (
AppflowRecordsShortCircuitOperator,
AppflowRunAfterOperator,
@@ -28,6 +27,7 @@ from airflow.providers.amazon.aws.operators.appflow import (
AppflowRunDailyOperator,
AppflowRunFullOperator,
)
+from airflow.providers.standard.operators.bash import BashOperator
from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
sys_test_context_task = SystemTestContextBuilder().build()
diff --git a/tests/system/providers/amazon/aws/example_http_to_s3.py
b/tests/system/providers/amazon/aws/example_http_to_s3.py
index 3654140b4a1..d6424f98021 100644
--- a/tests/system/providers/amazon/aws/example_http_to_s3.py
+++ b/tests/system/providers/amazon/aws/example_http_to_s3.py
@@ -23,9 +23,9 @@ from airflow.decorators import task
from airflow.models.baseoperator import chain
from airflow.models.connection import Connection
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator,
S3DeleteBucketOperator
from airflow.providers.amazon.aws.transfers.http_to_s3 import HttpToS3Operator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
diff --git a/tests/system/providers/amazon/aws/utils/k8s.py
b/tests/system/providers/amazon/aws/utils/k8s.py
index 551d09629e9..a882d9e4284 100644
--- a/tests/system/providers/amazon/aws/utils/k8s.py
+++ b/tests/system/providers/amazon/aws/utils/k8s.py
@@ -18,7 +18,7 @@ from __future__ import annotations
from typing import TYPE_CHECKING
-from airflow.operators.bash import BashOperator
+from airflow.providers.standard.operators.bash import BashOperator
if TYPE_CHECKING:
from airflow.models.operator import Operator
diff --git a/tests/system/providers/apache/hive/example_twitter_dag.py
b/tests/system/providers/apache/hive/example_twitter_dag.py
index 53b824f5092..4ceb119ba55 100644
--- a/tests/system/providers/apache/hive/example_twitter_dag.py
+++ b/tests/system/providers/apache/hive/example_twitter_dag.py
@@ -26,8 +26,8 @@ from datetime import date, datetime, timedelta
from airflow import DAG
from airflow.decorators import task
-from airflow.operators.bash import BashOperator
from airflow.providers.apache.hive.operators.hive import HiveOperator
+from airflow.providers.standard.operators.bash import BashOperator
#
--------------------------------------------------------------------------------
# Caveat: This Dag will not run because of missing scripts.
diff --git a/tests/system/providers/apache/iceberg/example_iceberg.py
b/tests/system/providers/apache/iceberg/example_iceberg.py
index 0318a8e22b7..41e751624b5 100644
--- a/tests/system/providers/apache/iceberg/example_iceberg.py
+++ b/tests/system/providers/apache/iceberg/example_iceberg.py
@@ -19,8 +19,8 @@ from __future__ import annotations
from datetime import datetime, timedelta
from airflow import DAG
-from airflow.operators.bash import BashOperator
from airflow.providers.apache.iceberg.hooks.iceberg import IcebergHook
+from airflow.providers.standard.operators.bash import BashOperator
bash_command = f"""
echo "Our token: {IcebergHook().get_token_macro()}"
diff --git a/tests/system/providers/cncf/kubernetes/example_kubernetes.py
b/tests/system/providers/cncf/kubernetes/example_kubernetes.py
index 57bab063a9e..3756d0c4e21 100644
--- a/tests/system/providers/cncf/kubernetes/example_kubernetes.py
+++ b/tests/system/providers/cncf/kubernetes/example_kubernetes.py
@@ -27,9 +27,9 @@ from datetime import datetime
from kubernetes.client import models as k8s
from airflow import DAG
-from airflow.operators.bash import BashOperator
from airflow.providers.cncf.kubernetes.operators.pod import
KubernetesPodOperator
from airflow.providers.cncf.kubernetes.secret import Secret
+from airflow.providers.standard.operators.bash import BashOperator
# [START howto_operator_k8s_cluster_resources]
secret_file = Secret("volume", "/etc/sql_conn", "airflow-secrets",
"sql_alchemy_conn")
diff --git a/tests/system/providers/cncf/kubernetes/example_kubernetes_async.py
b/tests/system/providers/cncf/kubernetes/example_kubernetes_async.py
index 881bfd61f7c..cb3d25a33fc 100644
--- a/tests/system/providers/cncf/kubernetes/example_kubernetes_async.py
+++ b/tests/system/providers/cncf/kubernetes/example_kubernetes_async.py
@@ -27,9 +27,9 @@ from datetime import datetime
from kubernetes.client import models as k8s
from airflow import DAG
-from airflow.operators.bash import BashOperator
from airflow.providers.cncf.kubernetes.operators.pod import
KubernetesPodOperator
from airflow.providers.cncf.kubernetes.secret import Secret
+from airflow.providers.standard.operators.bash import BashOperator
# [START howto_operator_k8s_cluster_resources]
secret_file = Secret("volume", "/etc/sql_conn", "airflow-secrets",
"sql_alchemy_conn")
diff --git a/tests/system/providers/docker/example_docker.py
b/tests/system/providers/docker/example_docker.py
index 069f4794de6..18f7d2f0ea0 100644
--- a/tests/system/providers/docker/example_docker.py
+++ b/tests/system/providers/docker/example_docker.py
@@ -21,8 +21,8 @@ import os
from datetime import datetime
from airflow import models
-from airflow.operators.bash import BashOperator
from airflow.providers.docker.operators.docker import DockerOperator
+from airflow.providers.standard.operators.bash import BashOperator
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
DAG_ID = "docker_test"
diff --git a/tests/system/providers/docker/example_docker_copy_data.py
b/tests/system/providers/docker/example_docker_copy_data.py
index 50373af7925..4e4e8466e50 100644
--- a/tests/system/providers/docker/example_docker_copy_data.py
+++ b/tests/system/providers/docker/example_docker_copy_data.py
@@ -32,9 +32,9 @@ from datetime import datetime
from docker.types import Mount
from airflow import models
-from airflow.operators.bash import BashOperator
from airflow.operators.python import ShortCircuitOperator
from airflow.providers.docker.operators.docker import DockerOperator
+from airflow.providers.standard.operators.bash import BashOperator
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
DAG_ID = "docker_sample_copy_data"
diff --git
a/tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py
b/tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py
index bd74e49d409..004f996975b 100644
--- a/tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py
+++ b/tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py
@@ -25,13 +25,13 @@ import os
from datetime import datetime
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.providers.google.cloud.operators.bigquery import (
BigQueryCreateEmptyDatasetOperator,
BigQueryDeleteDatasetOperator,
BigQueryGetDatasetOperator,
BigQueryUpdateDatasetOperator,
)
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils.trigger_rule import TriggerRule
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
diff --git
a/tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py
b/tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py
index 6878a9822de..ab7a4b3757b 100644
--- a/tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py
+++ b/tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py
@@ -25,7 +25,6 @@ import os
from datetime import datetime
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.providers.google.cloud.operators.bigquery import (
BigQueryCheckOperator,
BigQueryColumnCheckOperator,
@@ -38,6 +37,7 @@ from airflow.providers.google.cloud.operators.bigquery import
(
BigQueryTableCheckOperator,
BigQueryValueCheckOperator,
)
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
diff --git
a/tests/system/providers/google/cloud/bigquery/example_bigquery_queries_async.py
b/tests/system/providers/google/cloud/bigquery/example_bigquery_queries_async.py
index 2f1ed573aa0..a007e1cd639 100644
---
a/tests/system/providers/google/cloud/bigquery/example_bigquery_queries_async.py
+++
b/tests/system/providers/google/cloud/bigquery/example_bigquery_queries_async.py
@@ -25,7 +25,6 @@ import os
from datetime import datetime, timedelta
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.providers.google.cloud.operators.bigquery import (
BigQueryCheckOperator,
BigQueryCreateEmptyDatasetOperator,
@@ -36,6 +35,7 @@ from airflow.providers.google.cloud.operators.bigquery import
(
BigQueryIntervalCheckOperator,
BigQueryValueCheckOperator,
)
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils.trigger_rule import TriggerRule
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
diff --git
a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_mssql.py
b/tests/system/providers/google/cloud/bigquery/example_bigquery_to_mssql.py
index 0f7acd8a14d..e9b3269ecfb 100644
--- a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_mssql.py
+++ b/tests/system/providers/google/cloud/bigquery/example_bigquery_to_mssql.py
@@ -36,7 +36,6 @@ from pendulum import duration
from airflow.decorators import task
from airflow.models import Connection
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
from airflow.providers.google.cloud.hooks.compute import ComputeEngineHook
from airflow.providers.google.cloud.hooks.compute_ssh import
ComputeEngineSSHHook
@@ -51,6 +50,7 @@ from airflow.providers.google.cloud.operators.compute import (
ComputeEngineInsertInstanceOperator,
)
from airflow.providers.ssh.operators.ssh import SSHOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.settings import Session
from airflow.utils.trigger_rule import TriggerRule
diff --git
a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_postgres.py
b/tests/system/providers/google/cloud/bigquery/example_bigquery_to_postgres.py
index 990820bfe28..4a3b0386da0 100644
---
a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_postgres.py
+++
b/tests/system/providers/google/cloud/bigquery/example_bigquery_to_postgres.py
@@ -35,7 +35,6 @@ from pendulum import duration
from airflow.decorators import task
from airflow.models import Connection
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
from airflow.providers.google.cloud.hooks.compute import ComputeEngineHook
from airflow.providers.google.cloud.hooks.compute_ssh import
ComputeEngineSSHHook
@@ -50,6 +49,7 @@ from airflow.providers.google.cloud.operators.compute import (
)
from airflow.providers.google.cloud.transfers.bigquery_to_postgres import
BigQueryToPostgresOperator
from airflow.providers.ssh.operators.ssh import SSHOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.settings import Session
from airflow.utils.trigger_rule import TriggerRule
diff --git
a/tests/system/providers/google/cloud/cloud_build/example_cloud_build.py
b/tests/system/providers/google/cloud/cloud_build/example_cloud_build.py
index 42cfbc8808f..cb31a3b4d09 100644
--- a/tests/system/providers/google/cloud/cloud_build/example_cloud_build.py
+++ b/tests/system/providers/google/cloud/cloud_build/example_cloud_build.py
@@ -31,7 +31,6 @@ import yaml
from airflow.decorators import task_group
from airflow.models.dag import DAG
from airflow.models.xcom_arg import XComArg
-from airflow.operators.bash import BashOperator
from airflow.providers.google.cloud.operators.cloud_build import (
CloudBuildCancelBuildOperator,
CloudBuildCreateBuildOperator,
@@ -39,6 +38,7 @@ from airflow.providers.google.cloud.operators.cloud_build
import (
CloudBuildListBuildsOperator,
CloudBuildRetryBuildOperator,
)
+from airflow.providers.standard.operators.bash import BashOperator
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
diff --git
a/tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py
b/tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py
index dce93182090..4884122751e 100644
---
a/tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py
+++
b/tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py
@@ -32,7 +32,6 @@ from datetime import datetime
from google.protobuf.field_mask_pb2 import FieldMask
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.providers.google.cloud.operators.cloud_memorystore import (
CloudMemorystoreMemcachedApplyParametersOperator,
CloudMemorystoreMemcachedCreateInstanceOperator,
@@ -42,6 +41,7 @@ from
airflow.providers.google.cloud.operators.cloud_memorystore import (
CloudMemorystoreMemcachedUpdateInstanceOperator,
CloudMemorystoreMemcachedUpdateParametersOperator,
)
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils.trigger_rule import TriggerRule
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
diff --git
a/tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py
b/tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py
index aeee437ef9b..c46d966371d 100644
---
a/tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py
+++
b/tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py
@@ -27,7 +27,6 @@ from datetime import datetime
from google.cloud.redis_v1 import FailoverInstanceRequest, Instance
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.providers.google.cloud.operators.cloud_memorystore import (
CloudMemorystoreCreateInstanceAndImportOperator,
CloudMemorystoreCreateInstanceOperator,
@@ -46,6 +45,7 @@ from airflow.providers.google.cloud.operators.gcs import (
GCSCreateBucketOperator,
GCSDeleteBucketOperator,
)
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
diff --git a/tests/system/providers/google/cloud/gcs/example_gcs_copy_delete.py
b/tests/system/providers/google/cloud/gcs/example_gcs_copy_delete.py
index 7d9046088a7..aebb1e3e7ed 100644
--- a/tests/system/providers/google/cloud/gcs/example_gcs_copy_delete.py
+++ b/tests/system/providers/google/cloud/gcs/example_gcs_copy_delete.py
@@ -27,7 +27,6 @@ from datetime import datetime
from airflow.models.baseoperator import chain
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.providers.google.cloud.operators.gcs import (
GCSCreateBucketOperator,
GCSDeleteBucketOperator,
@@ -35,6 +34,7 @@ from airflow.providers.google.cloud.operators.gcs import (
GCSListObjectsOperator,
)
from airflow.providers.google.cloud.transfers.gcs_to_gcs import
GCSToGCSOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
diff --git a/tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py
b/tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py
index 9e92102fa92..55bec85a505 100644
--- a/tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py
+++ b/tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py
@@ -29,7 +29,6 @@ from datetime import datetime
from airflow.decorators import task
from airflow.models.baseoperator import chain
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.operators.python import PythonOperator
from airflow.providers.google.cloud.operators.gcs import (
GCSCreateBucketOperator,
@@ -39,6 +38,7 @@ from airflow.providers.google.cloud.operators.gcs import (
)
from airflow.providers.google.cloud.transfers.gcs_to_gcs import
GCSToGCSOperator
from airflow.providers.google.cloud.transfers.local_to_gcs import
LocalFilesystemToGCSOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
diff --git a/tests/system/providers/google/cloud/gcs/example_mysql_to_gcs.py
b/tests/system/providers/google/cloud/gcs/example_mysql_to_gcs.py
index 56fb0811b06..a673ab88f72 100644
--- a/tests/system/providers/google/cloud/gcs/example_mysql_to_gcs.py
+++ b/tests/system/providers/google/cloud/gcs/example_mysql_to_gcs.py
@@ -34,7 +34,6 @@ import pytest
from airflow.decorators import task
from airflow.models import Connection
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
from airflow.providers.google.cloud.hooks.compute import ComputeEngineHook
from airflow.providers.google.cloud.hooks.compute_ssh import
ComputeEngineSSHHook
@@ -47,6 +46,7 @@ from airflow.providers.google.cloud.operators.gcs import (
GCSDeleteBucketOperator,
)
from airflow.providers.ssh.operators.ssh import SSHOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.settings import Session
from airflow.utils.trigger_rule import TriggerRule
diff --git a/tests/system/providers/google/cloud/gcs/example_sftp_to_gcs.py
b/tests/system/providers/google/cloud/gcs/example_sftp_to_gcs.py
index 31951cd0230..2860d8552e1 100644
--- a/tests/system/providers/google/cloud/gcs/example_sftp_to_gcs.py
+++ b/tests/system/providers/google/cloud/gcs/example_sftp_to_gcs.py
@@ -27,9 +27,9 @@ from pathlib import Path
from airflow.models.baseoperator import chain
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
from airflow.providers.google.cloud.transfers.sftp_to_gcs import SFTPToGCSOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
diff --git a/tests/system/providers/google/cloud/gcs/example_sheets.py
b/tests/system/providers/google/cloud/gcs/example_sheets.py
index 958bd903602..2247819494f 100644
--- a/tests/system/providers/google/cloud/gcs/example_sheets.py
+++ b/tests/system/providers/google/cloud/gcs/example_sheets.py
@@ -26,11 +26,11 @@ from airflow.decorators import task
from airflow.models import Connection
from airflow.models.dag import DAG
from airflow.models.xcom_arg import XComArg
-from airflow.operators.bash import BashOperator
from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
from airflow.providers.google.cloud.transfers.sheets_to_gcs import GoogleSheetsToGCSOperator
from airflow.providers.google.suite.operators.sheets import GoogleSheetsCreateSpreadsheetOperator
from airflow.providers.google.suite.transfers.gcs_to_sheets import GCSToGoogleSheetsOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.settings import Session
from airflow.utils.trigger_rule import TriggerRule
diff --git a/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py b/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py
index 031f8326ee9..173fddad3a0 100644
--- a/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py
+++ b/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py
@@ -25,12 +25,12 @@ import os
from datetime import datetime
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.providers.google.cloud.operators.kubernetes_engine import (
GKECreateClusterOperator,
GKEDeleteClusterOperator,
GKEStartPodOperator,
)
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
diff --git
a/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py
b/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py
index 5e3f4ddbf70..e974a628c7a 100644
---
a/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py
+++
b/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py
@@ -25,12 +25,12 @@ import os
from datetime import datetime
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.providers.google.cloud.operators.kubernetes_engine import (
GKECreateClusterOperator,
GKEDeleteClusterOperator,
GKEStartPodOperator,
)
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
diff --git a/tests/system/providers/google/cloud/ml_engine/example_mlengine.py
b/tests/system/providers/google/cloud/ml_engine/example_mlengine.py
index 87602da88c4..bde2c0bbaf9 100644
--- a/tests/system/providers/google/cloud/ml_engine/example_mlengine.py
+++ b/tests/system/providers/google/cloud/ml_engine/example_mlengine.py
@@ -29,7 +29,6 @@ from google.protobuf.json_format import ParseDict
from google.protobuf.struct_pb2 import Value
from airflow import models
-from airflow.operators.bash import BashOperator
from airflow.providers.google.cloud.operators.gcs import (
GCSCreateBucketOperator,
GCSDeleteBucketOperator,
@@ -53,6 +52,7 @@ from airflow.providers.google.cloud.operators.vertex_ai.model_service import (
ListModelVersionsOperator,
SetDefaultVersionOnModelOperator,
)
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils.trigger_rule import TriggerRule
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")
diff --git a/tests/system/providers/google/cloud/natural_language/example_natural_language.py b/tests/system/providers/google/cloud/natural_language/example_natural_language.py
index 5bc38f92208..e04fdf4fb60 100644
--- a/tests/system/providers/google/cloud/natural_language/example_natural_language.py
+++ b/tests/system/providers/google/cloud/natural_language/example_natural_language.py
@@ -27,13 +27,13 @@ from datetime import datetime
from google.cloud.language_v1 import Document
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.providers.google.cloud.operators.natural_language import (
CloudNaturalLanguageAnalyzeEntitiesOperator,
CloudNaturalLanguageAnalyzeEntitySentimentOperator,
CloudNaturalLanguageAnalyzeSentimentOperator,
CloudNaturalLanguageClassifyTextOperator,
)
+from airflow.providers.standard.operators.bash import BashOperator
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
DAG_ID = "gcp_natural_language"
diff --git a/tests/system/providers/google/cloud/pubsub/example_pubsub.py
b/tests/system/providers/google/cloud/pubsub/example_pubsub.py
index 29ba6469ea0..4ff3091e5fa 100644
--- a/tests/system/providers/google/cloud/pubsub/example_pubsub.py
+++ b/tests/system/providers/google/cloud/pubsub/example_pubsub.py
@@ -25,7 +25,6 @@ import os
from datetime import datetime
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.providers.google.cloud.operators.pubsub import (
PubSubCreateSubscriptionOperator,
PubSubCreateTopicOperator,
@@ -35,6 +34,7 @@ from airflow.providers.google.cloud.operators.pubsub import (
PubSubPullOperator,
)
from airflow.providers.google.cloud.sensors.pubsub import PubSubPullSensor
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils.trigger_rule import TriggerRule
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
diff --git
a/tests/system/providers/google/cloud/sql_to_sheets/example_sql_to_sheets.py
b/tests/system/providers/google/cloud/sql_to_sheets/example_sql_to_sheets.py
index cf256c8f9fa..11231c0dfd4 100644
--- a/tests/system/providers/google/cloud/sql_to_sheets/example_sql_to_sheets.py
+++ b/tests/system/providers/google/cloud/sql_to_sheets/example_sql_to_sheets.py
@@ -34,7 +34,6 @@ from datetime import datetime
from airflow.decorators import task
from airflow.models import Connection
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
from airflow.providers.google.cloud.hooks.compute import ComputeEngineHook
from airflow.providers.google.cloud.hooks.compute_ssh import ComputeEngineSSHHook
@@ -45,6 +44,7 @@ from airflow.providers.google.cloud.operators.compute import (
from airflow.providers.google.suite.operators.sheets import GoogleSheetsCreateSpreadsheetOperator
from airflow.providers.google.suite.transfers.sql_to_sheets import SQLToGoogleSheetsOperator
from airflow.providers.ssh.operators.ssh import SSHOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.settings import Session, json
from airflow.utils.trigger_rule import TriggerRule
diff --git a/tests/system/providers/google/cloud/tasks/example_queue.py
b/tests/system/providers/google/cloud/tasks/example_queue.py
index 53919fb146d..4c29b584f5b 100644
--- a/tests/system/providers/google/cloud/tasks/example_queue.py
+++ b/tests/system/providers/google/cloud/tasks/example_queue.py
@@ -35,7 +35,6 @@ from google.protobuf.field_mask_pb2 import FieldMask
from airflow.decorators import task
from airflow.models.baseoperator import chain
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.providers.google.cloud.operators.tasks import (
CloudTasksQueueCreateOperator,
CloudTasksQueueDeleteOperator,
@@ -46,6 +45,7 @@ from airflow.providers.google.cloud.operators.tasks import (
CloudTasksQueuesListOperator,
CloudTasksQueueUpdateOperator,
)
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils.trigger_rule import TriggerRule
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
diff --git
a/tests/system/providers/google/cloud/transfers/example_postgres_to_gcs.py
b/tests/system/providers/google/cloud/transfers/example_postgres_to_gcs.py
index 8394c99fcef..33a289c1ffa 100644
--- a/tests/system/providers/google/cloud/transfers/example_postgres_to_gcs.py
+++ b/tests/system/providers/google/cloud/transfers/example_postgres_to_gcs.py
@@ -33,7 +33,6 @@ from datetime import datetime
from airflow.decorators import task
from airflow.models import Connection
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
from airflow.providers.google.cloud.hooks.compute import ComputeEngineHook
from airflow.providers.google.cloud.hooks.compute_ssh import ComputeEngineSSHHook
@@ -47,6 +46,7 @@ from airflow.providers.google.cloud.operators.gcs import (
)
from airflow.providers.google.cloud.transfers.postgres_to_gcs import PostgresToGCSOperator
from airflow.providers.ssh.operators.ssh import SSHOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.settings import Session
from airflow.utils.trigger_rule import TriggerRule
diff --git a/tests/system/providers/google/cloud/translate/example_translate.py
b/tests/system/providers/google/cloud/translate/example_translate.py
index 87f424673ec..b593060f6e5 100644
--- a/tests/system/providers/google/cloud/translate/example_translate.py
+++ b/tests/system/providers/google/cloud/translate/example_translate.py
@@ -25,8 +25,8 @@ from __future__ import annotations
from datetime import datetime
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.providers.google.cloud.operators.translate import CloudTranslateTextOperator
+from airflow.providers.standard.operators.bash import BashOperator
DAG_ID = "gcp_translate"
diff --git
a/tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py
b/tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py
index eae6a54a89c..499db2d6427 100644
---
a/tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py
+++
b/tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py
@@ -33,7 +33,6 @@ from google.api_core.retry import Retry
from airflow.models.baseoperator import chain
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
from airflow.providers.google.cloud.operators.video_intelligence import (
CloudVideoIntelligenceDetectVideoExplicitContentOperator,
@@ -41,6 +40,7 @@ from airflow.providers.google.cloud.operators.video_intelligence import (
CloudVideoIntelligenceDetectVideoShotsOperator,
)
from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils.trigger_rule import TriggerRule
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
diff --git
a/tests/system/providers/google/cloud/vision/example_vision_annotate_image.py
b/tests/system/providers/google/cloud/vision/example_vision_annotate_image.py
index 1d6167c6866..2a4d7b75f13 100644
---
a/tests/system/providers/google/cloud/vision/example_vision_annotate_image.py
+++
b/tests/system/providers/google/cloud/vision/example_vision_annotate_image.py
@@ -22,7 +22,6 @@ from datetime import datetime
from airflow.models.baseoperator import chain
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
from airflow.providers.google.cloud.operators.vision import (
CloudVisionDetectImageLabelsOperator,
@@ -32,6 +31,7 @@ from airflow.providers.google.cloud.operators.vision import (
CloudVisionTextDetectOperator,
)
from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils.trigger_rule import TriggerRule
# [START howto_operator_vision_retry_import]
diff --git
a/tests/system/providers/google/datacatalog/example_datacatalog_entries.py
b/tests/system/providers/google/datacatalog/example_datacatalog_entries.py
index db7d74b18d9..47edfb96368 100644
--- a/tests/system/providers/google/datacatalog/example_datacatalog_entries.py
+++ b/tests/system/providers/google/datacatalog/example_datacatalog_entries.py
@@ -24,7 +24,6 @@ from google.protobuf.field_mask_pb2 import FieldMask
from airflow.models.dag import DAG
from airflow.models.xcom_arg import XComArg
-from airflow.operators.bash import BashOperator
from airflow.providers.google.cloud.operators.datacatalog import (
CloudDataCatalogCreateEntryGroupOperator,
CloudDataCatalogCreateEntryOperator,
@@ -36,6 +35,7 @@ from airflow.providers.google.cloud.operators.datacatalog import (
CloudDataCatalogUpdateEntryOperator,
)
from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
diff --git
a/tests/system/providers/google/datacatalog/example_datacatalog_search_catalog.py
b/tests/system/providers/google/datacatalog/example_datacatalog_search_catalog.py
index 8061ecaf110..781d047c534 100644
---
a/tests/system/providers/google/datacatalog/example_datacatalog_search_catalog.py
+++
b/tests/system/providers/google/datacatalog/example_datacatalog_search_catalog.py
@@ -25,7 +25,6 @@ from google.cloud.datacatalog import TagField, TagTemplateField
from airflow.models.dag import DAG
from airflow.models.xcom_arg import XComArg
-from airflow.operators.bash import BashOperator
from airflow.providers.google.cloud.operators.datacatalog import (
CloudDataCatalogCreateEntryGroupOperator,
CloudDataCatalogCreateEntryOperator,
@@ -38,6 +37,7 @@ from airflow.providers.google.cloud.operators.datacatalog import (
CloudDataCatalogSearchCatalogOperator,
)
from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
diff --git
a/tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py
b/tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py
index 6c1fa6f0cab..b8dd9170c3c 100644
---
a/tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py
+++
b/tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py
@@ -24,7 +24,6 @@ from google.cloud.datacatalog import FieldType, TagTemplateField
from airflow.models.dag import DAG
from airflow.models.xcom_arg import XComArg
-from airflow.operators.bash import BashOperator
from airflow.providers.google.cloud.operators.datacatalog import (
CloudDataCatalogCreateTagTemplateFieldOperator,
CloudDataCatalogCreateTagTemplateOperator,
@@ -35,6 +34,7 @@ from airflow.providers.google.cloud.operators.datacatalog import (
CloudDataCatalogUpdateTagTemplateFieldOperator,
CloudDataCatalogUpdateTagTemplateOperator,
)
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
diff --git
a/tests/system/providers/google/datacatalog/example_datacatalog_tags.py
b/tests/system/providers/google/datacatalog/example_datacatalog_tags.py
index 28764c3e033..17397fcea28 100644
--- a/tests/system/providers/google/datacatalog/example_datacatalog_tags.py
+++ b/tests/system/providers/google/datacatalog/example_datacatalog_tags.py
@@ -25,7 +25,6 @@ from google.cloud.datacatalog import TagField, TagTemplateField
from airflow.models.dag import DAG
from airflow.models.xcom_arg import XComArg
-from airflow.operators.bash import BashOperator
from airflow.providers.google.cloud.operators.datacatalog import (
CloudDataCatalogCreateEntryGroupOperator,
CloudDataCatalogCreateEntryOperator,
@@ -39,6 +38,7 @@ from airflow.providers.google.cloud.operators.datacatalog import (
CloudDataCatalogUpdateTagOperator,
)
from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
+from airflow.providers.standard.operators.bash import BashOperator
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
diff --git a/tests/system/providers/opsgenie/example_opsgenie_notifier.py
b/tests/system/providers/opsgenie/example_opsgenie_notifier.py
index a9cdd70de01..10edf8debda 100644
--- a/tests/system/providers/opsgenie/example_opsgenie_notifier.py
+++ b/tests/system/providers/opsgenie/example_opsgenie_notifier.py
@@ -21,8 +21,8 @@ from __future__ import annotations
from datetime import datetime
from airflow import DAG
-from airflow.operators.bash import BashOperator
from airflow.providers.opsgenie.notifications.opsgenie import send_opsgenie_notification
+from airflow.providers.standard.operators.bash import BashOperator
with DAG(
"opsgenie_notifier",
diff --git a/tests/system/providers/singularity/example_singularity.py
b/tests/system/providers/singularity/example_singularity.py
index d802fbb31e8..4b60c080dcd 100644
--- a/tests/system/providers/singularity/example_singularity.py
+++ b/tests/system/providers/singularity/example_singularity.py
@@ -21,8 +21,8 @@ import os
from datetime import datetime, timedelta
from airflow import DAG
-from airflow.operators.bash import BashOperator
from airflow.providers.singularity.operators.singularity import SingularityOperator
+from airflow.providers.standard.operators.bash import BashOperator
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
DAG_ID = "singularity_sample"
diff --git a/tests/test_utils/compat.py b/tests/test_utils/compat.py
index 09f3653db82..67a111350da 100644
--- a/tests/test_utils/compat.py
+++ b/tests/test_utils/compat.py
@@ -18,8 +18,6 @@ from __future__ import annotations
import contextlib
import json
-import os
-from importlib.metadata import version
from typing import TYPE_CHECKING, Any, cast
from packaging.version import Version
@@ -38,7 +36,6 @@ try:
except ImportError:
from airflow.models.errors import ImportError as ParseImportError # type: ignore[no-redef,attr-defined]
-
from airflow import __version__ as airflow_version
AIRFLOW_VERSION = Version(airflow_version)
@@ -53,6 +50,16 @@ except ImportError:
# Compatibility for Airflow 2.7.*
from airflow.models.baseoperator import BaseOperatorLink
+try:
+ from airflow.providers.standard.operators.bash import BashOperator
+ from airflow.providers.standard.sensors.bash import BashSensor
+ from airflow.providers.standard.sensors.date_time import DateTimeSensor
+except ImportError:
+ # Compatibility for Airflow < 2.10.*
+ from airflow.operators.bash import BashOperator # type: ignore[no-redef,attr-defined]
+ from airflow.sensors.bash import BashSensor # type: ignore[no-redef,attr-defined]
+ from airflow.sensors.date_time import DateTimeSensor # type: ignore[no-redef,attr-defined]
+
if TYPE_CHECKING:
from airflow.models.asset import (
diff --git a/tests/utils/test_dot_renderer.py b/tests/utils/test_dot_renderer.py
index 5cb52696f19..0376848fce8 100644
--- a/tests/utils/test_dot_renderer.py
+++ b/tests/utils/test_dot_renderer.py
@@ -23,13 +23,13 @@ from unittest import mock
import pytest
from airflow.models.dag import DAG
-from airflow.operators.bash import BashOperator
from airflow.operators.empty import EmptyOperator
from airflow.operators.python import PythonOperator
from airflow.serialization.dag_dependency import DagDependency
from airflow.utils import dot_renderer, timezone
from airflow.utils.state import State
from airflow.utils.task_group import TaskGroup
+from tests.test_utils.compat import BashOperator
from tests.test_utils.db import clear_db_dags
START_DATE = timezone.utcnow()
diff --git a/tests/utils/test_task_group.py b/tests/utils/test_task_group.py
index 084d8c35ac0..a6008dc58c0 100644
--- a/tests/utils/test_task_group.py
+++ b/tests/utils/test_task_group.py
@@ -34,12 +34,12 @@ from airflow.exceptions import TaskAlreadyInTaskGroup
from airflow.models.baseoperator import BaseOperator
from airflow.models.dag import DAG
from airflow.models.xcom_arg import XComArg
-from airflow.operators.bash import BashOperator
from airflow.operators.empty import EmptyOperator
from airflow.operators.python import PythonOperator
from airflow.utils.dag_edges import dag_edges
from airflow.utils.task_group import TASKGROUP_ARGS_EXPECTED_TYPES, TaskGroup, task_group_to_dict
from tests.models import DEFAULT_DATE
+from tests.test_utils.compat import BashOperator
def make_task(name, type_="classic"):
diff --git a/tests/www/views/test_views_rendered.py
b/tests/www/views/test_views_rendered.py
index f3947b141a3..2d1754af29f 100644
--- a/tests/www/views/test_views_rendered.py
+++ b/tests/www/views/test_views_rendered.py
@@ -28,7 +28,6 @@ from airflow.models.baseoperator import BaseOperator
from airflow.models.dag import DAG
from airflow.models.renderedtifields import RenderedTaskInstanceFields
from airflow.models.variable import Variable
-from airflow.operators.bash import BashOperator
from airflow.operators.python import PythonOperator
from airflow.serialization.serialized_objects import SerializedDAG
from airflow.utils import timezone
@@ -36,7 +35,7 @@ from airflow.utils.session import create_session
from airflow.utils.state import DagRunState, TaskInstanceState
from airflow.utils.types import DagRunType
from tests.conftest import initial_db_init
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS, BashOperator
from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_rendered_ti_fields
from tests.test_utils.www import check_content_in_response, check_content_not_in_response
diff --git a/tests/www/views/test_views_tasks.py
b/tests/www/views/test_views_tasks.py
index 7b65051724c..4dcb7252a36 100644
--- a/tests/www/views/test_views_tasks.py
+++ b/tests/www/views/test_views_tasks.py
@@ -34,7 +34,6 @@ from airflow.models.dagcode import DagCode
from airflow.models.taskinstance import TaskInstance
from airflow.models.taskreschedule import TaskReschedule
from airflow.models.xcom import XCom
-from airflow.operators.bash import BashOperator
from airflow.operators.empty import EmptyOperator
from airflow.providers.celery.executors.celery_executor import CeleryExecutor
from airflow.security import permissions
@@ -49,7 +48,7 @@ from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import (
delete_roles,
delete_user,
)
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS, BashOperator
from tests.test_utils.config import conf_vars
from tests.test_utils.db import clear_db_runs, clear_db_xcom
from tests.test_utils.www import check_content_in_response, check_content_not_in_response, client_with_login