This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-8-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 91475e801e2d3259b0532ad0069c63c969f723fc
Author: Jarek Potiuk <[email protected]>
AuthorDate: Sun Feb 11 18:19:34 2024 +0100

    Get rid of pytest-httpx as dependency (#37334)
    
    We were using it in one test case only that can be easily mocked
    manually. Pytest-httpx blocks us from upgrading to pytest 8 because
    the newer versions of it that support Pytest 8 require Python >= 3.9.
    
    At the same time we make an explicit dependency on requests, which
    is also used internally, and bump the min version to 2.27.0, which
    is already more than a year old; some of our providers already had
    this min version set.
    
    (cherry picked from commit 78294c24e2a522815459b5ba9f67b9d2f8143990)
---
 Dockerfile                                 |  2 +-
 Dockerfile.ci                              |  2 +-
 airflow/providers/databricks/provider.yaml |  2 +-
 airflow/providers/http/provider.yaml       |  2 +-
 airflow/providers/influxdb/provider.yaml   |  2 +-
 generated/provider_dependencies.json       |  6 +++---
 pyproject.toml                             | 11 ++++++-----
 tests/cli/commands/test_info_command.py    | 31 +++++++++++++++---------------
 8 files changed, 30 insertions(+), 28 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 80233a5bcc..6bb9341abf 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1403,7 +1403,7 @@ COPY --from=scripts install_from_docker_context_files.sh 
install_airflow.sh \
 # an incorrect architecture.
 ARG TARGETARCH
 # Value to be able to easily change cache id and therefore use a bare new cache
-ARG PIP_CACHE_EPOCH="0"
+ARG PIP_CACHE_EPOCH="9"
 
 # hadolint ignore=SC2086, SC2010, DL3042
 RUN 
--mount=type=cache,id=$PYTHON_BASE_IMAGE-$AIRFLOW_PIP_VERSION-$TARGETARCH-$PIP_CACHE_EPOCH,target=/tmp/.cache/pip,uid=${AIRFLOW_UID}
 \
diff --git a/Dockerfile.ci b/Dockerfile.ci
index bef8a6b564..e915d6a51f 100644
--- a/Dockerfile.ci
+++ b/Dockerfile.ci
@@ -1071,7 +1071,7 @@ ARG AIRFLOW_CONSTRAINTS_LOCATION=""
 ARG DEFAULT_CONSTRAINTS_BRANCH="constraints-main"
 # By changing the epoch we can force reinstalling Airflow and pip all 
dependencies
 # It can also be overwritten manually by setting the AIRFLOW_CI_BUILD_EPOCH 
environment variable.
-ARG AIRFLOW_CI_BUILD_EPOCH="6"
+ARG AIRFLOW_CI_BUILD_EPOCH="7"
 ARG AIRFLOW_PRE_CACHED_PIP_PACKAGES="true"
 ARG AIRFLOW_PIP_VERSION=24.0
 # Setup PIP
diff --git a/airflow/providers/databricks/provider.yaml 
b/airflow/providers/databricks/provider.yaml
index ad3d5f86f7..b08850428f 100644
--- a/airflow/providers/databricks/provider.yaml
+++ b/airflow/providers/databricks/provider.yaml
@@ -60,7 +60,7 @@ versions:
 dependencies:
   - apache-airflow>=2.6.0
   - apache-airflow-providers-common-sql>=1.10.0
-  - requests>=2.27,<3
+  - requests>=2.27.0,<3
   # The connector 2.9.0 released on Aug 10, 2023 has a bug that it does not 
properly declare urllib3 and
   # it needs to be excluded. See 
https://github.com/databricks/databricks-sql-python/issues/190
   # The 2.9.1 (to be released soon) already contains the fix
diff --git a/airflow/providers/http/provider.yaml 
b/airflow/providers/http/provider.yaml
index fd252fbadb..0c0e0958cf 100644
--- a/airflow/providers/http/provider.yaml
+++ b/airflow/providers/http/provider.yaml
@@ -54,7 +54,7 @@ dependencies:
   - apache-airflow>=2.6.0
   # The 2.26.0 release of requests got rid of the chardet LGPL mandatory 
dependency, allowing us to
   # release it as a requirement for airflow
-  - requests>=2.26.0
+  - requests>=2.27.0,<3
   - requests_toolbelt
   - aiohttp>=3.9.2
   - asgiref
diff --git a/airflow/providers/influxdb/provider.yaml 
b/airflow/providers/influxdb/provider.yaml
index 6820b4cfeb..097bed145f 100644
--- a/airflow/providers/influxdb/provider.yaml
+++ b/airflow/providers/influxdb/provider.yaml
@@ -26,7 +26,7 @@ description: |
 dependencies:
   - apache-airflow>=2.6.0
   - influxdb-client>=1.19.0
-  - requests>=2.26.0
+  - requests>=2.27.0,<3
 
 state: ready
 source-date-epoch: 1703288143
diff --git a/generated/provider_dependencies.json 
b/generated/provider_dependencies.json
index e54396ad8f..bc3d4c84db 100644
--- a/generated/provider_dependencies.json
+++ b/generated/provider_dependencies.json
@@ -381,7 +381,7 @@
       "apache-airflow-providers-common-sql>=1.10.0",
       "apache-airflow>=2.6.0",
       "databricks-sql-connector>=2.0.0, <3.0.0, !=2.9.0",
-      "requests>=2.27,<3"
+      "requests>=2.27.0,<3"
     ],
     "devel-deps": [
       "deltalake>=0.12.0"
@@ -624,7 +624,7 @@
       "aiohttp>=3.9.2",
       "apache-airflow>=2.6.0",
       "asgiref",
-      "requests>=2.26.0",
+      "requests>=2.27.0,<3",
       "requests_toolbelt"
     ],
     "devel-deps": [],
@@ -645,7 +645,7 @@
     "deps": [
       "apache-airflow>=2.6.0",
       "influxdb-client>=1.19.0",
-      "requests>=2.26.0"
+      "requests>=2.27.0,<3"
     ],
     "devel-deps": [],
     "cross-providers-deps": [],
diff --git a/pyproject.toml b/pyproject.toml
index 0a44d32d16..42dea4c650 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -133,6 +133,8 @@ dependencies = [
     "python-dateutil>=2.3",
     "python-nvd3>=0.15.0",
     "python-slugify>=5.0",
+    # Requests 3 if it will be released, will be heavily breaking.
+    "requests>=2.27.0,<3",
     "rfc3339-validator>=0.1.4",
     "rich-argparse>=1.0.0",
     "rich>=12.4.4",
@@ -189,7 +191,7 @@ cgroups = [
     "cgroupspy>=0.2.2",
 ]
 deprecated-api = [
-    "requests>=2.26.0",
+    "requests>=2.27.0,<3",
 ]
 github-enterprise = [
     "apache-airflow[fab]",
@@ -344,7 +346,6 @@ devel-tests = [
     "coverage>=7.2",
     "pytest-asyncio>=0.23.3",
     "pytest-cov>=4.1.0",
-    "pytest-httpx>=0.21.3",
     "pytest-icdiff>=0.9",
     "pytest-instafail>=0.5.0",
     "pytest-mock>=3.12.0",
@@ -658,7 +659,7 @@ databricks = [ # source: 
airflow/providers/databricks/provider.yaml
   "aiohttp>=3.9.2, <4",
   "apache-airflow[common_sql]",
   "databricks-sql-connector>=2.0.0, <3.0.0, !=2.9.0",
-  "requests>=2.27,<3",
+  "requests>=2.27.0,<3",
   # Devel dependencies for the databricks provider
   "deltalake>=0.12.0",
 ]
@@ -767,13 +768,13 @@ hashicorp = [ # source: 
airflow/providers/hashicorp/provider.yaml
 http = [ # source: airflow/providers/http/provider.yaml
   "aiohttp>=3.9.2",
   "asgiref",
-  "requests>=2.26.0",
+  "requests>=2.27.0,<3",
   "requests_toolbelt",
 ]
 imap = [] # source: airflow/providers/imap/provider.yaml
 influxdb = [ # source: airflow/providers/influxdb/provider.yaml
   "influxdb-client>=1.19.0",
-  "requests>=2.26.0",
+  "requests>=2.27.0,<3",
 ]
 jdbc = [ # source: airflow/providers/jdbc/provider.yaml
   "apache-airflow[common_sql]",
diff --git a/tests/cli/commands/test_info_command.py 
b/tests/cli/commands/test_info_command.py
index 74426f1225..9670f3de6a 100644
--- a/tests/cli/commands/test_info_command.py
+++ b/tests/cli/commands/test_info_command.py
@@ -21,7 +21,9 @@ import importlib
 import logging
 import os
 from io import StringIO
+from unittest import mock
 
+import httpx
 import pytest
 from rich.console import Console
 
@@ -181,18 +183,17 @@ class TestInfoCommandMockHttpx:
             ("database", "sql_alchemy_conn"): 
"postgresql+psycopg2://postgres:airflow@postgres/airflow",
         }
     )
-    def test_show_info_anonymize_fileio(self, httpx_mock, setup_parser):
-        httpx_mock.add_response(
-            url="https://file.io";,
-            method="post",
-            json={
-                "success": True,
-                "key": "f9U3zs3I",
-                "link": "https://file.io/TEST";,
-                "expiry": "14 days",
-            },
-            status_code=200,
-        )
-        with contextlib.redirect_stdout(StringIO()) as stdout:
-            info_command.show_info(setup_parser.parse_args(["info", 
"--file-io"]))
-        assert "https://file.io/TEST"; in stdout.getvalue()
+    def test_show_info_anonymize_fileio(self, setup_parser):
+        with mock.patch("airflow.cli.commands.info_command.httpx.post") as 
post:
+            post.return_value = httpx.Response(
+                status_code=200,
+                json={
+                    "success": True,
+                    "key": "f9U3zs3I",
+                    "link": "https://file.io/TEST";,
+                    "expiry": "14 days",
+                },
+            )
+            with contextlib.redirect_stdout(StringIO()) as stdout:
+                info_command.show_info(setup_parser.parse_args(["info", 
"--file-io", "--anonymize"]))
+            assert "https://file.io/TEST"; in stdout.getvalue()

Reply via email to