This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new aa75fbb2b8 Restore Python 3.12 support for Databricks (#38207)
aa75fbb2b8 is described below
commit aa75fbb2b849e1f43b04e2f73ccc834511ea01e5
Author: Jarek Potiuk <[email protected]>
AuthorDate: Sat Mar 16 12:45:18 2024 +0100
Restore Python 3.12 support for Databricks (#38207)
We excluded Python 3.12 from Databricks provider, because it was
failing our Python 3.12 tests intermittently (but often enough to
make a difference). It turned out that this was caused by running
the tests with coverage enabled and PEP 669 implementation in
Python 3.12 impacting intermittently performance of tests run
with coverage. However, it seems that the experimental PEP 669 support
implemented in coverage 7.4.0 is nicely handling the performance
issues and after #38194 we should be able to enable Python 3.12 for
Databricks without impacting our tests.
Related: https://github.com/databricks/databricks-sql-python/issues/369
---
airflow/providers/databricks/provider.yaml | 6 ------
generated/provider_dependencies.json | 4 +---
pyproject.toml | 13 +++++++------
3 files changed, 8 insertions(+), 15 deletions(-)
diff --git a/airflow/providers/databricks/provider.yaml
b/airflow/providers/databricks/provider.yaml
index 6bc0bbfe87..ddc22d4a4c 100644
--- a/airflow/providers/databricks/provider.yaml
+++ b/airflow/providers/databricks/provider.yaml
@@ -60,12 +60,6 @@ versions:
- 1.0.1
- 1.0.0
-
-# Databricks is excluded for Python 3.12 because running databricks-sql-python
imports on Python 3.12
-# Cause extremely long import times
https://github.com/databricks/databricks-sql-python/issues/369
-# and until the problem is fixed, we exclude Python 3.12 for Databricks
provider
-excluded-python-versions: ['3.12']
-
dependencies:
- apache-airflow>=2.6.0
- apache-airflow-providers-common-sql>=1.10.0
diff --git a/generated/provider_dependencies.json
b/generated/provider_dependencies.json
index 480ebc76fe..10d50d9bba 100644
--- a/generated/provider_dependencies.json
+++ b/generated/provider_dependencies.json
@@ -377,9 +377,7 @@
"cross-providers-deps": [
"common.sql"
],
- "excluded-python-versions": [
- "3.12"
- ],
+ "excluded-python-versions": [],
"state": "ready"
},
"datadog": {
diff --git a/pyproject.toml b/pyproject.toml
index 2fe574eaab..2a26414bec 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -346,7 +346,8 @@ devel-tests = [
"aioresponses>=0.7.6",
"backports.zoneinfo>=0.2.1;python_version<'3.9'",
"beautifulsoup4>=4.7.1",
- "coverage>=7.2",
+ # Coverage 7.4.0 added experimental support for Python 3.12 PEP669 which
we use in Airflow
+ "coverage>=7.4.0",
"pytest-asyncio>=0.23.3",
"pytest-cov>=4.1.0",
"pytest-icdiff>=0.9",
@@ -665,12 +666,12 @@ common-sql = [ # source:
airflow/providers/common/sql/provider.yaml
"sqlparse>=0.4.2",
]
databricks = [ # source: airflow/providers/databricks/provider.yaml
- "aiohttp>=3.9.2, <4;python_version != \"3.12\"",
- "apache-airflow[common_sql];python_version != \"3.12\"",
- "databricks-sql-connector>=2.0.0, <3.0.0, !=2.9.0;python_version !=
\"3.12\"",
- "requests>=2.27.0,<3;python_version != \"3.12\"",
+ "aiohttp>=3.9.2, <4",
+ "apache-airflow[common_sql]",
+ "databricks-sql-connector>=2.0.0, <3.0.0, !=2.9.0",
+ "requests>=2.27.0,<3",
# Devel dependencies for the databricks provider
- "deltalake>=0.12.0;python_version != \"3.12\"",
+ "deltalake>=0.12.0",
]
datadog = [ # source: airflow/providers/datadog/provider.yaml
"datadog>=0.14.0",