This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 2f70daf5ac Databricks: fix test_connection implementation (#25114)
2f70daf5ac is described below
commit 2f70daf5ac36100ff0bbd4ac66ce921a2bc6dea0
Author: Alex Ott <[email protected]>
AuthorDate: Mon Jul 18 14:45:11 2022 +0200
Databricks: fix test_connection implementation (#25114)
Original implementation has used the API that exists only on AWS, so it
will be failing on
Azure & GCP Databricks. This PR uses another API that is available on all
Databricks
cloud platforms.
---
airflow/providers/databricks/hooks/databricks.py | 4 ++--
tests/providers/databricks/hooks/test_databricks.py | 18 +++++++++++-------
2 files changed, 13 insertions(+), 9 deletions(-)
diff --git a/airflow/providers/databricks/hooks/databricks.py b/airflow/providers/databricks/hooks/databricks.py
index 33b4aeb8f7..9ae061b02e 100644
--- a/airflow/providers/databricks/hooks/databricks.py
+++ b/airflow/providers/databricks/hooks/databricks.py
@@ -52,7 +52,7 @@ WORKSPACE_GET_STATUS_ENDPOINT = ('GET', 'api/2.0/workspace/get-status')
RUN_LIFE_CYCLE_STATES = ['PENDING', 'RUNNING', 'TERMINATING', 'TERMINATED',
'SKIPPED', 'INTERNAL_ERROR']
-LIST_ZONES_ENDPOINT = ('GET', 'api/2.0/clusters/list-zones')
+SPARK_VERSIONS_ENDPOINT = ('GET', 'api/2.0/clusters/spark-versions')
class RunState:
@@ -415,7 +415,7 @@ class DatabricksHook(BaseDatabricksHook):
"""Test the Databricks connectivity from UI"""
hook = DatabricksHook(databricks_conn_id=self.databricks_conn_id)
try:
- hook._do_api_call(endpoint_info=LIST_ZONES_ENDPOINT).get('zones')
+ hook._do_api_call(endpoint_info=SPARK_VERSIONS_ENDPOINT).get('versions')
status = True
message = 'Connection successfully tested'
except Exception as e:
diff --git a/tests/providers/databricks/hooks/test_databricks.py b/tests/providers/databricks/hooks/test_databricks.py
index 8599ac9918..fa2c416a10 100644
--- a/tests/providers/databricks/hooks/test_databricks.py
+++ b/tests/providers/databricks/hooks/test_databricks.py
@@ -104,7 +104,11 @@ LIST_JOBS_RESPONSE = {
],
'has_more': False,
}
-LIST_ZONES_RESPONSE = {'zones': ['us-east-2b', 'us-east-2c', 'us-east-2a'], 'default_zone': 'us-east-2b'}
+LIST_SPARK_VERSIONS_RESPONSE = {
+ "versions": [
+ {"key": "8.2.x-scala2.12", "name": "8.2 (includes Apache Spark 3.1.1, Scala 2.12)"},
+ ]
+}
def run_now_endpoint(host):
@@ -184,9 +188,9 @@ def list_jobs_endpoint(host):
return f'https://{host}/api/2.1/jobs/list'
-def list_zones_endpoint(host):
- """Utility function to generate the list zones endpoint given the host"""
- return f'https://{host}/api/2.0/clusters/list-zones'
+def list_spark_versions_endpoint(host):
+ """Utility function to generate the list spark versions endpoint given the host"""
+ return f'https://{host}/api/2.0/clusters/spark-versions'
def create_valid_response_mock(content):
@@ -715,13 +719,13 @@ class TestDatabricksHook(unittest.TestCase):
@mock.patch('airflow.providers.databricks.hooks.databricks_base.requests')
def test_connection_success(self, mock_requests):
mock_requests.codes.ok = 200
- mock_requests.get.return_value.json.return_value = LIST_ZONES_RESPONSE
+ mock_requests.get.return_value.json.return_value = LIST_SPARK_VERSIONS_RESPONSE
status_code_mock = mock.PropertyMock(return_value=200)
type(mock_requests.get.return_value).status_code = status_code_mock
response = self.hook.test_connection()
assert response == (True, 'Connection successfully tested')
mock_requests.get.assert_called_once_with(
- list_zones_endpoint(HOST),
+ list_spark_versions_endpoint(HOST),
json=None,
params=None,
auth=HTTPBasicAuth(LOGIN, PASSWORD),
@@ -738,7 +742,7 @@ class TestDatabricksHook(unittest.TestCase):
response = self.hook.test_connection()
assert response == (False, 'Connection Failure')
mock_requests.get.assert_called_once_with(
- list_zones_endpoint(HOST),
+ list_spark_versions_endpoint(HOST),
json=None,
params=None,
auth=HTTPBasicAuth(LOGIN, PASSWORD),