This is an automated email from the ASF dual-hosted git repository.

singhpk234 pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/polaris.git


The following commit(s) were added to refs/heads/main by this push:
     new 59ab382ba Fixes for direct usage of client_secret #1756
59ab382ba is described below

commit 59ab382bae3981eefacecfc3cf1b616dc9950db0
Author: Eric Maynard <eric.maynard+...@snowflake.com>
AuthorDate: Thu May 29 23:13:50 2025 -0700

    Fixes for direct usage of client_secret #1756
    
    When the spec was upgraded and the Python client was regenerated from
    it, clientSecret became a password field, so calling str() on it
    directly yields a redacted placeholder like ****** rather than the
    actual secret. The initial PR that changed the Python client and fixed
    the regtests left some existing usages of client_secret unchanged;
    this commit updates them to call get_secret_value() instead.
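    
    A minimal sketch of the behavior, assuming Pydantic's SecretStr (which
    the presence of get_secret_value() suggests the regenerated client
    uses for password fields):
    
        from pydantic import SecretStr
        
        secret = SecretStr("s3cr3t")
        print(str(secret))                # '**********' -- redacted
        print(f"{secret}")                # f-strings redact it the same way
        print(secret.get_secret_value())  # 's3cr3t' -- the real value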
---
 getting-started/spark/notebooks/SparkPolaris.ipynb       | 16 ++++++++--------
 .../v3.5/getting-started/notebooks/SparkPolaris.ipynb    |  4 ++--
 .../t_pyspark/src/test_spark_sql_s3_with_privileges.py   |  4 ++--
 3 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/getting-started/spark/notebooks/SparkPolaris.ipynb b/getting-started/spark/notebooks/SparkPolaris.ipynb
index b3e416e80..08f28e9f2 100644
--- a/getting-started/spark/notebooks/SparkPolaris.ipynb
+++ b/getting-started/spark/notebooks/SparkPolaris.ipynb
@@ -267,7 +267,7 @@
     "  # Enable token refresh\n",
     "  .config(\"spark.sql.catalog.polaris.token-refresh-enabled\", 
\"true\")\n",
     "  # specify the client_id:client_secret pair\n",
-    "  .config(\"spark.sql.catalog.polaris.credential\", 
f\"{engineer_principal.credentials.client_id}:{engineer_principal.credentials.client_secret}\")\n",
+    "  .config(\"spark.sql.catalog.polaris.credential\", 
f\"{engineer_principal.credentials.client_id}:{engineer_principal.credentials.client_secret.get_secret_value()}\")\n",
     "\n",
     "  # Set the warehouse to the name of the catalog we created\n",
     "  .config(\"spark.sql.catalog.polaris.warehouse\", catalog_name)\n",
@@ -412,13 +412,13 @@
    "source": [
     "# Create a client to fetch an API token - use our client_id and 
client_secret as the username/password\n",
     "token_client = 
CatalogApiClient(CatalogApiClientConfiguration(username=engineer_principal.credentials.client_id,\n",
-    "                                 
password=engineer_principal.credentials.client_secret,\n",
+    "                                 
password=engineer_principal.credentials.client_secret.get_secret_value(),\n",
     "                                 
host='http://polaris:8181/api/catalog'))\n",
     "\n",
     "# Use the client to get the token from the /tokens endpoint\n",
     "collado_token = 
IcebergOAuth2API(token_client).get_token(scope='PRINCIPAL_ROLE:ALL',\n",
     "                            
client_id=engineer_principal.credentials.client_id,\n",
-    "                          
client_secret=engineer_principal.credentials.client_secret,\n",
+    "                          
client_secret=engineer_principal.credentials.client_secret.get_secret_value(),\n",
     "                          grant_type='client_credentials',\n",
     "                          _headers={'realm': 'default-realm'})\n",
     "\n",
@@ -474,7 +474,7 @@
    "source": [
     "# The new spark session inherits everything from the previous session 
except for the overridden credentials\n",
     "new_spark = spark.newSession()\n",
-    "new_spark.conf.set(\"spark.sql.catalog.polaris.credential\", 
f\"{reader_principal.credentials.client_id}:{reader_principal.credentials.client_secret}\")\n",
+    "new_spark.conf.set(\"spark.sql.catalog.polaris.credential\", 
f\"{reader_principal.credentials.client_id}:{reader_principal.credentials.client_secret.get_secret_value()}\")\n",
     "new_spark.sql(\"USE polaris\")"
    ]
   },
@@ -565,13 +565,13 @@
    "source": [
     "# Create a client to fetch an API token - use the reader's client_id and 
client_secret as the username/password\n",
     "token_client = 
CatalogApiClient(CatalogApiClientConfiguration(username=reader_principal.credentials.client_id,\n",
-    "                                 
password=reader_principal.credentials.client_secret,\n",
+    "                                 
password=reader_principal.credentials.client_secret.get_secret_value(),\n",
     "                                 
host='http://polaris:8181/api/catalog'))\n",
     "\n",
     "# Get the token\n",
     "pm_token = 
IcebergOAuth2API(token_client).get_token(scope='PRINCIPAL_ROLE:ALL',\n",
     "                            
client_id=reader_principal.credentials.client_id,\n",
-    "                          
client_secret=reader_principal.credentials.client_secret,\n",
+    "                          
client_secret=reader_principal.credentials.client_secret.get_secret_value(),\n",
     "                          grant_type='client_credentials',\n",
     "                          _headers={'realm': 'default-realm'})\n",
     "\n",
@@ -759,13 +759,13 @@
    "source": [
     "# create a token client with the _engineer's_ credentials\n",
     "token_client = 
CatalogApiClient(CatalogApiClientConfiguration(username=engineer_principal.credentials.client_id,\n",
-    "                                 
password=engineer_principal.credentials.client_secret,\n",
+    "                                 
password=engineer_principal.credentials.client_secret.get_secret_value(),\n",
     "                                 
host='http://polaris:8181/api/catalog'))\n",
     "\n",
     "# specify the role I want to activate - only ops_engineer\n",
     "ops_token = 
IcebergOAuth2API(token_client).get_token(scope='PRINCIPAL_ROLE:ops_engineer',\n",
     "                            
client_id=engineer_principal.credentials.client_id,\n",
-    "                          
client_secret=engineer_principal.credentials.client_secret,\n",
+    "                          
client_secret=engineer_principal.credentials.client_secret.get_secret_value(),\n",
     "                          grant_type='client_credentials',\n",
     "                          _headers={'realm': 'default-realm'})\n",
     "\n",
diff --git a/plugins/spark/v3.5/getting-started/notebooks/SparkPolaris.ipynb b/plugins/spark/v3.5/getting-started/notebooks/SparkPolaris.ipynb
index cd0b02c8a..2b3b9cc46 100644
--- a/plugins/spark/v3.5/getting-started/notebooks/SparkPolaris.ipynb
+++ b/plugins/spark/v3.5/getting-started/notebooks/SparkPolaris.ipynb
@@ -279,7 +279,7 @@
     "  # Enable token refresh\n",
     "  .config(\"spark.sql.catalog.polaris.token-refresh-enabled\", 
\"true\")\n",
     "  # specify the client_id:client_secret pair\n",
-    "  .config(\"spark.sql.catalog.polaris.credential\", 
f\"{engineer_principal.credentials.client_id}:{engineer_principal.credentials.client_secret}\")\n",
+    "  .config(\"spark.sql.catalog.polaris.credential\", 
f\"{engineer_principal.credentials.client_id}:{engineer_principal.credentials.client_secret.get_secret_value()}\")\n",
     "\n",
     "  # Set the warehouse to the name of the catalog we created\n",
     "  .config(\"spark.sql.catalog.polaris.warehouse\", catalog_name)\n",
@@ -614,7 +614,7 @@
    "source": [
     "# The new spark session inherits everything from the previous session 
except for the overridden credentials\n",
     "new_spark = spark.newSession()\n",
-    "new_spark.conf.set(\"spark.sql.catalog.polaris.credential\", 
f\"{reader_principal.credentials.client_id}:{reader_principal.credentials.client_secret}\")\n",
+    "new_spark.conf.set(\"spark.sql.catalog.polaris.credential\", 
f\"{reader_principal.credentials.client_id}:{reader_principal.credentials.client_secret.get_secret_value()}\")\n",
     "new_spark.sql(\"USE polaris\")"
    ]
   },
diff --git a/regtests/t_pyspark/src/test_spark_sql_s3_with_privileges.py b/regtests/t_pyspark/src/test_spark_sql_s3_with_privileges.py
index d47b1c750..64daaeab5 100644
--- a/regtests/t_pyspark/src/test_spark_sql_s3_with_privileges.py
+++ b/regtests/t_pyspark/src/test_spark_sql_s3_with_privileges.py
@@ -1165,7 +1165,7 @@ def test_spark_credentials_s3_exception_on_metadata_file_deletion(root_client, s
     :param reader_catalog_client:
     :return:
     """
-    with IcebergSparkSession(credentials=f'{snowman.principal.client_id}:{snowman.credentials.client_secret}',
+    with IcebergSparkSession(credentials=f'{snowman.principal.client_id}:{snowman.credentials.client_secret.get_secret_value()}',
                              catalog_name=snowflake_catalog.name,
                              polaris_url=polaris_catalog_url) as spark:
         spark.sql(f'USE {snowflake_catalog.name}')
@@ -1214,7 +1214,7 @@ def test_spark_credentials_s3_exception_on_metadata_file_deletion(root_client, s
         assert '404' in str(e)
 
 
-    with IcebergSparkSession(credentials=f'{snowman.principal.client_id}:{snowman.credentials.client_secret}',
+    with IcebergSparkSession(credentials=f'{snowman.principal.client_id}:{snowman.credentials.client_secret.get_secret_value()}',
                              catalog_name=snowflake_catalog.name,
                              polaris_url=polaris_catalog_url) as spark:
         spark.sql(f'USE {snowflake_catalog.name}')
