This is an automated email from the ASF dual-hosted git repository.
collado pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/polaris.git
The following commit(s) were added to refs/heads/main by this push:
new 4226713c Fixed reg test for cross-region S3 calls (#995)
4226713c is described below
commit 4226713c8fdc928411ed4183c9ef375ba38b4439
Author: Michael Collado <[email protected]>
AuthorDate: Fri Feb 14 13:52:05 2025 -0800
Fixed reg test for cross-region S3 calls (#995)
---
regtests/docker-compose.yml | 1 +
regtests/t_spark_sql/ref/spark_sql_s3_cross_region.sh.ref | 11 +++++------
regtests/t_spark_sql/src/spark_sql_s3_cross_region.sh | 5 +----
3 files changed, 7 insertions(+), 10 deletions(-)
diff --git a/regtests/docker-compose.yml b/regtests/docker-compose.yml
index 2ca14437..94a0f750 100644
--- a/regtests/docker-compose.yml
+++ b/regtests/docker-compose.yml
@@ -66,6 +66,7 @@ services:
AWS_CROSS_REGION_TEST_ENABLED: $AWS_CROSS_REGION_TEST_ENABLED
AWS_CROSS_REGION_BUCKET: $AWS_CROSS_REGION_BUCKET
AWS_ROLE_FOR_CROSS_REGION_BUCKET: $AWS_ROLE_FOR_CROSS_REGION_BUCKET
+ AWS_REGION_FOR_CROSS_REGION_TEST: $AWS_REGION_FOR_CROSS_REGION_TEST
volumes:
- ./output:/tmp/polaris-regtests/
- ./credentials:/tmp/credentials/
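Note that Compose substitutes an empty string for a bare `$VAR` reference when the variable is unset in the invoking shell, so the new entry only has an effect when the caller exports it. A minimal sketch of launching the suite with all four cross-region variables set (the bucket, role ARN, and region values are placeholders, and `docker compose up` from the regtests/ directory is assumed to be how the suite is started):

export AWS_CROSS_REGION_TEST_ENABLED=true
export AWS_CROSS_REGION_BUCKET=example-cross-region-bucket
export AWS_ROLE_FOR_CROSS_REGION_BUCKET=arn:aws:iam::123456789012:role/example-role
export AWS_REGION_FOR_CROSS_REGION_TEST=us-east-2
docker compose up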
diff --git a/regtests/t_spark_sql/ref/spark_sql_s3_cross_region.sh.ref b/regtests/t_spark_sql/ref/spark_sql_s3_cross_region.sh.ref
index feef8667..c984d763 100644
--- a/regtests/t_spark_sql/ref/spark_sql_s3_cross_region.sh.ref
+++ b/regtests/t_spark_sql/ref/spark_sql_s3_cross_region.sh.ref
@@ -1,4 +1,3 @@
-{"defaults":{"default-base-location":"s3://sfc-role-stage-for-reg-test-do-not-modify-write-only/polaris_test/spark_sql_s3_cross_region_catalog/"},"overrides":{"prefix":"spark_sql_s3_cross_region_catalog"},"endpoints":["GET
/v1/{prefix}/namespaces","GET /v1/{prefix}/namespaces/{namespace}","POST
/v1/{prefix}/namespaces","POST
/v1/{prefix}/namespaces/{namespace}/properties","DELETE
/v1/{prefix}/namespaces/{namespace}","GET
/v1/{prefix}/namespaces/{namespace}/tables","GET /v1/{prefix}/names [...]
Catalog created
spark-sql (default)> use polaris;
spark-sql ()> show namespaces;
@@ -7,23 +6,23 @@ spark-sql ()> create namespace db2;
spark-sql ()> show namespaces;
db1
db2
-spark-sql ()>
+spark-sql ()>
> create namespace db1.schema1;
spark-sql ()> show namespaces;
db1
db2
spark-sql ()> show namespaces in db1;
db1.schema1
-spark-sql ()>
+spark-sql ()>
> create table db1.schema1.tbl1 (col1 int);
spark-sql ()> show tables in db1;
spark-sql ()> use db1.schema1;
-spark-sql (db1.schema1)>
+spark-sql (db1.schema1)>
> insert into tbl1 values (123), (234);
spark-sql (db1.schema1)> select * from tbl1;
123
234
-spark-sql (db1.schema1)>
+spark-sql (db1.schema1)>
> drop table tbl1 purge;
spark-sql (db1.schema1)> show tables;
spark-sql (db1.schema1)> drop namespace db1.schema1;
@@ -32,4 +31,4 @@ spark-sql (db1.schema1)> show namespaces;
db2
spark-sql (db1.schema1)> drop namespace db2;
spark-sql (db1.schema1)> show namespaces;
-spark-sql (db1.schema1)>
+spark-sql (db1.schema1)>
diff --git a/regtests/t_spark_sql/src/spark_sql_s3_cross_region.sh b/regtests/t_spark_sql/src/spark_sql_s3_cross_region.sh
old mode 100644
new mode 100755
index d68e121c..76157ce4
--- a/regtests/t_spark_sql/src/spark_sql_s3_cross_region.sh
+++ b/regtests/t_spark_sql/src/spark_sql_s3_cross_region.sh
@@ -30,7 +30,7 @@ ROLE_ARN="${AWS_ROLE_FOR_CROSS_REGION_BUCKET}"
curl -i -X POST -H "Authorization: Bearer ${SPARK_BEARER_TOKEN}" -H 'Accept: application/json' -H 'Content-Type: application/json' \
http://${POLARIS_HOST:-localhost}:8181/api/management/v1/catalogs \
- -d '{"name": "spark_sql_s3_cross_region_catalog", "id": 100, "type": "INTERNAL", "readOnly": false, "properties": {"default-base-location": "s3://${BUCKET}/polaris_test/spark_sql_s3_cross_region_catalog/"}, "storageConfigInfo": {"storageType": "S3", "allowedLocations": ["s3://${BUCKET}/polaris_test/"], "roleArn": "${ROLE_ARN}"}}' > /dev/stderr
+ -d "{\"name\": \"spark_sql_s3_cross_region_catalog\", \"id\": 100, \"type\": \"INTERNAL\", \"readOnly\": false, \"properties\": {\"client.region\": \"${AWS_REGION_FOR_CROSS_REGION_TEST}\", \"default-base-location\": \"s3://${BUCKET}/polaris_test/spark_sql_s3_cross_region_catalog/\"}, \"storageConfigInfo\": {\"storageType\": \"S3\", \"allowedLocations\": [\"s3://${BUCKET}/polaris_test/\"], \"roleArn\": \"${ROLE_ARN}\", \"region\": \"${AWS_REGION_FOR_CROSS_REGION_TEST}\"}}" > /dev/stderr
# Add TABLE_WRITE_DATA to the catalog's catalog_admin role since by default it can only manage access and metadata
curl -i -X PUT -H "Authorization: Bearer ${SPARK_BEARER_TOKEN}" -H 'Accept: application/json' -H 'Content-Type: application/json' \
@@ -42,9 +42,6 @@ curl -i -X PUT -H "Authorization: Bearer ${SPARK_BEARER_TOKEN}" -H 'Accept: appl
http://${POLARIS_HOST:-localhost}:8181/api/management/v1/principal-roles/service_admin/catalog-roles/spark_sql_s3_cross_region_catalog \
-d '{"name": "catalog_admin"}' > /dev/stderr
-curl -H "Authorization: Bearer ${SPARK_BEARER_TOKEN}" -H 'Accept: application/json' -H 'Content-Type: application/json' \
- "http://${POLARIS_HOST:-localhost}:8181/api/catalog/v1/config?warehouse=spark_sql_s3_cross_region_catalog"
-echo
echo "Catalog created"
cat << EOF | ${SPARK_HOME}/bin/spark-sql -S --conf spark.sql.catalog.polaris.token="${SPARK_BEARER_TOKEN}" --conf spark.sql.catalog.polaris.warehouse=spark_sql_s3_cross_region_catalog
use polaris;
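The fix in the catalog-creation call above is twofold: the payload now pins the S3 region (the client.region property plus the region field in storageConfigInfo), and the -d body switches from single to double quotes so the shell actually expands ${BUCKET}, ${ROLE_ARN}, and ${AWS_REGION_FOR_CROSS_REGION_TEST}; inside single quotes those references were sent to the server as literal text. A minimal sketch of the quoting behavior (the bucket value is a placeholder):

BUCKET=example-bucket
echo '{"default-base-location": "s3://${BUCKET}/"}'
# prints {"default-base-location": "s3://${BUCKET}/"}        -- literal, unexpanded
echo "{\"default-base-location\": \"s3://${BUCKET}/\"}"
# prints {"default-base-location": "s3://example-bucket/"}   -- expanded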