This is an automated email from the ASF dual-hosted git repository.
yiguolei pushed a commit to branch branch-4.0
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/branch-4.0 by this push:
new a0eb8c0c715 branch-4.0: [opt](s3) use ak+sk combine in calculate hash value #60149 (#60218)
a0eb8c0c715 is described below
commit a0eb8c0c715fbae23b8c6fde3bfbcde666246835
Author: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
AuthorDate: Mon Jan 26 19:29:41 2026 +0800
branch-4.0: [opt](s3) use ak+sk combine in calculate hash value #60149 (#60218)
Cherry-picked from #60149
Co-authored-by: zhangstar333 <[email protected]>
---
be/src/util/s3_util.h | 4 ++--
.../s3/test_outfile_with_different_s3.groovy | 22 +++++++++++++++++++++-
2 files changed, 23 insertions(+), 3 deletions(-)
diff --git a/be/src/util/s3_util.h b/be/src/util/s3_util.h
index 46282e525df..39e5d8e807c 100644
--- a/be/src/util/s3_util.h
+++ b/be/src/util/s3_util.h
@@ -88,8 +88,8 @@ struct S3ClientConf {
uint64_t get_hash() const {
uint64_t hash_code = 0;
- hash_code ^= crc32_hash(ak);
- hash_code ^= crc32_hash(sk);
+ // Use crc32_hash(ak + sk) hash to prevent swapped AK/SK order from producing same result.
+ hash_code ^= crc32_hash(ak + sk);
hash_code ^= crc32_hash(token);
hash_code ^= crc32_hash(endpoint);
hash_code ^= crc32_hash(region);
diff --git a/regression-test/suites/export_p0/outfile/s3/test_outfile_with_different_s3.groovy b/regression-test/suites/export_p0/outfile/s3/test_outfile_with_different_s3.groovy
index 4e0ece42e5c..18ad0a02c18 100644
--- a/regression-test/suites/export_p0/outfile/s3/test_outfile_with_different_s3.groovy
+++ b/regression-test/suites/export_p0/outfile/s3/test_outfile_with_different_s3.groovy
@@ -71,8 +71,24 @@ suite("test_outfile_with_different_s3", "p0") {
String region = getS3Region()
String bucket = context.config.otherConfigs.get("s3BucketName");
- def outfile_url = outfile_to_S3(bucket, s3_endpoint, region, ak, sk)
+ def outFilePath = "${bucket}/outfile_different_s3/exp_"
+ test {
+ // change the ak/sk order
+ sql """
+ SELECT * FROM ${export_table_name} t ORDER BY user_id
+ INTO OUTFILE "s3://${outFilePath}"
+ FORMAT AS parquet
+ PROPERTIES (
+ "s3.endpoint" = "${s3_endpoint}",
+ "s3.region" = "${region}",
+ "s3.access_key" = "${sk}",
+ "s3.secret_key"="${ak}"
+ );
+ """
+ exception "InvalidAccessKeyId"
+ }
+ def outfile_url = outfile_to_S3(bucket, s3_endpoint, region, ak, sk)
// http schema
qt_s3_tvf_1_http """ SELECT * FROM S3 (
"uri" = "http://${bucket}.${s3_endpoint}${outfile_url.substring(5
+ bucket.length(), outfile_url.length() - 1)}0.parquet",
@@ -92,6 +108,10 @@ suite("test_outfile_with_different_s3", "p0") {
"region" = "${region}"
);
"""
+
+
+
+
} finally {
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]