This is an automated email from the ASF dual-hosted git repository.
dataroaring pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/branch-3.0 by this push:
new ca9bf8c61ea branch-3.0: [fix](storage vault) Fix missing use_path_style when create storage vault #45155 (#45560)
ca9bf8c61ea is described below
commit ca9bf8c61eacb67586ddfe8d32f60196dc8305ed
Author: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
AuthorDate: Mon Dec 23 21:51:50 2024 +0800
branch-3.0: [fix](storage vault) Fix missing use_path_style when create storage vault #45155 (#45560)
Cherry-picked from #45155
Co-authored-by: Gavin Chou <[email protected]>
---
cloud/src/meta-service/meta_service_resource.cpp | 27 +++++++-----
.../org/apache/doris/catalog/StorageVault.java | 4 ++
regression-test/suites/vault_p0/create/load.groovy | 48 ++++++++++++++++++++++
3 files changed, 68 insertions(+), 11 deletions(-)
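For context on the flag being plumbed through below: use_path_style chooses between S3 path-style addressing (bucket name in the URL path) and virtual-hosted-style addressing (bucket name in the host name), and many self-hosted S3-compatible services only accept path style, so dropping the property on CREATE STORAGE VAULT can make the vault unreachable. A rough sketch of the two URL shapes follows; the helper and endpoint names are illustrative only and not part of the Doris code.

    #include <iostream>
    #include <string>

    // Illustrative only: the two S3 addressing styles that use_path_style selects between.
    static std::string object_url(const std::string& endpoint, const std::string& bucket,
                                  const std::string& key, bool use_path_style) {
        if (use_path_style) {
            return "https://" + endpoint + "/" + bucket + "/" + key;  // path style
        }
        return "https://" + bucket + "." + endpoint + "/" + key;      // virtual-hosted style
    }

    int main() {
        std::cout << object_url("s3.example.com", "mybucket", "data/part-0", true) << "\n";
        std::cout << object_url("s3.example.com", "mybucket", "data/part-0", false) << "\n";
    }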
diff --git a/cloud/src/meta-service/meta_service_resource.cpp b/cloud/src/meta-service/meta_service_resource.cpp
index db232eb5314..4fa8cc5a132 100644
--- a/cloud/src/meta-service/meta_service_resource.cpp
+++ b/cloud/src/meta-service/meta_service_resource.cpp
@@ -765,6 +765,7 @@ struct ObjectStorageDesc {
std::string& endpoint;
std::string& external_endpoint;
std::string& region;
+ bool& use_path_style;
};
static int extract_object_storage_info(const AlterObjStoreInfoRequest* request,
@@ -777,7 +778,7 @@ static int extract_object_storage_info(const AlterObjStoreInfoRequest* request,
msg = "s3 obj info err " + proto_to_json(*request);
return -1;
}
- auto& [ak, sk, bucket, prefix, endpoint, external_endpoint, region] = obj_desc;
+ auto& [ak, sk, bucket, prefix, endpoint, external_endpoint, region, use_path_style] = obj_desc;
const auto& obj = request->has_obj() ? request->obj() : request->vault().obj_info();
// Prepare data
if (!obj.has_ak() || !obj.has_sk()) {
@@ -803,6 +804,7 @@ static int extract_object_storage_info(const AlterObjStoreInfoRequest* request,
endpoint = obj.has_endpoint() ? obj.endpoint() : "";
external_endpoint = obj.has_external_endpoint() ? obj.external_endpoint() : "";
region = obj.has_region() ? obj.region() : "";
+ use_path_style = obj.use_path_style();
// obj size > 1k, refuse
if (obj.ByteSizeLong() > 1024) {
code = MetaServiceCode::INVALID_ARGUMENT;
@@ -812,13 +814,13 @@ static int extract_object_storage_info(const AlterObjStoreInfoRequest* request,
return 0;
}
-static ObjectStoreInfoPB object_info_pb_factory(ObjectStorageDesc& obj_info,
+static ObjectStoreInfoPB object_info_pb_factory(ObjectStorageDesc& obj_desc,
const ObjectStoreInfoPB& obj,
InstanceInfoPB& instance,
EncryptionInfoPB& encryption_info,
AkSkPair& cipher_ak_sk_pair) {
ObjectStoreInfoPB last_item;
- auto& [ak, sk, bucket, prefix, endpoint, external_endpoint, region] = obj_info;
+ auto& [ak, sk, bucket, prefix, endpoint, external_endpoint, region, use_path_style] = obj_desc;
auto now_time = std::chrono::system_clock::now();
uint64_t time =
std::chrono::duration_cast<std::chrono::seconds>(now_time.time_since_epoch()).count();
@@ -840,6 +842,7 @@ static ObjectStoreInfoPB object_info_pb_factory(ObjectStorageDesc& obj_info,
last_item.set_region(region);
last_item.set_provider(obj.provider());
last_item.set_sse_enabled(instance.sse_enabled());
+ last_item.set_use_path_style(use_path_style);
return last_item;
}
@@ -848,14 +851,15 @@ void MetaServiceImpl::alter_storage_vault(google::protobuf::RpcController* contr
AlterObjStoreInfoResponse* response,
::google::protobuf::Closure* done) {
std::string ak, sk, bucket, prefix, endpoint, external_endpoint, region;
+ bool use_path_style;
EncryptionInfoPB encryption_info;
AkSkPair cipher_ak_sk_pair;
RPC_PREPROCESS(alter_storage_vault);
switch (request->op()) {
case AlterObjStoreInfoRequest::ADD_S3_VAULT:
case AlterObjStoreInfoRequest::DROP_S3_VAULT: {
- auto tmp_desc =
- ObjectStorageDesc {ak, sk, bucket, prefix, endpoint, external_endpoint, region};
+ auto tmp_desc = ObjectStorageDesc {
+ ak, sk, bucket, prefix, endpoint, external_endpoint, region, use_path_style};
if (0 != extract_object_storage_info(request, code, msg, tmp_desc, encryption_info,
cipher_ak_sk_pair)) {
return;
@@ -994,8 +998,8 @@ void MetaServiceImpl::alter_storage_vault(google::protobuf::RpcController* contr
}
}
// calc id
- auto tmp_tuple =
- ObjectStorageDesc {ak, sk, bucket, prefix, endpoint, external_endpoint, region};
+ auto tmp_tuple = ObjectStorageDesc {
+ ak, sk, bucket, prefix, endpoint, external_endpoint, region, use_path_style};
ObjectStoreInfoPB last_item = object_info_pb_factory(tmp_tuple, obj, instance, encryption_info, cipher_ak_sk_pair);
if (instance.storage_vault_names().end() !=
@@ -1138,6 +1142,7 @@ void MetaServiceImpl::alter_obj_store_info(google::protobuf::RpcController* cont
AlterObjStoreInfoResponse* response,
::google::protobuf::Closure* done) {
std::string ak, sk, bucket, prefix, endpoint, external_endpoint, region;
+ bool use_path_style;
EncryptionInfoPB encryption_info;
AkSkPair cipher_ak_sk_pair;
RPC_PREPROCESS(alter_obj_store_info);
@@ -1145,8 +1150,8 @@ void MetaServiceImpl::alter_obj_store_info(google::protobuf::RpcController* cont
case AlterObjStoreInfoRequest::ADD_OBJ_INFO:
case AlterObjStoreInfoRequest::LEGACY_UPDATE_AK_SK:
case AlterObjStoreInfoRequest::UPDATE_AK_SK: {
- auto tmp_desc =
- ObjectStorageDesc {ak, sk, bucket, prefix, endpoint, external_endpoint, region};
+ auto tmp_desc = ObjectStorageDesc {
+ ak, sk, bucket, prefix, endpoint, external_endpoint, region, use_path_style};
if (0 != extract_object_storage_info(request, code, msg, tmp_desc, encryption_info,
cipher_ak_sk_pair)) {
return;
@@ -1285,8 +1290,8 @@ void MetaServiceImpl::alter_obj_store_info(google::protobuf::RpcController* cont
}
}
// calc id
- auto tmp_tuple =
- ObjectStorageDesc {ak, sk, bucket, prefix, endpoint, external_endpoint, region};
+ auto tmp_tuple = ObjectStorageDesc {
+ ak, sk, bucket, prefix, endpoint, external_endpoint, region, use_path_style};
ObjectStoreInfoPB last_item = object_info_pb_factory(tmp_tuple, obj, instance, encryption_info, cipher_ak_sk_pair);
instance.add_obj_info()->CopyFrom(last_item);
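A note on the pattern touched above: ObjectStorageDesc is an aggregate whose members are references, and callers decompose it with a structured binding, so every write through the binding lands in the caller's local variables; that is why adding use_path_style requires edits to the struct, both bindings, and every brace-initialized construction site. A minimal standalone sketch of the idiom follows, with simplified names that are not the actual Doris definitions.

    #include <iostream>
    #include <string>

    // Simplified stand-in for ObjectStorageDesc: an aggregate of references.
    struct Desc {
        std::string& bucket;
        bool& use_path_style;  // a new member must also appear at every binding and brace-init site
    };

    // A structured binding over the aggregate writes straight through to the caller's locals.
    static void fill(Desc& desc) {
        auto& [bucket, use_path_style] = desc;
        bucket = "my-bucket";
        use_path_style = true;
    }

    int main() {
        std::string bucket;
        bool use_path_style = false;
        Desc desc {bucket, use_path_style};
        fill(desc);
        std::cout << bucket << " " << std::boolalpha << use_path_style << "\n";  // my-bucket true
    }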
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/StorageVault.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/StorageVault.java
index df9310526e4..9d45ce7bdd8 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/StorageVault.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/StorageVault.java
@@ -23,6 +23,7 @@ import org.apache.doris.cloud.proto.Cloud;
import org.apache.doris.common.AnalysisException;
import org.apache.doris.common.DdlException;
import org.apache.doris.common.UserException;
+import org.apache.doris.datasource.property.PropertyConverter;
import org.apache.doris.qe.ShowResultSetMetaData;
import com.google.common.base.Strings;
@@ -144,6 +145,9 @@ public abstract class StorageVault {
vault.modifyProperties(stmt.getProperties());
break;
case S3:
+ if (!stmt.getProperties().containsKey(PropertyConverter.USE_PATH_STYLE)) {
+ stmt.getProperties().put(PropertyConverter.USE_PATH_STYLE, "true");
+ }
CreateResourceStmt resourceStmt =
new CreateResourceStmt(false, ifNotExists, name, stmt.getProperties());
resourceStmt.analyzeResourceType();
diff --git a/regression-test/suites/vault_p0/create/load.groovy b/regression-test/suites/vault_p0/create/load.groovy
index 58974856db5..321a58a2170 100644
--- a/regression-test/suites/vault_p0/create/load.groovy
+++ b/regression-test/suites/vault_p0/create/load.groovy
@@ -112,6 +112,54 @@ suite("create_storage_vault", "nonConcurrent") {
);
"""
+ // test path style
+ sql """
+ CREATE STORAGE VAULT IF NOT EXISTS create_s3_vault1
+ PROPERTIES (
+ "type"="S3",
+ "s3.endpoint"="${getS3Endpoint()}",
+ "s3.region" = "${getS3Region()}",
+ "s3.access_key" = "${getS3AK()}",
+ "s3.secret_key" = "${getS3SK()}",
+ "s3.root.path" = "test_create_s3_vault1",
+ "s3.bucket" = "${getS3BucketName()}",
+ "s3.external_endpoint" = "",
+ "use_path_style" = "true",
+ "provider" = "${getS3Provider()}"
+ );
+ """
+
+ // test path style
+ sql """
+ CREATE STORAGE VAULT IF NOT EXISTS create_s3_vault2
+ PROPERTIES (
+ "type"="S3",
+ "s3.endpoint"="${getS3Endpoint()}",
+ "s3.region" = "${getS3Region()}",
+ "s3.access_key" = "${getS3AK()}",
+ "s3.secret_key" = "${getS3SK()}",
+ "s3.root.path" = "test_create_s3_vault2",
+ "s3.bucket" = "${getS3BucketName()}",
+ "s3.external_endpoint" = "",
+ "use_path_style" = "false",
+ "provider" = "${getS3Provider()}"
+ );
+ """
+
+ def result = sql """ show storage vault """
+ assertTrue(result.size() >= 3);
+ for (int i = 0; i < result.size(); ++i) {
+ if (result[i][0].equals("create_s3_vault")) {
+ assertTrue(result[i][2].contains("use_path_style: true"));
+ }
+ if (result[i][0].equals("create_s3_vault1")) {
+ assertTrue(result[i][2].contains("use_path_style: true"));
+ }
+ if (result[i][0].equals("create_s3_vault2")) {
+ assertTrue(result[i][2].contains("use_path_style: false"));
+ }
+ }
+
expectExceptionLike({
sql """
CREATE STORAGE VAULT create_s3_vault
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]