This is an automated email from the ASF dual-hosted git repository.

dataroaring pushed a commit to branch branch-4.0-preview
in repository https://gitbox.apache.org/repos/asf/doris.git

commit d578a7e20064b9b68c158c8e4e3e8df3e010370e
Author: AlexYue <[email protected]>
AuthorDate: Tue Apr 16 20:38:26 2024 +0800

    [case-wip](Vault) Add basic vault case for P0 (#33667)
---
 .../main/java/org/apache/doris/catalog/Env.java    |   2 +-
 .../doris/catalog/InternalSchemaInitializer.java   |   6 +-
 .../apache/doris/common/util/PropertyAnalyzer.java |   7 +-
 .../org/apache/doris/regression/Config.groovy      |  10 ++
 .../apache/doris/regression/ConfigOptions.groovy   |   9 ++
 .../org/apache/doris/regression/suite/Suite.groovy |  14 ++
 .../suites/hdfs_vault/default_vault_p2/load.groovy |   4 +-
 .../multi_vault_p2/ddl/customer_create.sql         |   2 +-
 .../hdfs_vault/multi_vault_p2/ddl/date_create.sql  |   2 +-
 .../multi_vault_p2/ddl/lineorder_create.sql        |   2 +-
 .../multi_vault_p2/ddl/lineorder_flat_create.sql   |   2 +-
 .../hdfs_vault/multi_vault_p2/ddl/part_create.sql  |   2 +-
 .../multi_vault_p2/ddl/supplier_create.sql         |   2 +-
 .../suites/hdfs_vault/multi_vault_p2/load.groovy   |   4 +-
 .../hdfs_vault/ssb_sf1_p2/ddl/customer_create.sql  |   2 +-
 .../hdfs_vault/ssb_sf1_p2/ddl/date_create.sql      |   2 +-
 .../hdfs_vault/ssb_sf1_p2/ddl/lineorder_create.sql |   2 +-
 .../ssb_sf1_p2/ddl/lineorder_flat_create.sql       |   2 +-
 .../hdfs_vault/ssb_sf1_p2/ddl/part_create.sql      |   2 +-
 .../hdfs_vault/ssb_sf1_p2/ddl/supplier_create.sql  |   2 +-
 .../suites/hdfs_vault/ssb_sf1_p2/load.groovy       |   2 +-
 .../multi_vault_p2/ddl/customer_create.sql         |   2 +-
 .../s3_vault/multi_vault_p2/ddl/date_create.sql    |   2 +-
 .../multi_vault_p2/ddl/lineorder_create.sql        |   2 +-
 .../multi_vault_p2/ddl/lineorder_flat_create.sql   |   2 +-
 .../s3_vault/multi_vault_p2/ddl/part_create.sql    |   2 +-
 .../multi_vault_p2/ddl/supplier_create.sql         |   2 +-
 .../s3_vault/ssb_sf1_p2/ddl/customer_create.sql    |   2 +-
 .../suites/s3_vault/ssb_sf1_p2/ddl/date_create.sql |   2 +-
 .../s3_vault/ssb_sf1_p2/ddl/lineorder_create.sql   |   2 +-
 .../ssb_sf1_p2/ddl/lineorder_flat_create.sql       |   2 +-
 .../suites/s3_vault/ssb_sf1_p2/ddl/part_create.sql |   2 +-
 .../s3_vault/ssb_sf1_p2/ddl/supplier_create.sql    |   2 +-
 regression-test/suites/vaults/create/create.groovy | 148 +++++++++++++++++++++
 .../suites/vaults/default/default.groovy           | 108 +++++++++++++++
 35 files changed, 325 insertions(+), 37 deletions(-)

diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java
index 9497785faea..d723bedce81 100755
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java
@@ -3522,7 +3522,7 @@ public class Env {
             // Storage Vault
             if (!olapTable.getStorageVaultName().isEmpty()) {
                 sb.append(",\n\"").append(PropertyAnalyzer
-                                    .PROPERTIES_STORAGE_VAULT).append("\" = \"");
+                                    .PROPERTIES_STORAGE_VAULT_NAME).append("\" = \"");
                 sb.append(olapTable.getStorageVaultName()).append("\"");
             }
 
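With the change above, SHOW CREATE TABLE now reports the vault under the "storage_vault_name" key. A minimal sketch (not part of this commit; the table name is illustrative) of how a regression suite could assert the renamed key, following the same pattern as vaults/default/default.groovy later in this patch:

    def createStmt = sql """show create table tbl_on_vault"""
    // The PROPERTIES block now contains "storage_vault_name" instead of "storage_vault".
    assertTrue(createStmt[0][1].contains("\"storage_vault_name\""))
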
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/InternalSchemaInitializer.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/InternalSchemaInitializer.java
index 84db6cbdb79..96dfb00bdbd 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/InternalSchemaInitializer.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/InternalSchemaInitializer.java
@@ -189,7 +189,7 @@ public class InternalSchemaInitializer extends Thread {
             {
                 put("replication_num", String.valueOf(
                         Math.max(1, Config.min_replication_num_per_tablet)));
-                put("storage_vault", FeConstants.BUILT_IN_STORAGE_VAULT_NAME);
+                put("storage_vault_name", FeConstants.BUILT_IN_STORAGE_VAULT_NAME);
             }
         };
         PropertyAnalyzer.getInstance().rewriteForceProperties(properties);
@@ -214,7 +214,7 @@ public class InternalSchemaInitializer extends Thread {
             {
                 put("replication_num", String.valueOf(Math.max(1,
                         Config.min_replication_num_per_tablet)));
-                put("storage_vault", FeConstants.BUILT_IN_STORAGE_VAULT_NAME);
+                put("storage_vault_name", FeConstants.BUILT_IN_STORAGE_VAULT_NAME);
             }
         };
         PropertyAnalyzer.getInstance().rewriteForceProperties(properties);
@@ -248,7 +248,7 @@ public class InternalSchemaInitializer extends Thread {
                 put("dynamic_partition.enable", "true");
                 put("replication_num", String.valueOf(Math.max(1,
                         Config.min_replication_num_per_tablet)));
-                put("storage_vault", FeConstants.BUILT_IN_STORAGE_VAULT_NAME);
+                put("storage_vault_name", FeConstants.BUILT_IN_STORAGE_VAULT_NAME);
             }
         };
         PropertyAnalyzer.getInstance().rewriteForceProperties(properties);
diff --git a/fe/fe-core/src/main/java/org/apache/doris/common/util/PropertyAnalyzer.java b/fe/fe-core/src/main/java/org/apache/doris/common/util/PropertyAnalyzer.java
index 7f05ea40e19..1f3d52623c0 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/common/util/PropertyAnalyzer.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/common/util/PropertyAnalyzer.java
@@ -178,7 +178,6 @@ public class PropertyAnalyzer {
     public static final String PROPERTIES_PARTITION_SYNC_LIMIT = "partition_sync_limit";
     public static final String PROPERTIES_PARTITION_TIME_UNIT = "partition_sync_time_unit";
     public static final String PROPERTIES_PARTITION_DATE_FORMAT = "partition_date_format";
-    public static final String PROPERTIES_STORAGE_VAULT = "storage_vault";
     public static final String PROPERTIES_STORAGE_VAULT_NAME = "storage_vault_name";
     public static final String PROPERTIES_STORAGE_VAULT_ID = "storage_vault_id";
     // For unique key data model, the feature Merge-on-Write will leverage a primary
@@ -1056,9 +1055,9 @@ public class PropertyAnalyzer {
 
     public static String analyzeStorageVault(Map<String, String> properties) {
         String storageVault = null;
-        if (properties != null && properties.containsKey(PROPERTIES_STORAGE_VAULT)) {
-            storageVault = properties.get(PROPERTIES_STORAGE_VAULT);
-            properties.remove(PROPERTIES_STORAGE_VAULT);
+        if (properties != null && properties.containsKey(PROPERTIES_STORAGE_VAULT_NAME)) {
+            storageVault = properties.get(PROPERTIES_STORAGE_VAULT_NAME);
+            properties.remove(PROPERTIES_STORAGE_VAULT_NAME);
         }
 
         return storageVault;
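For reference, a minimal sketch (not part of the commit; it assumes the FE classes are on the classpath and the vault name is illustrative) of how the renamed key flows through analyzeStorageVault after this change:

    import org.apache.doris.common.util.PropertyAnalyzer

    // Properties as a CREATE TABLE statement would supply them.
    Map<String, String> props = ["storage_vault_name": "my_vault", "replication_num": "1"]
    assert PropertyAnalyzer.analyzeStorageVault(props) == "my_vault"
    assert !props.containsKey("storage_vault_name")   // the analyzer consumes the key
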
diff --git a/regression-test/framework/src/main/groovy/org/apache/doris/regression/Config.groovy b/regression-test/framework/src/main/groovy/org/apache/doris/regression/Config.groovy
index 0ee44babb82..08e1d353bb7 100644
--- a/regression-test/framework/src/main/groovy/org/apache/doris/regression/Config.groovy
+++ b/regression-test/framework/src/main/groovy/org/apache/doris/regression/Config.groovy
@@ -69,6 +69,7 @@ class Config {
     public String realDataPath
     public String cacheDataPath
     public boolean enableCacheData
+    public boolean enableStorageVault
     public String pluginPath
     public String sslCertificatePath
     public String dorisComposePath
@@ -163,6 +164,7 @@ class Config {
             String realDataPath,
             String cacheDataPath,
             Boolean enableCacheData,
+            Boolean enableStorageVault,
             String testGroups,
             String excludeGroups,
             String testSuites, 
@@ -215,6 +217,7 @@ class Config {
         this.realDataPath = realDataPath
         this.cacheDataPath = cacheDataPath
         this.enableCacheData = enableCacheData
+        this.enableStorageVault = enableStorageVault
         this.testGroups = testGroups
         this.excludeGroups = excludeGroups
         this.testSuites = testSuites
@@ -269,6 +272,7 @@ class Config {
         config.realDataPath = FileUtils.getCanonicalPath(cmd.getOptionValue(realDataOpt, config.realDataPath))
         config.cacheDataPath = cmd.getOptionValue(cacheDataOpt, config.cacheDataPath)
         config.enableCacheData = Boolean.parseBoolean(cmd.getOptionValue(enableCacheDataOpt, "true"))
+        config.enableStorageVault = Boolean.parseBoolean(cmd.getOptionValue(enableStorageVaultOpt, "true"))
         config.pluginPath = FileUtils.getCanonicalPath(cmd.getOptionValue(pluginOpt, config.pluginPath))
         config.sslCertificatePath = FileUtils.getCanonicalPath(cmd.getOptionValue(sslCertificateOpt, config.sslCertificatePath))
         config.dorisComposePath = FileUtils.getCanonicalPath(config.dorisComposePath)
@@ -492,6 +496,7 @@ class Config {
             configToString(obj.realDataPath),
             configToString(obj.cacheDataPath),
             configToBoolean(obj.enableCacheData),
+            configToBoolean(obj.enableStorageVault),
             configToString(obj.testGroups),
             configToString(obj.excludeGroups),
             configToString(obj.testSuites),
@@ -716,6 +721,11 @@ class Config {
             log.info("Set enableCacheData to '${config.enableCacheData}' because not specify.".toString())
         }
 
+        if (config.enableStorageVault == null) {
+            config.enableStorageVault = true
+            log.info("Set enableStorageVault to '${config.enableStorageVault}' because not specify.".toString())
+        }
+
         if (config.pluginPath == null) {
             config.pluginPath = "regression-test/plugins"
             log.info("Set dataPath to '${config.pluginPath}' because not specify.".toString())
diff --git a/regression-test/framework/src/main/groovy/org/apache/doris/regression/ConfigOptions.groovy b/regression-test/framework/src/main/groovy/org/apache/doris/regression/ConfigOptions.groovy
index 48e8a74ffd6..77fb592cab7 100644
--- a/regression-test/framework/src/main/groovy/org/apache/doris/regression/ConfigOptions.groovy
+++ b/regression-test/framework/src/main/groovy/org/apache/doris/regression/ConfigOptions.groovy
@@ -52,6 +52,7 @@ class ConfigOptions {
     static Option realDataOpt
     static Option cacheDataOpt
     static Option enableCacheDataOpt
+    static Option enableStorageVaultOpt
     static Option pluginOpt
     static Option sslCertificateOpt
     static Option imageOpt
@@ -182,6 +183,14 @@ class ConfigOptions {
                 .longOpt("enableCacheData")
                 .desc("enable caches data for stream load from s3")
                 .build()
+        enableStorageVaultOpt = Option.builder("ESV")
+                .argName("enableStorageVault")
+                .required(false)
+                .hasArg(true)
+                .type(String.class)
+                .longOpt("enableStorageVault")
+                .desc("does cloud mode enable storage vault")
+                .build()
         pluginOpt = Option.builder("plugin")
                 .argName("pluginPath")
                 .required(false)
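A short sketch of how the new flag parses (not part of the commit; it rebuilds the same commons-cli option as above, and the command-line values are illustrative). As in Config.groovy, an absent flag falls back to "true":

    import org.apache.commons.cli.DefaultParser
    import org.apache.commons.cli.Option
    import org.apache.commons.cli.Options

    // Rebuild the option exactly as ConfigOptions declares it.
    def vaultOpt = Option.builder("ESV")
            .argName("enableStorageVault")
            .required(false)
            .hasArg(true)
            .longOpt("enableStorageVault")
            .desc("does cloud mode enable storage vault")
            .build()
    def cmd = new DefaultParser().parse(new Options().addOption(vaultOpt),
            ["--enableStorageVault", "false"] as String[])
    // Same defaulting as Config.groovy: omitting the flag means vaults stay enabled.
    assert !Boolean.parseBoolean(cmd.getOptionValue(vaultOpt, "true"))
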
diff --git a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
index e300296a7c0..0778af070bf 100644
--- a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
+++ b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
@@ -567,6 +567,16 @@ class Suite implements GroovyInterceptable {
         }
     }
 
+    void expectExceptionLike(Closure userFunction, String errorMessage = null) {
+        try {
+            userFunction()
+        } catch (Exception e) {
+            if (!e.getMessage().contains(errorMessage)) {
+                throw e
+            }
+        }
+    }
+
     String getBrokerName() {
         String brokerName = context.config.otherConfigs.get("brokerName")
         return brokerName
@@ -1031,6 +1041,10 @@ class Suite implements GroovyInterceptable {
         return !getFeConfig("cloud_unique_id").isEmpty()
     }
 
+    boolean enableStoragevault() {
+        return isCloudMode() && context.config.enableStorageVault;
+    }
+
     String getFeConfig(String key) {
         return sql_return_maparray("SHOW FRONTEND CONFIG LIKE '${key}'")[0].Value
     }
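The two helpers added above are used by the vault suites later in this commit; a condensed usage sketch (suite name, statement, and error text are illustrative):

    suite("vault_helper_usage") {
        // Skip unless the run is in cloud mode and storage vault is enabled.
        if (!enableStoragevault()) {
            logger.info("skip storage vault case")
            return
        }
        // Swallows the exception when its message contains the given text,
        // and rethrows it otherwise.
        expectExceptionLike({
            sql """CREATE STORAGE VAULT IF NOT EXISTS failed_vault PROPERTIES ("type" = "S3");"""
        }, "Missing")
    }
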
diff --git a/regression-test/suites/hdfs_vault/default_vault_p2/load.groovy b/regression-test/suites/hdfs_vault/default_vault_p2/load.groovy
index bdef1c9f82c..e734f5f5169 100644
--- a/regression-test/suites/hdfs_vault/default_vault_p2/load.groovy
+++ b/regression-test/suites/hdfs_vault/default_vault_p2/load.groovy
@@ -28,7 +28,7 @@ suite("load") {
         PROPERTIES (
         "type"="hdfs",
         "fs.defaultFS"="${getHdfsFs()}",
-        "root_prefix" = "default_vault_ssb_hdfs_vault"
+        "path_prefix" = "default_vault_ssb_hdfs_vault"
         );
     """
 
@@ -37,7 +37,7 @@ suite("load") {
         PROPERTIES (
         "type"="hdfs",
         "fs.defaultFS"="${getHdfsFs()}",
-        "root_prefix" = "default_vault_ssb_flat_hdfs_vault"
+        "path_prefix" = "default_vault_ssb_flat_hdfs_vault"
         );
     """
 
diff --git a/regression-test/suites/hdfs_vault/multi_vault_p2/ddl/customer_create.sql b/regression-test/suites/hdfs_vault/multi_vault_p2/ddl/customer_create.sql
index 32351252497..e0c8c349954 100644
--- a/regression-test/suites/hdfs_vault/multi_vault_p2/ddl/customer_create.sql
+++ b/regression-test/suites/hdfs_vault/multi_vault_p2/ddl/customer_create.sql
@@ -12,5 +12,5 @@ UNIQUE KEY (`c_custkey`)
 DISTRIBUTED BY HASH(`c_custkey`) BUCKETS 1
 PROPERTIES (
 "replication_num" = "1",
-"storage_vault" = "multi_vault_ssb_hdfs_vault"
+"storage_vault_name" = "multi_vault_ssb_hdfs_vault"
 );
\ No newline at end of file
diff --git a/regression-test/suites/hdfs_vault/multi_vault_p2/ddl/date_create.sql b/regression-test/suites/hdfs_vault/multi_vault_p2/ddl/date_create.sql
index a02d5535605..53ce6c5e3bc 100644
--- a/regression-test/suites/hdfs_vault/multi_vault_p2/ddl/date_create.sql
+++ b/regression-test/suites/hdfs_vault/multi_vault_p2/ddl/date_create.sql
@@ -21,5 +21,5 @@ UNIQUE KEY (`d_datekey`)
 DISTRIBUTED BY HASH(`d_datekey`) BUCKETS 1
 PROPERTIES (
 "replication_num" = "1",
-"storage_vault" = "multi_vault_ssb_hdfs_vault"
+"storage_vault_name" = "multi_vault_ssb_hdfs_vault"
 );
\ No newline at end of file
diff --git a/regression-test/suites/hdfs_vault/multi_vault_p2/ddl/lineorder_create.sql b/regression-test/suites/hdfs_vault/multi_vault_p2/ddl/lineorder_create.sql
index 1543d853093..4aad658ef59 100644
--- a/regression-test/suites/hdfs_vault/multi_vault_p2/ddl/lineorder_create.sql
+++ b/regression-test/suites/hdfs_vault/multi_vault_p2/ddl/lineorder_create.sql
@@ -21,5 +21,5 @@ UNIQUE KEY (`lo_orderkey`, `lo_linenumber`)
 DISTRIBUTED BY HASH(`lo_orderkey`) BUCKETS 1
 PROPERTIES (
 "replication_num" = "1",
-"storage_vault" = "multi_vault_ssb_hdfs_vault"
+"storage_vault_name" = "multi_vault_ssb_hdfs_vault"
 );
\ No newline at end of file
diff --git a/regression-test/suites/hdfs_vault/multi_vault_p2/ddl/lineorder_flat_create.sql b/regression-test/suites/hdfs_vault/multi_vault_p2/ddl/lineorder_flat_create.sql
index 0f4946489c6..215b0362d29 100644
--- a/regression-test/suites/hdfs_vault/multi_vault_p2/ddl/lineorder_flat_create.sql
+++ b/regression-test/suites/hdfs_vault/multi_vault_p2/ddl/lineorder_flat_create.sql
@@ -42,5 +42,5 @@ UNIQUE KEY(`LO_ORDERDATE`, `LO_ORDERKEY`, `LO_LINENUMBER`)
 DISTRIBUTED BY HASH(`LO_ORDERKEY`) BUCKETS 1
 PROPERTIES (
 "replication_num" = "1",
-"storage_vault" = "multi_vault_ssb_flat_hdfs_vault"
+"storage_vault_name" = "multi_vault_ssb_flat_hdfs_vault"
 );
\ No newline at end of file
diff --git a/regression-test/suites/hdfs_vault/multi_vault_p2/ddl/part_create.sql b/regression-test/suites/hdfs_vault/multi_vault_p2/ddl/part_create.sql
index ce764259270..7089984c21b 100644
--- a/regression-test/suites/hdfs_vault/multi_vault_p2/ddl/part_create.sql
+++ b/regression-test/suites/hdfs_vault/multi_vault_p2/ddl/part_create.sql
@@ -13,5 +13,5 @@ UNIQUE KEY (`p_partkey`)
 DISTRIBUTED BY HASH(`p_partkey`) BUCKETS 1
 PROPERTIES (
 "replication_num" = "1",
-"storage_vault" = "multi_vault_ssb_hdfs_vault"
+"storage_vault_name" = "multi_vault_ssb_hdfs_vault"
 );
\ No newline at end of file
diff --git a/regression-test/suites/hdfs_vault/multi_vault_p2/ddl/supplier_create.sql b/regression-test/suites/hdfs_vault/multi_vault_p2/ddl/supplier_create.sql
index 6e04d01134b..58dbdfd4fa1 100644
--- a/regression-test/suites/hdfs_vault/multi_vault_p2/ddl/supplier_create.sql
+++ b/regression-test/suites/hdfs_vault/multi_vault_p2/ddl/supplier_create.sql
@@ -11,5 +11,5 @@ UNIQUE KEY (`s_suppkey`)
 DISTRIBUTED BY HASH(`s_suppkey`) BUCKETS 1
 PROPERTIES (
 "replication_num" = "1",
-"storage_vault" = "multi_vault_ssb_hdfs_vault"
+"storage_vault_name" = "multi_vault_ssb_hdfs_vault"
 );
\ No newline at end of file
diff --git a/regression-test/suites/hdfs_vault/multi_vault_p2/load.groovy b/regression-test/suites/hdfs_vault/multi_vault_p2/load.groovy
index c0ff09cf06f..c80bec525dd 100644
--- a/regression-test/suites/hdfs_vault/multi_vault_p2/load.groovy
+++ b/regression-test/suites/hdfs_vault/multi_vault_p2/load.groovy
@@ -28,7 +28,7 @@ suite("load") {
         PROPERTIES (
         "type"="hdfs",
         "fs.defaultFS"="${getHdfsFs()}",
-        "root_prefix" = "multi_vault_ssb_hdfs_vault"
+        "path_prefix" = "multi_vault_ssb_hdfs_vault"
         );
     """
 
@@ -37,7 +37,7 @@ suite("load") {
         PROPERTIES (
         "type"="hdfs",
         "fs.defaultFS"="${getHdfsFs()}",
-        "root_prefix" = "multi_vault_ssb_flat_hdfs_vault"
+        "path_prefix" = "multi_vault_ssb_flat_hdfs_vault"
         );
     """
 
diff --git a/regression-test/suites/hdfs_vault/ssb_sf1_p2/ddl/customer_create.sql b/regression-test/suites/hdfs_vault/ssb_sf1_p2/ddl/customer_create.sql
index 5f439f1f571..72af37c40b5 100644
--- a/regression-test/suites/hdfs_vault/ssb_sf1_p2/ddl/customer_create.sql
+++ b/regression-test/suites/hdfs_vault/ssb_sf1_p2/ddl/customer_create.sql
@@ -12,5 +12,5 @@ UNIQUE KEY (`c_custkey`)
 DISTRIBUTED BY HASH(`c_custkey`) BUCKETS 1
 PROPERTIES (
 "replication_num" = "1",
-"storage_vault" = "ssb_sf1_p2"
+"storage_vault_name" = "ssb_sf1_p2"
 );
\ No newline at end of file
diff --git a/regression-test/suites/hdfs_vault/ssb_sf1_p2/ddl/date_create.sql b/regression-test/suites/hdfs_vault/ssb_sf1_p2/ddl/date_create.sql
index 8ed70acb550..5e5a2631428 100644
--- a/regression-test/suites/hdfs_vault/ssb_sf1_p2/ddl/date_create.sql
+++ b/regression-test/suites/hdfs_vault/ssb_sf1_p2/ddl/date_create.sql
@@ -21,5 +21,5 @@ UNIQUE KEY (`d_datekey`)
 DISTRIBUTED BY HASH(`d_datekey`) BUCKETS 1
 PROPERTIES (
 "replication_num" = "1",
-"storage_vault" = "ssb_sf1_p2"
+"storage_vault_name" = "ssb_sf1_p2"
 );
\ No newline at end of file
diff --git a/regression-test/suites/hdfs_vault/ssb_sf1_p2/ddl/lineorder_create.sql b/regression-test/suites/hdfs_vault/ssb_sf1_p2/ddl/lineorder_create.sql
index 8ae4302ffd0..7e71dbbbdde 100644
--- a/regression-test/suites/hdfs_vault/ssb_sf1_p2/ddl/lineorder_create.sql
+++ b/regression-test/suites/hdfs_vault/ssb_sf1_p2/ddl/lineorder_create.sql
@@ -21,5 +21,5 @@ UNIQUE KEY (`lo_orderkey`, `lo_linenumber`)
 DISTRIBUTED BY HASH(`lo_orderkey`) BUCKETS 1
 PROPERTIES (
 "replication_num" = "1",
-"storage_vault" = "ssb_sf1_p2"
+"storage_vault_name" = "ssb_sf1_p2"
 );
\ No newline at end of file
diff --git a/regression-test/suites/hdfs_vault/ssb_sf1_p2/ddl/lineorder_flat_create.sql b/regression-test/suites/hdfs_vault/ssb_sf1_p2/ddl/lineorder_flat_create.sql
index 984c8b2921b..79b34b38377 100644
--- a/regression-test/suites/hdfs_vault/ssb_sf1_p2/ddl/lineorder_flat_create.sql
+++ b/regression-test/suites/hdfs_vault/ssb_sf1_p2/ddl/lineorder_flat_create.sql
@@ -42,5 +42,5 @@ UNIQUE KEY(`LO_ORDERDATE`, `LO_ORDERKEY`, `LO_LINENUMBER`)
 DISTRIBUTED BY HASH(`LO_ORDERKEY`) BUCKETS 1
 PROPERTIES (
 "replication_num" = "1",
-"storage_vault" = "ssb_sf1_p2"
+"storage_vault_name" = "ssb_sf1_p2"
 );
\ No newline at end of file
diff --git a/regression-test/suites/hdfs_vault/ssb_sf1_p2/ddl/part_create.sql b/regression-test/suites/hdfs_vault/ssb_sf1_p2/ddl/part_create.sql
index 59c51a7830a..d04c847aba9 100644
--- a/regression-test/suites/hdfs_vault/ssb_sf1_p2/ddl/part_create.sql
+++ b/regression-test/suites/hdfs_vault/ssb_sf1_p2/ddl/part_create.sql
@@ -13,5 +13,5 @@ UNIQUE KEY (`p_partkey`)
 DISTRIBUTED BY HASH(`p_partkey`) BUCKETS 1
 PROPERTIES (
 "replication_num" = "1",
-"storage_vault" = "ssb_sf1_p2"
+"storage_vault_name" = "ssb_sf1_p2"
 );
\ No newline at end of file
diff --git a/regression-test/suites/hdfs_vault/ssb_sf1_p2/ddl/supplier_create.sql b/regression-test/suites/hdfs_vault/ssb_sf1_p2/ddl/supplier_create.sql
index 1e1f2a828a3..c279a57cbe2 100644
--- a/regression-test/suites/hdfs_vault/ssb_sf1_p2/ddl/supplier_create.sql
+++ b/regression-test/suites/hdfs_vault/ssb_sf1_p2/ddl/supplier_create.sql
@@ -11,5 +11,5 @@ UNIQUE KEY (`s_suppkey`)
 DISTRIBUTED BY HASH(`s_suppkey`) BUCKETS 1
 PROPERTIES (
 "replication_num" = "1",
-"storage_vault" = "ssb_sf1_p2"
+"storage_vault_name" = "ssb_sf1_p2"
 );
\ No newline at end of file
diff --git a/regression-test/suites/hdfs_vault/ssb_sf1_p2/load.groovy b/regression-test/suites/hdfs_vault/ssb_sf1_p2/load.groovy
index eb4c59baa63..c8771252cfd 100644
--- a/regression-test/suites/hdfs_vault/ssb_sf1_p2/load.groovy
+++ b/regression-test/suites/hdfs_vault/ssb_sf1_p2/load.groovy
@@ -28,7 +28,7 @@ suite("load") {
         PROPERTIES (
         "type"="hdfs",
         "fs.defaultFS"="${getHdfsFs()}",
-        "root_prefix" = "ssb_sf1_p2"
+        "path_prefix" = "ssb_sf1_p2"
         );
     """
 
diff --git a/regression-test/suites/s3_vault/multi_vault_p2/ddl/customer_create.sql b/regression-test/suites/s3_vault/multi_vault_p2/ddl/customer_create.sql
index 746691100a6..6889890da39 100644
--- a/regression-test/suites/s3_vault/multi_vault_p2/ddl/customer_create.sql
+++ b/regression-test/suites/s3_vault/multi_vault_p2/ddl/customer_create.sql
@@ -12,5 +12,5 @@ UNIQUE KEY (`c_custkey`)
 DISTRIBUTED BY HASH(`c_custkey`) BUCKETS 1
 PROPERTIES (
 "replication_num" = "1",
-"storage_vault" = "multi_vault_ssb_hdfs_vault_s3"
+"storage_vault_name" = "multi_vault_ssb_hdfs_vault_s3"
 );
\ No newline at end of file
diff --git a/regression-test/suites/s3_vault/multi_vault_p2/ddl/date_create.sql b/regression-test/suites/s3_vault/multi_vault_p2/ddl/date_create.sql
index 33d445150eb..1d6b11827fc 100644
--- a/regression-test/suites/s3_vault/multi_vault_p2/ddl/date_create.sql
+++ b/regression-test/suites/s3_vault/multi_vault_p2/ddl/date_create.sql
@@ -21,5 +21,5 @@ UNIQUE KEY (`d_datekey`)
 DISTRIBUTED BY HASH(`d_datekey`) BUCKETS 1
 PROPERTIES (
 "replication_num" = "1",
-"storage_vault" = "multi_vault_ssb_hdfs_vault_s3"
+"storage_vault_name" = "multi_vault_ssb_hdfs_vault_s3"
 );
\ No newline at end of file
diff --git a/regression-test/suites/s3_vault/multi_vault_p2/ddl/lineorder_create.sql b/regression-test/suites/s3_vault/multi_vault_p2/ddl/lineorder_create.sql
index b6ee44d6ee0..fd5c98d9c60 100644
--- a/regression-test/suites/s3_vault/multi_vault_p2/ddl/lineorder_create.sql
+++ b/regression-test/suites/s3_vault/multi_vault_p2/ddl/lineorder_create.sql
@@ -21,5 +21,5 @@ UNIQUE KEY (`lo_orderkey`, `lo_linenumber`)
 DISTRIBUTED BY HASH(`lo_orderkey`) BUCKETS 1
 PROPERTIES (
 "replication_num" = "1",
-"storage_vault" = "multi_vault_ssb_hdfs_vault_s3"
+"storage_vault_name" = "multi_vault_ssb_hdfs_vault_s3"
 );
\ No newline at end of file
diff --git a/regression-test/suites/s3_vault/multi_vault_p2/ddl/lineorder_flat_create.sql b/regression-test/suites/s3_vault/multi_vault_p2/ddl/lineorder_flat_create.sql
index 5a1f716da62..cc3ca294844 100644
--- a/regression-test/suites/s3_vault/multi_vault_p2/ddl/lineorder_flat_create.sql
+++ b/regression-test/suites/s3_vault/multi_vault_p2/ddl/lineorder_flat_create.sql
@@ -42,5 +42,5 @@ UNIQUE KEY(`LO_ORDERDATE`, `LO_ORDERKEY`, `LO_LINENUMBER`)
 DISTRIBUTED BY HASH(`LO_ORDERKEY`) BUCKETS 1
 PROPERTIES (
 "replication_num" = "1",
-"storage_vault" = "multi_vault_ssb_flat_hdfs_vault_s3"
+"storage_vault_name" = "multi_vault_ssb_flat_hdfs_vault_s3"
 );
\ No newline at end of file
diff --git a/regression-test/suites/s3_vault/multi_vault_p2/ddl/part_create.sql b/regression-test/suites/s3_vault/multi_vault_p2/ddl/part_create.sql
index 48c08e2d9b4..11002286596 100644
--- a/regression-test/suites/s3_vault/multi_vault_p2/ddl/part_create.sql
+++ b/regression-test/suites/s3_vault/multi_vault_p2/ddl/part_create.sql
@@ -13,5 +13,5 @@ UNIQUE KEY (`p_partkey`)
 DISTRIBUTED BY HASH(`p_partkey`) BUCKETS 1
 PROPERTIES (
 "replication_num" = "1",
-"storage_vault" = "multi_vault_ssb_hdfs_vault_s3"
+"storage_vault_name" = "multi_vault_ssb_hdfs_vault_s3"
 );
\ No newline at end of file
diff --git a/regression-test/suites/s3_vault/multi_vault_p2/ddl/supplier_create.sql b/regression-test/suites/s3_vault/multi_vault_p2/ddl/supplier_create.sql
index 00360492a45..9d89ba4f07f 100644
--- a/regression-test/suites/s3_vault/multi_vault_p2/ddl/supplier_create.sql
+++ b/regression-test/suites/s3_vault/multi_vault_p2/ddl/supplier_create.sql
@@ -11,5 +11,5 @@ UNIQUE KEY (`s_suppkey`)
 DISTRIBUTED BY HASH(`s_suppkey`) BUCKETS 1
 PROPERTIES (
 "replication_num" = "1",
-"storage_vault" = "multi_vault_ssb_hdfs_vault_s3"
+"storage_vault_name" = "multi_vault_ssb_hdfs_vault_s3"
 );
\ No newline at end of file
diff --git a/regression-test/suites/s3_vault/ssb_sf1_p2/ddl/customer_create.sql b/regression-test/suites/s3_vault/ssb_sf1_p2/ddl/customer_create.sql
index de62670708b..0da027707df 100644
--- a/regression-test/suites/s3_vault/ssb_sf1_p2/ddl/customer_create.sql
+++ b/regression-test/suites/s3_vault/ssb_sf1_p2/ddl/customer_create.sql
@@ -12,5 +12,5 @@ UNIQUE KEY (`c_custkey`)
 DISTRIBUTED BY HASH(`c_custkey`) BUCKETS 1
 PROPERTIES (
 "replication_num" = "1",
-"storage_vault" = "ssb_sf1_p2_s3"
+"storage_vault_name" = "ssb_sf1_p2_s3"
 );
\ No newline at end of file
diff --git a/regression-test/suites/s3_vault/ssb_sf1_p2/ddl/date_create.sql b/regression-test/suites/s3_vault/ssb_sf1_p2/ddl/date_create.sql
index fadfff0118f..bfb448bce42 100644
--- a/regression-test/suites/s3_vault/ssb_sf1_p2/ddl/date_create.sql
+++ b/regression-test/suites/s3_vault/ssb_sf1_p2/ddl/date_create.sql
@@ -21,5 +21,5 @@ UNIQUE KEY (`d_datekey`)
 DISTRIBUTED BY HASH(`d_datekey`) BUCKETS 1
 PROPERTIES (
 "replication_num" = "1",
-"storage_vault" = "ssb_sf1_p2_s3"
+"storage_vault_name" = "ssb_sf1_p2_s3"
 );
\ No newline at end of file
diff --git a/regression-test/suites/s3_vault/ssb_sf1_p2/ddl/lineorder_create.sql b/regression-test/suites/s3_vault/ssb_sf1_p2/ddl/lineorder_create.sql
index eee10a809d5..d83400e2b2b 100644
--- a/regression-test/suites/s3_vault/ssb_sf1_p2/ddl/lineorder_create.sql
+++ b/regression-test/suites/s3_vault/ssb_sf1_p2/ddl/lineorder_create.sql
@@ -21,5 +21,5 @@ UNIQUE KEY (`lo_orderkey`, `lo_linenumber`)
 DISTRIBUTED BY HASH(`lo_orderkey`) BUCKETS 1
 PROPERTIES (
 "replication_num" = "1",
-"storage_vault" = "ssb_sf1_p2_s3"
+"storage_vault_name" = "ssb_sf1_p2_s3"
 );
\ No newline at end of file
diff --git a/regression-test/suites/s3_vault/ssb_sf1_p2/ddl/lineorder_flat_create.sql b/regression-test/suites/s3_vault/ssb_sf1_p2/ddl/lineorder_flat_create.sql
index 709f3ac1c2b..1f42b5f026b 100644
--- a/regression-test/suites/s3_vault/ssb_sf1_p2/ddl/lineorder_flat_create.sql
+++ b/regression-test/suites/s3_vault/ssb_sf1_p2/ddl/lineorder_flat_create.sql
@@ -42,5 +42,5 @@ UNIQUE KEY(`LO_ORDERDATE`, `LO_ORDERKEY`, `LO_LINENUMBER`)
 DISTRIBUTED BY HASH(`LO_ORDERKEY`) BUCKETS 1
 PROPERTIES (
 "replication_num" = "1",
-"storage_vault" = "ssb_sf1_p2_s3"
+"storage_vault_name" = "ssb_sf1_p2_s3"
 );
\ No newline at end of file
diff --git a/regression-test/suites/s3_vault/ssb_sf1_p2/ddl/part_create.sql b/regression-test/suites/s3_vault/ssb_sf1_p2/ddl/part_create.sql
index 7f1b4620a9c..66fafe1d751 100644
--- a/regression-test/suites/s3_vault/ssb_sf1_p2/ddl/part_create.sql
+++ b/regression-test/suites/s3_vault/ssb_sf1_p2/ddl/part_create.sql
@@ -13,5 +13,5 @@ UNIQUE KEY (`p_partkey`)
 DISTRIBUTED BY HASH(`p_partkey`) BUCKETS 1
 PROPERTIES (
 "replication_num" = "1",
-"storage_vault" = "ssb_sf1_p2_s3"
+"storage_vault_name" = "ssb_sf1_p2_s3"
 );
\ No newline at end of file
diff --git a/regression-test/suites/s3_vault/ssb_sf1_p2/ddl/supplier_create.sql b/regression-test/suites/s3_vault/ssb_sf1_p2/ddl/supplier_create.sql
index bcaf9ea9b73..21b73c9b66c 100644
--- a/regression-test/suites/s3_vault/ssb_sf1_p2/ddl/supplier_create.sql
+++ b/regression-test/suites/s3_vault/ssb_sf1_p2/ddl/supplier_create.sql
@@ -11,5 +11,5 @@ UNIQUE KEY (`s_suppkey`)
 DISTRIBUTED BY HASH(`s_suppkey`) BUCKETS 1
 PROPERTIES (
 "replication_num" = "1",
-"storage_vault" = "ssb_sf1_p2_s3"
+"storage_vault_name" = "ssb_sf1_p2_s3"
 );
\ No newline at end of file
diff --git a/regression-test/suites/vaults/create/create.groovy b/regression-test/suites/vaults/create/create.groovy
new file mode 100644
index 00000000000..e49b30f8c46
--- /dev/null
+++ b/regression-test/suites/vaults/create/create.groovy
@@ -0,0 +1,148 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("create_vault") {
+    if (!enableStoragevault()) {
+        logger.info("skip create storage vault case")
+        return
+    }
+
+    expectExceptionLike({
+        sql """
+            CREATE STORAGE VAULT IF NOT EXISTS failed_vault
+            PROPERTIES (
+            "type"="S3",
+            "fs.defaultFS"="${getHdfsFs()}",
+            "path_prefix" = "ssb_sf1_p2"
+            );
+        """
+    }, "Missing")
+
+    expectExceptionLike({
+        sql """
+            CREATE STORAGE VAULT IF NOT EXISTS failed_vault
+            PROPERTIES (
+            "type"="hdfs",
+            "s3.bucket"="${getHdfsFs()}",
+            "path_prefix" = "ssb_sf1_p2"
+            );
+        """
+    }, "invalid fs_name")
+
+    expectExceptionLike({
+        sql """
+            CREATE STORAGE VAULT IF NOT EXISTS failed_vault
+            PROPERTIES (
+            );
+        """
+    }, "Encountered")
+
+
+    sql """
+        CREATE STORAGE VAULT IF NOT EXISTS create_hdfs_vault
+        PROPERTIES (
+        "type"="hdfs",
+        "fs.defaultFS"="${getHdfsFs()}",
+        "path_prefix" = "default_vault_ssb_hdfs_vault"
+        );
+    """
+
+    expectExceptionLike({
+        sql """
+            CREATE STORAGE VAULT create_hdfs_vault
+            PROPERTIES (
+            "type"="hdfs",
+            "fs.defaultFS"="${getHdfsFs()}",
+            "path_prefix" = "default_vault_ssb_hdfs_vault"
+            );
+        """
+    }, "already created")
+
+
+    sql """
+        CREATE STORAGE VAULT IF NOT EXISTS create_s3_vault
+        PROPERTIES (
+        "type"="S3",
+        "s3.endpoint"="${getS3Endpoint()}",
+        "s3.region" = "${getS3Region()}",
+        "s3.access_key" = "${getS3AK()}",
+        "s3.secret_key" = "${getS3SK()}",
+        "s3.root.path" = "ssb_sf1_p2_s3",
+        "s3.bucket" = "${getS3BucketName()}",
+        "s3.external_endpoint" = "",
+        "provider" = "${getS3Provider()}"
+        );
+    """
+
+    expectExceptionLike({
+        sql """
+            CREATE STORAGE VAULT create_s3_vault
+            PROPERTIES (
+            "type"="S3",
+            "s3.endpoint"="${getS3Endpoint()}",
+            "s3.region" = "${getS3Region()}",
+            "s3.access_key" = "${getS3AK()}",
+            "s3.secret_key" = "${getS3SK()}",
+            "s3.root.path" = "ssb_sf1_p2_s3",
+            "s3.bucket" = "${getS3BucketName()}",
+            "s3.external_endpoint" = "",
+            "provider" = "${getS3Provider()}"
+            );
+        """
+    }, "already created")
+
+    def vaults_info = try_sql """
+        show storage vault
+    """
+
+    
+    boolean create_hdfs_vault_exist = false;
+    boolean create_s3_vault_exist = false;
+    boolean built_in_storage_vault_exist = false;
+    for (int i = 0; i < vaults_info.size(); i++) {
+        def name = vaults_info[i][0]
+        if (name.equals("create_hdfs_vault")) {
+            create_hdfs_vault_exist = true;
+        }
+        if (name.equals("create_s3_vault")) {
+            create_s3_vault_exist = true;
+        }
+        if (name.equals("built_in_storage_vault")) {
+            built_in_storage_vault_exist = true
+        }
+    }
+    assertTrue(create_hdfs_vault_exist)
+    assertTrue(create_s3_vault_exist)
+    assertTrue(built_in_storage_vault_exist)
+
+    expectExceptionLike({
+        sql """
+            CREATE STORAGE VAULT IF NOT EXISTS built_in_storage_vault
+            PROPERTIES (
+            "type"="S3",
+            "s3.endpoint"="${getS3Endpoint()}",
+            "s3.region" = "${getS3Region()}",
+            "s3.access_key" = "${getS3AK()}",
+            "s3.secret_key" = "${getS3SK()}",
+            "s3.root.path" = "ssb_sf1_p2_s3",
+            "s3.bucket" = "${getS3BucketName()}",
+            "s3.external_endpoint" = "",
+            "provider" = "${getS3Provider()}"
+            );
+        """
+    }, "already created")
+}
\ No newline at end of file
diff --git a/regression-test/suites/vaults/default/default.groovy b/regression-test/suites/vaults/default/default.groovy
new file mode 100644
index 00000000000..46f55a865ed
--- /dev/null
+++ b/regression-test/suites/vaults/default/default.groovy
@@ -0,0 +1,108 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("default_vault") {
+    if (!enableStoragevault()) {
+        logger.info("skip default storage vault case")
+        return
+    }
+    expectExceptionLike({
+        sql """
+            set not_exist as default storage vault
+        """
+    }, "invalid storage vault name")
+
+    def tableName = "table_use_vault"
+
+    expectExceptionLike({
+        sql "DROP TABLE IF EXISTS ${tableName}"
+        sql """
+            CREATE TABLE ${tableName} (
+                `key` INT,
+                value INT
+            ) DUPLICATE KEY (`key`) DISTRIBUTED BY HASH (`key`) BUCKETS 1
+            PROPERTIES ('replication_num' = '1')
+        """
+    }, "supply")
+
+    sql """
+        set built_in_storage_vault as default storage vault
+    """
+
+
+    sql "DROP TABLE IF EXISTS ${tableName}"
+    sql """
+        CREATE TABLE ${tableName} (
+            `key` INT,
+            value INT
+        ) DUPLICATE KEY (`key`) DISTRIBUTED BY HASH (`key`) BUCKETS 1
+        PROPERTIES ('replication_num' = '1')
+    """
+
+
+    sql """
+        set built_in_storage_vault as default storage vault
+    """
+
+    sql """
+        CREATE STORAGE VAULT IF NOT EXISTS create_default_hdfs_vault
+        PROPERTIES (
+        "type"="hdfs",
+        "fs.defaultFS"="${getHdfsFs()}",
+        "path_prefix" = "default_vault_ssb_hdfs_vault"
+        );
+    """
+
+    sql """
+        set create_default_hdfs_vault as default storage vault
+    """
+
+    sql "DROP TABLE IF EXISTS ${tableName}"
+    sql """
+        CREATE TABLE ${tableName} (
+            `key` INT,
+            value INT
+        ) DUPLICATE KEY (`key`) DISTRIBUTED BY HASH (`key`) BUCKETS 1
+        PROPERTIES ('replication_num' = '1')
+    """
+    sql """
+        insert into ${tableName} values(1, 1);
+    """
+    sql """
+        select * from ${tableName};
+    """
+
+    def create_table_stmt = sql """
+        show create table ${tableName}
+    """
+
+    assertTrue(create_table_stmt[0][1].contains("create_default_hdfs_vault"))
+
+    expectExceptionLike({
+        sql """
+            alter table ${tableName} set("storage_vault_name" = "built_in_storage_vault");
+        """
+    }, "You can not modify")
+
+    try {
+        sql """
+            set null as default storage vault
+        """
+    } catch (Exception e) {
+    }
+
+}
\ No newline at end of file

