This is an automated email from the ASF dual-hosted git repository.

dataroaring pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new 8e93989d460 [feature](doris compose) set up local test multi clusters  (#32626)
8e93989d460 is described below

commit 8e93989d46032c94cc6f3ddafe43b380bdad2124
Author: yujun <[email protected]>
AuthorDate: Fri Mar 22 08:27:40 2024 +0800

    [feature](doris compose) set up local test multi clusters  (#32626)
---
 docker/runtime/doris-compose/Readme.md           |  8 +++
 docker/runtime/doris-compose/cluster.py          | 19 +++--
 docker/runtime/doris-compose/command.py          | 92 +++++++++++++++++++-----
 docker/runtime/doris-compose/resource/init_be.sh |  4 +-
 4 files changed, 102 insertions(+), 21 deletions(-)

diff --git a/docker/runtime/doris-compose/Readme.md b/docker/runtime/doris-compose/Readme.md
index cd3d7805fe8..7539ad01a0d 100644
--- a/docker/runtime/doris-compose/Readme.md
+++ b/docker/runtime/doris-compose/Readme.md
@@ -123,3 +123,11 @@ python docker/runtime/doris-compose/doris-compose.py config <cluster-name>
 
 Generate regression-conf-custom.groovy to connect to the specific docker cluster.
 
+### Set up a cloud multi-cluster test env
+
+Steps:
+
+1. Create a new cluster: `python doris-compose.py up my-cluster my-image --add-fe-num 1 --add-be-num 4 --cloud --no-reg-be`
+2. Generate regression-conf-custom.groovy: `python doris-compose.py config my-cluster`
+3. Run the regression test: `bash run-regression-test.sh --run -times 1 -parallel 1 -suiteParallel 1 -d cloud/multi_cluster`
+
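For orientation, here is a hedged sketch of the extra keys that step 2 writes into regression-conf-custom.groovy for a cloud cluster (they are assembled by the command.py change further down). Every address, port and unique id below is a made-up placeholder; only the key names, the 18030 FE cloud HTTP port, and the metaServiceToken value come from this patch.

```python
# Illustrative placeholders only; the real values are derived from the
# cluster metadata by `python doris-compose.py config my-cluster`.
cloud_extra_keys = {
    "feCloudHttpAddress": "172.20.0.2:18030",
    "metaServiceHttpAddress": "172.20.0.6:5000",      # assumed meta-service port
    "recycleServiceHttpAddress": "172.20.0.7:5000",   # recycler uses the same port constant
    "multiClusterInstance": "default_instance_id",
    # one "<be_ip>:<heartbeat_port>:<webserver_port>:<cloud_unique_id>:<brpc_port>" entry per BE
    "multiClusterBes": "172.20.0.3:9050:8040:be_cloud_unique_id_1:8060,"
                       "172.20.0.4:9050:8040:be_cloud_unique_id_2:8060",
    "metaServiceToken": "greedisgood9999",
}
```

Because step 1 passes --no-reg-be, these BEs start without registering themselves to the meta service, presumably so the cloud/multi_cluster suites can attach them to named compute clusters on their own.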
diff --git a/docker/runtime/doris-compose/cluster.py b/docker/runtime/doris-compose/cluster.py
index cda6c3d845d..83df8d448d5 100644
--- a/docker/runtime/doris-compose/cluster.py
+++ b/docker/runtime/doris-compose/cluster.py
@@ -435,6 +435,10 @@ class BE(Node):
         with open("{}/conf/CLUSTER_NAME".format(self.get_path()), "w") as f:
             f.write(self.cluster.be_cluster)
 
+    def get_cluster_name(self):
+        with open("{}/conf/CLUSTER_NAME".format(self.get_path()), "r") as f:
+            return f.read().strip()
+
     def init_disk(self, be_disks):
         path = self.get_path()
         dirs = []
@@ -480,6 +484,7 @@ class BE(Node):
         envs = super().docker_env()
         if self.cluster.is_cloud:
             envs["CLOUD_UNIQUE_ID"] = self.cloud_unique_id()
+            envs["REG_BE_TO_MS"] = 1 if self.cluster.reg_be else 0
         return envs
 
     def cloud_unique_id(self):
@@ -590,8 +595,8 @@ class FDB(Node):
 class Cluster(object):
 
     def __init__(self, name, subnet, image, is_cloud, fe_config, be_config,
-                 ms_config, recycle_config, be_disks, be_cluster, coverage_dir,
-                 cloud_store_config):
+                 ms_config, recycle_config, be_disks, be_cluster, reg_be,
+                 coverage_dir, cloud_store_config):
         self.name = name
         self.subnet = subnet
         self.image = image
@@ -602,6 +607,7 @@ class Cluster(object):
         self.recycle_config = recycle_config
         self.be_disks = be_disks
         self.be_cluster = be_cluster
+        self.reg_be = reg_be
         self.coverage_dir = coverage_dir
         self.cloud_store_config = cloud_store_config
         self.groups = {
@@ -611,14 +617,15 @@ class Cluster(object):
 
     @staticmethod
     def new(name, image, is_cloud, fe_config, be_config, ms_config,
-            recycle_config, be_disks, be_cluster, coverage_dir,
+            recycle_config, be_disks, be_cluster, reg_be, coverage_dir,
             cloud_store_config):
         os.makedirs(LOCAL_DORIS_PATH, exist_ok=True)
         with filelock.FileLock(os.path.join(LOCAL_DORIS_PATH, "lock")):
             subnet = gen_subnet_prefix16()
             cluster = Cluster(name, subnet, image, is_cloud, fe_config,
                               be_config, ms_config, recycle_config, be_disks,
-                              be_cluster, coverage_dir, cloud_store_config)
+                              be_cluster, reg_be, coverage_dir,
+                              cloud_store_config)
             os.makedirs(cluster.get_path(), exist_ok=True)
             os.makedirs(get_status_path(name), exist_ok=True)
             cluster._save_meta()
@@ -707,6 +714,10 @@ class Cluster(object):
     def get_meta_server_addr(self):
         return "{}:{}".format(self.get_node(Node.TYPE_MS, 1).get_ip(), MS_PORT)
 
+    def get_recycle_addr(self):
+        return "{}:{}".format(
+            self.get_node(Node.TYPE_RECYCLE, 1).get_ip(), MS_PORT)
+
     def remove(self, node_type, id):
         group = self.get_group(node_type)
         group.remove(id)
diff --git a/docker/runtime/doris-compose/command.py b/docker/runtime/doris-compose/command.py
index aa98e932872..86e3afd225e 100644
--- a/docker/runtime/doris-compose/command.py
+++ b/docker/runtime/doris-compose/command.py
@@ -310,6 +310,23 @@ class UpCommand(Command):
                                 action=self._get_parser_bool_action(False),
                                 help="Run containers in frontend. ")
 
+        if self._support_boolean_action():
+            parser.add_argument(
+                "--reg-be",
+                default=True,
+                action=self._get_parser_bool_action(False),
+                help="Register BE to the meta server in cloud mode; used for the multi-cluster test. "\
+                "If --no-reg-be is specified, the BE will not be registered to the meta server. ")
+        else:
+            parser.add_argument(
+                "--no-reg-be",
+                dest='reg_be',
+                default=True,
+                action=self._get_parser_bool_action(False),
+                help=
+                "Don't register BE to the meta server in cloud mode; used for the multi-cluster test"
+            )
+
     def run(self, args):
         if not args.NAME:
             raise Exception("Need specific not empty cluster name")
@@ -370,7 +387,7 @@ class UpCommand(Command):
                                           args.fe_config, args.be_config,
                                           args.ms_config, args.recycle_config,
                                           args.be_disks, args.be_cluster,
-                                          args.coverage_dir,
+                                          args.reg_be, args.coverage_dir,
                                           cloud_store_config)
             LOG.info("Create new cluster {} succ, cluster path is {}".format(
                 args.NAME, cluster.get_path()))
@@ -732,14 +749,42 @@ class GenConfCommand(Command):
         return parser
 
     def run(self, args):
-        content = '''
-jdbcUrl = "jdbc:mysql://127.0.0.1:9030/?useLocalSessionState=true&allowLoadLocalInfile=true"
-targetJdbcUrl = "jdbc:mysql://127.0.0.1:9030/?useLocalSessionState=true&allowLoadLocalInfile=true"
-feSourceThriftAddress = "127.0.0.1:9020"
-feTargetThriftAddress = "127.0.0.1:9020"
-syncerAddress = "127.0.0.1:9190"
-feHttpAddress = "127.0.0.1:8030"
+        base_conf = '''
+jdbcUrl = "jdbc:mysql://{fe_ip}:9030/?useLocalSessionState=true&allowLoadLocalInfile=true"
+targetJdbcUrl = "jdbc:mysql://{fe_ip}:9030/?useLocalSessionState=true&allowLoadLocalInfile=true"
+feSourceThriftAddress = "{fe_ip}:9020"
+feTargetThriftAddress = "{fe_ip}:9020"
+syncerAddress = "{fe_ip}:9190"
+feHttpAddress = "{fe_ip}:8030"
+'''
+
+        cloud_conf = '''
+feCloudHttpAddress = "{fe_ip}:18030"
+metaServiceHttpAddress = "{ms_endpoint}"
+recycleServiceHttpAddress = "{recycle_endpoint}"
+multiClusterInstance = "default_instance_id"
+multiClusterBes = "{multi_cluster_bes}"
+metaServiceToken = "greedisgood9999"
 '''
+
+        def confirm_custom_file_path(doris_root_dir):
+            relative_custom_file_path = "regression-test/conf/regression-conf-custom.groovy"
+            regression_conf_custom = os.path.join(doris_root_dir,
+                                                  relative_custom_file_path)
+            ans = input(
+                "\nWrite file {}?  y / n / c (change custom conf path):  ".
+                format(regression_conf_custom))
+            if ans == 'y':
+                return regression_conf_custom
+            elif ans == 'c':
+                return confirm_custom_file_path(
+                    input("\nInput your doris or selectdb-core root path (i.e. the parent "\
+                        "directory of regression-test, where the custom conf file will be saved): "
+                    ).strip())
+            else:
+                return ""
+
+        cluster = CLUSTER.Cluster.load(args.NAME)
         master_fe_ip = CLUSTER.get_master_fe_endpoint(args.NAME)
         if not master_fe_ip:
             print("Not found cluster with name {} in directory {}".format(
@@ -748,16 +793,31 @@ feHttpAddress = "127.0.0.1:8030"
         doris_root_dir = os.path.abspath(__file__)
         for i in range(4):
             doris_root_dir = os.path.dirname(doris_root_dir)
-        regression_conf_custom = doris_root_dir + "/regression-test/conf/regression-conf-custom.groovy"
-        if input("write file {} ?\n   y/N:  ".format(
-                regression_conf_custom)) != 'y':
-            print("No write regression custom file.")
+
+        regression_conf_custom = confirm_custom_file_path(doris_root_dir)
+        if not regression_conf_custom:
+            print("\nRegression custom file not written.")
             return
+
         with open(regression_conf_custom, "w") as f:
-            f.write(
-                content.replace("127.0.0.1",
-                                master_fe_ip[:master_fe_ip.find(':')]))
-        print("Write succ: " + regression_conf_custom)
+            fe_ip = master_fe_ip[:master_fe_ip.find(':')]
+            f.write(base_conf.format(fe_ip=fe_ip))
+            if cluster.is_cloud:
+                multi_cluster_bes = ",".join([
+                    "{}:{}:{}:{}:{}".format(be.get_ip(),
+                                            CLUSTER.BE_HEARTBEAT_PORT,
+                                            CLUSTER.BE_WEBSVR_PORT,
+                                            be.cloud_unique_id(),
+                                            CLUSTER.BE_BRPC_PORT)
+                    for be in cluster.get_all_nodes(CLUSTER.Node.TYPE_BE)
+                ])
+                f.write(
+                    cloud_conf.format(
+                        fe_ip=fe_ip,
+                        ms_endpoint=cluster.get_meta_server_addr(),
+                        recycle_endpoint=cluster.get_recycle_addr(),
+                        multi_cluster_bes=multi_cluster_bes))
+        print("\nWrite succ: " + regression_conf_custom)
 
 
 class ListCommand(Command):
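The --reg-be / --no-reg-be flag added above relies on Command._support_boolean_action() and _get_parser_bool_action(), which live elsewhere in command.py and are not shown in this patch. A minimal sketch of the pattern they presumably implement, assuming argparse.BooleanOptionalAction on Python 3.9+ and a plain store_false fallback on older interpreters:

```python
import argparse

# Sketch only: names and fallback behaviour here are assumptions, not taken from this patch.
def support_boolean_action():
    # BooleanOptionalAction (auto-generates --flag / --no-flag) exists since Python 3.9.
    return hasattr(argparse, "BooleanOptionalAction")

def get_parser_bool_action(value_when_flag_given):
    if support_boolean_action():
        return argparse.BooleanOptionalAction
    return "store_false" if value_when_flag_given is False else "store_true"

parser = argparse.ArgumentParser()
if support_boolean_action():
    # Registers both --reg-be and --no-reg-be automatically.
    parser.add_argument("--reg-be", default=True,
                        action=get_parser_bool_action(False))
else:
    # Older argparse: only the negated switch is exposed.
    parser.add_argument("--no-reg-be", dest="reg_be", default=True,
                        action=get_parser_bool_action(False))

print(parser.parse_args([]).reg_be)               # True
print(parser.parse_args(["--no-reg-be"]).reg_be)  # False
```

Either way the UpCommand ends up with args.reg_be, which is what gets threaded into Cluster.new() above.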
diff --git a/docker/runtime/doris-compose/resource/init_be.sh b/docker/runtime/doris-compose/resource/init_be.sh
index 0df464c625c..d9b7953b534 100755
--- a/docker/runtime/doris-compose/resource/init_be.sh
+++ b/docker/runtime/doris-compose/resource/init_be.sh
@@ -146,7 +146,9 @@ add_be_to_cluster() {
     fi
 
     if [ "${IS_CLOUD}" == "1" ]; then
-        add_cloud_be
+        if [ "${REG_BE_TO_MS}" == "1" ]; then
+            add_cloud_be
+        fi
     else
         add_local_be
     fi
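Putting the pieces together (a hedged reconstruction of the flow, not text from the patch): `up ... --cloud --no-reg-be` stores reg_be = False in the cluster meta, BE.docker_env() then emits REG_BE_TO_MS=0 into the BE container environment, and the init_be.sh change above skips add_cloud_be, so the BE stays unregistered until a test adds it to a compute cluster itself.

```python
# Minimal sketch of the gate, using a stand-in object rather than the real
# cluster.py classes; only the REG_BE_TO_MS name and the 1/0 values come from the patch.
def reg_be_to_ms_env(cluster):
    envs = {}
    if cluster.is_cloud:
        # init_be.sh only calls add_cloud_be when this renders as the string "1".
        envs["REG_BE_TO_MS"] = 1 if cluster.reg_be else 0
    return envs

class _Stub:
    is_cloud, reg_be = True, False   # i.e. `up ... --cloud --no-reg-be`

print(reg_be_to_ms_env(_Stub()))     # {'REG_BE_TO_MS': 0}
```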

