This is an automated email from the ASF dual-hosted git repository.
jacktengg pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/master by this push:
new 97314924dec [regression-test](framework) support trino connector tests in multi be and fe env (#34731)
97314924dec is described below
commit 97314924dec9d087bf2df3d6f00d43fc02668809
Author: shuke <[email protected]>
AuthorDate: Mon May 13 17:35:45 2024 +0800
[regression-test](framework) support trino connector tests in multi be and fe env (#34731)
* 1
* 2
* 2
* 3
* 4
---
.../org/apache/doris/regression/suite/Suite.groovy | 75 +++++++++++++++++++++-
.../hive/test_trino_different_parquet_types.groovy | 52 +++------------
.../hive/test_trino_hive_orc.groovy | 50 +++------------
.../hive/test_trino_hive_other.groovy | 51 +++------------
.../hive/test_trino_hive_parquet.groovy | 52 +++------------
.../hive/test_trino_hive_schema_evolution.groovy | 52 +++------------
.../hive/test_trino_hive_serde_prop.groovy | 52 +++------------
.../hive/test_trino_hive_tablesample_p0.groovy | 52 +++------------
.../hive/test_trino_hive_tpch_sf1_orc.groovy | 51 +++------------
.../hive/test_trino_hive_tpch_sf1_parquet.groovy | 51 +++------------
.../test_trino_prepare_hive_data_in_case.groovy | 52 +++------------
.../trino_connector/test_plugins_download.groovy | 50 +++------------
12 files changed, 161 insertions(+), 479 deletions(-)
diff --git a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
index d5eb22756ef..b42f21d297f 100644
--- a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
+++ b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
@@ -80,6 +80,7 @@ class Suite implements GroovyInterceptable {
final String name
final String group
final Logger logger = LoggerFactory.getLogger(this.class)
+ static final Logger staticLogger = LoggerFactory.getLogger(Suite.class)
// set this in suite to determine which hive docker to use
String hivePrefix = "hive2"
@@ -89,6 +90,7 @@ class Suite implements GroovyInterceptable {
final List<Closure> finishCallbacks = new Vector<>()
final List<Throwable> lazyCheckExceptions = new Vector<>()
final List<Future> lazyCheckFutures = new Vector<>()
+ static Boolean isTrinoConnectorDownloaded = false
Suite(String name, String group, SuiteContext context, SuiteCluster cluster) {
this.name = name
@@ -755,17 +757,86 @@ class Suite implements GroovyInterceptable {
return s3Url
}
- void scpFiles(String username, String host, String files, String filePath, boolean fromDst=true) {
+ static void scpFiles(String username, String host, String files, String filePath, boolean fromDst=true) {
String cmd = "scp -o StrictHostKeyChecking=no -r ${username}@${host}:${files} ${filePath}"
if (!fromDst) {
cmd = "scp -o StrictHostKeyChecking=no -r ${files} ${username}@${host}:${filePath}"
}
- logger.info("Execute: ${cmd}".toString())
+ staticLogger.info("Execute: ${cmd}".toString())
Process process = cmd.execute()
def code = process.waitFor()
Assert.assertEquals(0, code)
}
+ void dispatchTrinoConnectors(ArrayList host_ips)
+ {
+ def dir_download = context.config.otherConfigs.get("trinoPluginsPath")
+ def s3_url = getS3Url()
+ def url = "${s3_url}/regression/trino-connectors.tar.gz"
+ dispatchTrinoConnectors_impl(host_ips, dir_download, url)
+ }
+
+ /*
+ * Downloads the trino connectors and sends them to every fe and be.
+ * Three configuration entries support this: trino_connectors in regression-conf.groovy, and trino_connector_plugin_dir in be and fe.
+ * The fe and be configs must be consistent with the regression-conf.groovy config.
+ * e.g. if regression-conf.groovy sets trino_connectors = "/tmp/trino_connector", then be.conf and fe.conf must set trino_connector_plugin_dir="/tmp/trino_connector/connectors"
+ *
+ * This function is not reentrant.
+ *
+ * If it fails, it calls assertTrue(false).
+ */
+ static synchronized void dispatchTrinoConnectors_impl(ArrayList host_ips, String dir_download, String url) {
+ if (isTrinoConnectorDownloaded == true) {
+ staticLogger.info("trino connector downloaded")
+ return
+ }
+
+ Assert.assertTrue(!dir_download.isEmpty())
+ def path_tar = "${dir_download}/trino-connectors.tar.gz"
+ // extract to a tmp directory, and then scp to every host in host_ips, including self.
+ def dir_connector_tmp = "${dir_download}/connectors_tmp"
+ def path_connector_tmp = "${dir_connector_tmp}/connectors"
+ def path_connector = "${dir_download}/connectors"
+
+ def cmds = [] as List
+ cmds.add("mkdir -p ${dir_download}")
+ cmds.add("rm -rf ${path_tar}")
+ cmds.add("rm -rf ${dir_connector_tmp}")
+ cmds.add("mkdir -p ${dir_connector_tmp}")
+ cmds.add("/usr/bin/curl --max-time 600 ${url} --output ${path_tar}")
+ cmds.add("tar -zxvf ${path_tar} -C ${dir_connector_tmp}")
+
+ def executeCommand = { String cmd, Boolean mustSuc ->
+ try {
+ staticLogger.info("execute ${cmd}")
+ def proc = cmd.execute()
+ // if timeout, exception will be thrown
+ proc.waitForOrKill(900 * 1000)
+ staticLogger.info("execute result ${proc.getText()}.")
+ if (mustSuc == true) {
+ Assert.assertEquals(0, proc.exitValue())
+ }
+ } catch (IOException e) {
+ Assert.assertTrue(false, "execute timeout")
+ }
+ }
+
+ for (def cmd in cmds) {
+ executeCommand(cmd, true)
+ }
+
+ host_ips = host_ips.unique()
+ for (def ip in host_ips) {
+ staticLogger.info("scp to ${ip}")
+ executeCommand("ssh -o StrictHostKeyChecking=no root@${ip} \"rm -rf ${path_connector}\"", false)
+ scpFiles("root", ip, path_connector_tmp, path_connector, false) // if it fails, assertTrue(false) is executed.
+ }
+
+ isTrinoConnectorDownloaded = true
+ staticLogger.info("dispatch trino connector to ${dir_download} succeed")
+ }
+
void mkdirRemote(String username, String host, String path) {
String cmd = "ssh ${username}@${host} 'mkdir -p ${path}'"
logger.info("Execute: ${cmd}".toString())
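For reference, the configuration contract described in the block comment above would look roughly like the following. This is a sketch, not part of the patch: the regression-conf.groovy key name trinoPluginsPath is inferred from the otherConfigs.get("trinoPluginsPath") call in the new code, and the /tmp/trino_connector paths are just the example values used in that comment.

    // regression-conf.groovy
    trinoPluginsPath = "/tmp/trino_connector"

    # fe.conf and be.conf on every FE/BE node
    trino_connector_plugin_dir=/tmp/trino_connector/connectors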
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_different_parquet_types.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_different_parquet_types.groovy
index 44dd015cfc3..63dca63c422 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_different_parquet_types.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_different_parquet_types.groovy
@@ -16,52 +16,16 @@
// under the License.
suite("test_trino_different_parquet_types", "p0,external,hive,external_docker,external_docker_hive") {
- def trino_connector_download_dir = context.config.otherConfigs.get("trinoPluginsPath")
-
- // mkdir trino_connector_download_dir
- logger.info("start create dir ${trino_connector_download_dir} ...")
- def mkdir_connectors_tar = "mkdir -p ${trino_connector_download_dir}".execute().getText()
- logger.info("finish create dir, result: ${mkdir_connectors_tar} ...")
-
-
- def plugins_compression = "${trino_connector_download_dir}/trino-connectors.tar.gz"
- def plugins_dir = "${trino_connector_download_dir}/connectors"
- // download trino-connectors.tar.gz
- File path = new File("${plugins_compression}")
- if (path.exists() && path.isFile()) {
- logger.info("${plugins_compression} has been downloaded")
- } else {
- logger.info("start delete trino-connector plugins dir ...")
- def delete_local_connectors_tar = "rm -r ${plugins_dir}".execute()
- logger.info("start download trino-connector plugins ...")
- def s3_url = getS3Url()
-
- logger.info("getS3Url ==== ${s3_url}")
- def download_connectors_tar = "/usr/bin/curl ${s3_url}/regression/trino-connectors.tar.gz --output ${plugins_compression}"
- logger.info("download cmd : ${download_connectors_tar}")
- def run_download_connectors_cmd = download_connectors_tar.execute().getText()
- logger.info("result: ${run_download_connectors_cmd}")
- logger.info("finish download ${plugins_compression} ...")
+ def host_ips = new ArrayList()
+ String[][] backends = sql """ show backends """
+ for (def b in backends) {
+ host_ips.add(b[1])
}
-
- // decompression trino-plugins.tar.gz
- File dir = new File("${plugins_dir}")
- if (dir.exists() && dir.isDirectory()) {
- logger.info("${plugins_dir} dir has been decompressed")
- } else {
- if (path.exists() && path.isFile()) {
- def run_cmd = "tar -zxvf ${plugins_compression} -C ${trino_connector_download_dir}"
- logger.info("run_cmd : $run_cmd")
- def run_decompress_cmd = run_cmd.execute().getText()
- logger.info("result: $run_decompress_cmd")
- } else {
- logger.info("${plugins_compression} is not exist or is not a file.")
- throw exception
- }
+ String [][] frontends = sql """ show frontends """
+ for (def f in frontends) {
+ host_ips.add(f[1])
}
-
-
-
+ dispatchTrinoConnectors(host_ips.unique())
String hms_port = context.config.otherConfigs.get("hive2HmsPort")
String hdfs_port = context.config.otherConfigs.get("hive2HdfsPort")
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_orc.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_orc.groovy
index 22b7724d954..602169b7975 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_orc.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_orc.groovy
@@ -16,50 +16,16 @@
// under the License.
suite("test_trino_hive_orc", "all_types,external,hive,external_docker,external_docker_hive") {
- def trino_connector_download_dir = context.config.otherConfigs.get("trinoPluginsPath")
-
- // mkdir trino_connector_download_dir
- logger.info("start create dir ${trino_connector_download_dir} ...")
- def mkdir_connectors_tar = "mkdir -p ${trino_connector_download_dir}".execute().getText()
- logger.info("finish create dir, result: ${mkdir_connectors_tar} ...")
-
-
- def plugins_compression = "${trino_connector_download_dir}/trino-connectors.tar.gz"
- def plugins_dir = "${trino_connector_download_dir}/connectors"
-
- // download trino-connectors.tar.gz
- File path = new File("${plugins_compression}")
- if (path.exists() && path.isFile()) {
- logger.info("${plugins_compression} has been downloaded")
- } else {
- logger.info("start delete trino-connector plugins dir ...")
- def delete_local_connectors_tar = "rm -r ${plugins_dir}".execute()
- logger.info("start download trino-connector plugins ...")
- def s3_url = getS3Url()
-
- logger.info("getS3Url ==== ${s3_url}")
- def download_connectors_tar = "/usr/bin/curl ${s3_url}/regression/trino-connectors.tar.gz --output ${plugins_compression}"
- logger.info("download cmd : ${download_connectors_tar}")
- def run_download_connectors_cmd = download_connectors_tar.execute().getText()
- logger.info("result: ${run_download_connectors_cmd}")
- logger.info("finish download ${plugins_compression} ...")
+ def host_ips = new ArrayList()
+ String[][] backends = sql """ show backends """
+ for (def b in backends) {
+ host_ips.add(b[1])
}
-
- // decompression trino-plugins.tar.gz
- File dir = new File("${plugins_dir}")
- if (dir.exists() && dir.isDirectory()) {
- logger.info("${plugins_dir} dir has been decompressed")
- } else {
- if (path.exists() && path.isFile()) {
- def run_cmd = "tar -zxvf ${plugins_compression} -C ${trino_connector_download_dir}"
- logger.info("run_cmd : $run_cmd")
- def run_decompress_cmd = run_cmd.execute().getText()
- logger.info("result: $run_decompress_cmd")
- } else {
- logger.info("${plugins_compression} is not exist or is not a file.")
- throw exception
- }
+ String [][] frontends = sql """ show frontends """
+ for (def f in frontends) {
+ host_ips.add(f[1])
}
+ dispatchTrinoConnectors(host_ips.unique())
// Ensure that all types are parsed correctly
def select_top50 = {
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_other.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_other.groovy
index 0c66fbbc29b..427951d0599 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_other.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_other.groovy
@@ -16,51 +16,16 @@
// under the License.
suite("test_trino_hive_other", "external,hive,external_docker,external_docker_hive") {
- def trino_connector_download_dir = context.config.otherConfigs.get("trinoPluginsPath")
-
- // mkdir trino_connector_download_dir
- logger.info("start create dir ${trino_connector_download_dir} ...")
- def mkdir_connectors_tar = "mkdir -p ${trino_connector_download_dir}".execute().getText()
- logger.info("finish create dir, result: ${mkdir_connectors_tar} ...")
-
-
- def plugins_compression = "${trino_connector_download_dir}/trino-connectors.tar.gz"
- def plugins_dir = "${trino_connector_download_dir}/connectors"
-
- // download trino-connectors.tar.gz
- File path = new File("${plugins_compression}")
- if (path.exists() && path.isFile()) {
- logger.info("${plugins_compression} has been downloaded")
- } else {
- logger.info("start delete trino-connector plugins dir ...")
- def delete_local_connectors_tar = "rm -r ${plugins_dir}".execute()
- logger.info("start download trino-connector plugins ...")
- def s3_url = getS3Url()
-
- logger.info("getS3Url ==== ${s3_url}")
- def download_connectors_tar = "/usr/bin/curl ${s3_url}/regression/trino-connectors.tar.gz --output ${plugins_compression}"
- logger.info("download cmd : ${download_connectors_tar}")
- def run_download_connectors_cmd = download_connectors_tar.execute().getText()
- logger.info("result: ${run_download_connectors_cmd}")
- logger.info("finish download ${plugins_compression} ...")
+ def host_ips = new ArrayList()
+ String[][] backends = sql """ show backends """
+ for (def b in backends) {
+ host_ips.add(b[1])
}
-
- // decompression trino-plugins.tar.gz
- File dir = new File("${plugins_dir}")
- if (dir.exists() && dir.isDirectory()) {
- logger.info("${plugins_dir} dir has been decompressed")
- } else {
- if (path.exists() && path.isFile()) {
- def run_cmd = "tar -zxvf ${plugins_compression} -C ${trino_connector_download_dir}"
- logger.info("run_cmd : $run_cmd")
- def run_decompress_cmd = run_cmd.execute().getText()
- logger.info("result: $run_decompress_cmd")
- } else {
- logger.info("${plugins_compression} is not exist or is not a file.")
- throw exception
- }
+ String [][] frontends = sql """ show frontends """
+ for (def f in frontends) {
+ host_ips.add(f[1])
}
-
+ dispatchTrinoConnectors(host_ips.unique())
def q01 = {
qt_q24 """ select name, count(1) as c from student group by name order by name desc;"""
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_parquet.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_parquet.groovy
index 4f37ebc52bc..a060311e903 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_parquet.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_parquet.groovy
@@ -16,52 +16,16 @@
// under the License.
suite("test_trino_hive_parquet", "p0,external,hive,external_docker,external_docker_hive") {
- def trino_connector_download_dir = context.config.otherConfigs.get("trinoPluginsPath")
-
- // mkdir trino_connector_download_dir
- logger.info("start create dir ${trino_connector_download_dir} ...")
- def mkdir_connectors_tar = "mkdir -p ${trino_connector_download_dir}".execute().getText()
- logger.info("finish create dir, result: ${mkdir_connectors_tar} ...")
-
-
- def plugins_compression = "${trino_connector_download_dir}/trino-connectors.tar.gz"
- def plugins_dir = "${trino_connector_download_dir}/connectors"
-
- // download trino-connectors.tar.gz
- File path = new File("${plugins_compression}")
- if (path.exists() && path.isFile()) {
- logger.info("${plugins_compression} has been downloaded")
- } else {
- logger.info("start delete trino-connector plugins dir ...")
- def delete_local_connectors_tar = "rm -r ${plugins_dir}".execute()
- logger.info("start download trino-connector plugins ...")
- def s3_url = getS3Url()
-
- logger.info("getS3Url ==== ${s3_url}")
- def download_connectors_tar = "/usr/bin/curl ${s3_url}/regression/trino-connectors.tar.gz --output ${plugins_compression}"
- logger.info("download cmd : ${download_connectors_tar}")
- def run_download_connectors_cmd = download_connectors_tar.execute().getText()
- logger.info("result: ${run_download_connectors_cmd}")
- logger.info("finish download ${plugins_compression} ...")
+ def host_ips = new ArrayList()
+ String[][] backends = sql """ show backends """
+ for (def b in backends) {
+ host_ips.add(b[1])
}
-
- // decompression trino-plugins.tar.gz
- File dir = new File("${plugins_dir}")
- if (dir.exists() && dir.isDirectory()) {
- logger.info("${plugins_dir} dir has been decompressed")
- } else {
- if (path.exists() && path.isFile()) {
- def run_cmd = "tar -zxvf ${plugins_compression} -C ${trino_connector_download_dir}"
- logger.info("run_cmd : $run_cmd")
- def run_decompress_cmd = run_cmd.execute().getText()
- logger.info("result: $run_decompress_cmd")
- } else {
- logger.info("${plugins_compression} is not exist or is not a file.")
- throw exception
- }
+ String [][] frontends = sql """ show frontends """
+ for (def f in frontends) {
+ host_ips.add(f[1])
}
-
-
+ dispatchTrinoConnectors(host_ips.unique())
def q01 = {
qt_q01 """
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_schema_evolution.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_schema_evolution.groovy
index 7fe152ccff6..a3662eb4b0c 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_schema_evolution.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_schema_evolution.groovy
@@ -16,52 +16,16 @@
// under the License.
suite("test_trino_hive_schema_evolution", "p0,external,hive,external_docker,external_docker_hive") {
- def trino_connector_download_dir = context.config.otherConfigs.get("trinoPluginsPath")
-
- // mkdir trino_connector_download_dir
- logger.info("start create dir ${trino_connector_download_dir} ...")
- def mkdir_connectors_tar = "mkdir -p ${trino_connector_download_dir}".execute().getText()
- logger.info("finish create dir, result: ${mkdir_connectors_tar} ...")
-
-
- def plugins_compression = "${trino_connector_download_dir}/trino-connectors.tar.gz"
- def plugins_dir = "${trino_connector_download_dir}/connectors"
-
- // download trino-connectors.tar.gz
- File path = new File("${plugins_compression}")
- if (path.exists() && path.isFile()) {
- logger.info("${plugins_compression} has been downloaded")
- } else {
- logger.info("start delete trino-connector plugins dir ...")
- def delete_local_connectors_tar = "rm -r ${plugins_dir}".execute()
- logger.info("start download trino-connector plugins ...")
- def s3_url = getS3Url()
-
- logger.info("getS3Url ==== ${s3_url}")
- def download_connectors_tar = "/usr/bin/curl ${s3_url}/regression/trino-connectors.tar.gz --output ${plugins_compression}"
- logger.info("download cmd : ${download_connectors_tar}")
- def run_download_connectors_cmd = download_connectors_tar.execute().getText()
- logger.info("result: ${run_download_connectors_cmd}")
- logger.info("finish download ${plugins_compression} ...")
+ def host_ips = new ArrayList()
+ String[][] backends = sql """ show backends """
+ for (def b in backends) {
+ host_ips.add(b[1])
}
-
- // decompression trino-plugins.tar.gz
- File dir = new File("${plugins_dir}")
- if (dir.exists() && dir.isDirectory()) {
- logger.info("${plugins_dir} dir has been decompressed")
- } else {
- if (path.exists() && path.isFile()) {
- def run_cmd = "tar -zxvf ${plugins_compression} -C ${trino_connector_download_dir}"
- logger.info("run_cmd : $run_cmd")
- def run_decompress_cmd = run_cmd.execute().getText()
- logger.info("result: $run_decompress_cmd")
- } else {
- logger.info("${plugins_compression} is not exist or is not a file.")
- throw exception
- }
+ String [][] frontends = sql """ show frontends """
+ for (def f in frontends) {
+ host_ips.add(f[1])
}
-
-
+ dispatchTrinoConnectors(host_ips.unique())
def q_text = {
qt_q01 """
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_serde_prop.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_serde_prop.groovy
index d3437417f22..c2caf784e45 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_serde_prop.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_serde_prop.groovy
@@ -16,52 +16,16 @@
// under the License.
suite("test_trino_hive_serde_prop", "external_docker,hive,external_docker_hive,p0,external") {
- def trino_connector_download_dir = context.config.otherConfigs.get("trinoPluginsPath")
-
- // mkdir trino_connector_download_dir
- logger.info("start create dir ${trino_connector_download_dir} ...")
- def mkdir_connectors_tar = "mkdir -p ${trino_connector_download_dir}".execute().getText()
- logger.info("finish create dir, result: ${mkdir_connectors_tar} ...")
-
-
- def plugins_compression = "${trino_connector_download_dir}/trino-connectors.tar.gz"
- def plugins_dir = "${trino_connector_download_dir}/connectors"
-
- // download trino-connectors.tar.gz
- File path = new File("${plugins_compression}")
- if (path.exists() && path.isFile()) {
- logger.info("${plugins_compression} has been downloaded")
- } else {
- logger.info("start delete trino-connector plugins dir ...")
- def delete_local_connectors_tar = "rm -r ${plugins_dir}".execute()
- logger.info("start download trino-connector plugins ...")
- def s3_url = getS3Url()
-
- logger.info("getS3Url ==== ${s3_url}")
- def download_connectors_tar = "/usr/bin/curl ${s3_url}/regression/trino-connectors.tar.gz --output ${plugins_compression}"
- logger.info("download cmd : ${download_connectors_tar}")
- def run_download_connectors_cmd = download_connectors_tar.execute().getText()
- logger.info("result: ${run_download_connectors_cmd}")
- logger.info("finish download ${plugins_compression} ...")
+ def host_ips = new ArrayList()
+ String[][] backends = sql """ show backends """
+ for (def b in backends) {
+ host_ips.add(b[1])
}
-
- // decompression trino-plugins.tar.gz
- File dir = new File("${plugins_dir}")
- if (dir.exists() && dir.isDirectory()) {
- logger.info("${plugins_dir} dir has been decompressed")
- } else {
- if (path.exists() && path.isFile()) {
- def run_cmd = "tar -zxvf ${plugins_compression} -C ${trino_connector_download_dir}"
- logger.info("run_cmd : $run_cmd")
- def run_decompress_cmd = run_cmd.execute().getText()
- logger.info("result: $run_decompress_cmd")
- } else {
- logger.info("${plugins_compression} is not exist or is not a file.")
- throw exception
- }
+ String [][] frontends = sql """ show frontends """
+ for (def f in frontends) {
+ host_ips.add(f[1])
}
-
-
+ dispatchTrinoConnectors(host_ips.unique())
String enabled = context.config.otherConfigs.get("enableHiveTest")
if (enabled != null && enabled.equalsIgnoreCase("true")) {
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tablesample_p0.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tablesample_p0.groovy
index 1a834b35e02..83c358e05fc 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tablesample_p0.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tablesample_p0.groovy
@@ -16,52 +16,16 @@
// under the License.
suite("test_trino_hive_tablesample_p0", "all_types,p0,external,hive,external_docker,external_docker_hive") {
- def trino_connector_download_dir = context.config.otherConfigs.get("trinoPluginsPath")
-
- // mkdir trino_connector_download_dir
- logger.info("start create dir ${trino_connector_download_dir} ...")
- def mkdir_connectors_tar = "mkdir -p ${trino_connector_download_dir}".execute().getText()
- logger.info("finish create dir, result: ${mkdir_connectors_tar} ...")
-
-
- def plugins_compression = "${trino_connector_download_dir}/trino-connectors.tar.gz"
- def plugins_dir = "${trino_connector_download_dir}/connectors"
-
- // download trino-connectors.tar.gz
- File path = new File("${plugins_compression}")
- if (path.exists() && path.isFile()) {
- logger.info("${plugins_compression} has been downloaded")
- } else {
- logger.info("start delete trino-connector plugins dir ...")
- def delete_local_connectors_tar = "rm -r ${plugins_dir}".execute()
- logger.info("start download trino-connector plugins ...")
- def s3_url = getS3Url()
-
- logger.info("getS3Url ==== ${s3_url}")
- def download_connectors_tar = "/usr/bin/curl ${s3_url}/regression/trino-connectors.tar.gz --output ${plugins_compression}"
- logger.info("download cmd : ${download_connectors_tar}")
- def run_download_connectors_cmd = download_connectors_tar.execute().getText()
- logger.info("result: ${run_download_connectors_cmd}")
- logger.info("finish download ${plugins_compression} ...")
+ def host_ips = new ArrayList()
+ String[][] backends = sql """ show backends """
+ for (def b in backends) {
+ host_ips.add(b[1])
}
-
- // decompression trino-plugins.tar.gz
- File dir = new File("${plugins_dir}")
- if (dir.exists() && dir.isDirectory()) {
- logger.info("${plugins_dir} dir has been decompressed")
- } else {
- if (path.exists() && path.isFile()) {
- def run_cmd = "tar -zxvf ${plugins_compression} -C ${trino_connector_download_dir}"
- logger.info("run_cmd : $run_cmd")
- def run_decompress_cmd = run_cmd.execute().getText()
- logger.info("result: $run_decompress_cmd")
- } else {
- logger.info("${plugins_compression} is not exist or is not a file.")
- throw exception
- }
+ String [][] frontends = sql """ show frontends """
+ for (def f in frontends) {
+ host_ips.add(f[1])
}
-
-
+ dispatchTrinoConnectors(host_ips.unique())
String enabled = context.config.otherConfigs.get("enableHiveTest")
if (enabled != null && enabled.equalsIgnoreCase("true")) {
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_orc.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_orc.groovy
index d8de7faefbc..76769332f31 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_orc.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_orc.groovy
@@ -16,51 +16,16 @@
// under the License.
suite("test_trino_hive_tpch_sf1_orc", "p0,external,hive,external_docker,external_docker_hive") {
- def trino_connector_download_dir = context.config.otherConfigs.get("trinoPluginsPath")
-
- // mkdir trino_connector_download_dir
- logger.info("start create dir ${trino_connector_download_dir} ...")
- def mkdir_connectors_tar = "mkdir -p ${trino_connector_download_dir}".execute().getText()
- logger.info("finish create dir, result: ${mkdir_connectors_tar} ...")
-
-
- def plugins_compression = "${trino_connector_download_dir}/trino-connectors.tar.gz"
- def plugins_dir = "${trino_connector_download_dir}/connectors"
- // download trino-connectors.tar.gz
- File path = new File("${plugins_compression}")
- if (path.exists() && path.isFile()) {
- logger.info("${plugins_compression} has been downloaded")
- } else {
- logger.info("start delete trino-connector plugins dir ...")
- def delete_local_connectors_tar = "rm -r ${plugins_dir}".execute()
- logger.info("start download trino-connector plugins ...")
- def s3_url = getS3Url()
-
- logger.info("getS3Url ==== ${s3_url}")
- def download_connectors_tar = "/usr/bin/curl ${s3_url}/regression/trino-connectors.tar.gz --output ${plugins_compression}"
- logger.info("download cmd : ${download_connectors_tar}")
- def run_download_connectors_cmd = download_connectors_tar.execute().getText()
- logger.info("result: ${run_download_connectors_cmd}")
- logger.info("finish download ${plugins_compression} ...")
+ def host_ips = new ArrayList()
+ String[][] backends = sql """ show backends """
+ for (def b in backends) {
+ host_ips.add(b[1])
}
-
- // decompression trino-plugins.tar.gz
- File dir = new File("${plugins_dir}")
- if (dir.exists() && dir.isDirectory()) {
- logger.info("${plugins_dir} dir has been decompressed")
- } else {
- if (path.exists() && path.isFile()) {
- def run_cmd = "tar -zxvf ${plugins_compression} -C ${trino_connector_download_dir}"
- logger.info("run_cmd : $run_cmd")
- def run_decompress_cmd = run_cmd.execute().getText()
- logger.info("result: $run_decompress_cmd")
- } else {
- logger.info("${plugins_compression} is not exist or is not a file.")
- throw exception
- }
+ String [][] frontends = sql """ show frontends """
+ for (def f in frontends) {
+ host_ips.add(f[1])
}
-
-
+ dispatchTrinoConnectors(host_ips.unique())
String enable_file_cache = "false"
def q01 = {
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_parquet.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_parquet.groovy
index 0675e28bee2..e09782771fa 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_parquet.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_parquet.groovy
@@ -16,51 +16,16 @@
// under the License.
suite("test_trino_hive_tpch_sf1_parquet", "p0,external,hive,external_docker,external_docker_hive") {
- def trino_connector_download_dir = context.config.otherConfigs.get("trinoPluginsPath")
-
- // mkdir trino_connector_download_dir
- logger.info("start create dir ${trino_connector_download_dir} ...")
- def mkdir_connectors_tar = "mkdir -p ${trino_connector_download_dir}".execute().getText()
- logger.info("finish create dir, result: ${mkdir_connectors_tar} ...")
-
-
- def plugins_compression = "${trino_connector_download_dir}/trino-connectors.tar.gz"
- def plugins_dir = "${trino_connector_download_dir}/connectors"
- // download trino-connectors.tar.gz
- File path = new File("${plugins_compression}")
- if (path.exists() && path.isFile()) {
- logger.info("${plugins_compression} has been downloaded")
- } else {
- logger.info("start delete trino-connector plugins dir ...")
- def delete_local_connectors_tar = "rm -r ${plugins_dir}".execute()
- logger.info("start download trino-connector plugins ...")
- def s3_url = getS3Url()
-
- logger.info("getS3Url ==== ${s3_url}")
- def download_connectors_tar = "/usr/bin/curl ${s3_url}/regression/trino-connectors.tar.gz --output ${plugins_compression}"
- logger.info("download cmd : ${download_connectors_tar}")
- def run_download_connectors_cmd = download_connectors_tar.execute().getText()
- logger.info("result: ${run_download_connectors_cmd}")
- logger.info("finish download ${plugins_compression} ...")
+ def host_ips = new ArrayList()
+ String[][] backends = sql """ show backends """
+ for (def b in backends) {
+ host_ips.add(b[1])
}
-
- // decompression trino-plugins.tar.gz
- File dir = new File("${plugins_dir}")
- if (dir.exists() && dir.isDirectory()) {
- logger.info("${plugins_dir} dir has been decompressed")
- } else {
- if (path.exists() && path.isFile()) {
- def run_cmd = "tar -zxvf ${plugins_compression} -C ${trino_connector_download_dir}"
- logger.info("run_cmd : $run_cmd")
- def run_decompress_cmd = run_cmd.execute().getText()
- logger.info("result: $run_decompress_cmd")
- } else {
- logger.info("${plugins_compression} is not exist or is not a file.")
- throw exception
- }
+ String [][] frontends = sql """ show frontends """
+ for (def f in frontends) {
+ host_ips.add(f[1])
}
-
-
+ dispatchTrinoConnectors(host_ips.unique())
String enable_file_cache = "false"
def q01 = {
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_prepare_hive_data_in_case.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_prepare_hive_data_in_case.groovy
index 037ad855cf2..b465c2373d2 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_prepare_hive_data_in_case.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_prepare_hive_data_in_case.groovy
@@ -16,52 +16,16 @@
// under the License.
suite("test_trino_prepare_hive_data_in_case", "p0,external,hive,external_docker,external_docker_hive") {
- def trino_connector_download_dir = context.config.otherConfigs.get("trinoPluginsPath")
-
- // mkdir trino_connector_download_dir
- logger.info("start create dir ${trino_connector_download_dir} ...")
- def mkdir_connectors_tar = "mkdir -p ${trino_connector_download_dir}".execute().getText()
- logger.info("finish create dir, result: ${mkdir_connectors_tar} ...")
-
-
- def plugins_compression = "${trino_connector_download_dir}/trino-connectors.tar.gz"
- def plugins_dir = "${trino_connector_download_dir}/connectors"
-
- // download trino-connectors.tar.gz
- File path = new File("${plugins_compression}")
- if (path.exists() && path.isFile()) {
- logger.info("${plugins_compression} has been downloaded")
- } else {
- logger.info("start delete trino-connector plugins dir ...")
- def delete_local_connectors_tar = "rm -r ${plugins_dir}".execute()
- logger.info("start download trino-connector plugins ...")
- def s3_url = getS3Url()
-
- logger.info("getS3Url ==== ${s3_url}")
- def download_connectors_tar = "/usr/bin/curl ${s3_url}/regression/trino-connectors.tar.gz --output ${plugins_compression}"
- logger.info("download cmd : ${download_connectors_tar}")
- def run_download_connectors_cmd = download_connectors_tar.execute().getText()
- logger.info("result: ${run_download_connectors_cmd}")
- logger.info("finish download ${plugins_compression} ...")
+ def host_ips = new ArrayList()
+ String[][] backends = sql """ show backends """
+ for (def b in backends) {
+ host_ips.add(b[1])
}
-
- // decompression trino-plugins.tar.gz
- File dir = new File("${plugins_dir}")
- if (dir.exists() && dir.isDirectory()) {
- logger.info("${plugins_dir} dir has been decompressed")
- } else {
- if (path.exists() && path.isFile()) {
- def run_cmd = "tar -zxvf ${plugins_compression} -C ${trino_connector_download_dir}"
- logger.info("run_cmd : $run_cmd")
- def run_decompress_cmd = run_cmd.execute().getText()
- logger.info("result: $run_decompress_cmd")
- } else {
- logger.info("${plugins_compression} is not exist or is not a file.")
- throw exception
- }
+ String [][] frontends = sql """ show frontends """
+ for (def f in frontends) {
+ host_ips.add(f[1])
}
-
-
+ dispatchTrinoConnectors(host_ips.unique())
String enabled = context.config.otherConfigs.get("enableHiveTest")
def catalog_name = "test_trino_prepare_hive_data_in_case"
diff --git a/regression-test/suites/external_table_p0/trino_connector/test_plugins_download.groovy b/regression-test/suites/external_table_p0/trino_connector/test_plugins_download.groovy
index acf9f4558df..3d28612cf62 100644
--- a/regression-test/suites/external_table_p0/trino_connector/test_plugins_download.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/test_plugins_download.groovy
@@ -16,48 +16,14 @@
// under the License.
suite("test_plugins_download", "external,hive,external_docker,external_docker_hive") {
- def trino_connector_download_dir = context.config.otherConfigs.get("trinoPluginsPath")
-
- // mkdir trino_connector_download_dir
- logger.info("start create dir ${trino_connector_download_dir} ...")
- def mkdir_connectors_tar = "mkdir -p ${trino_connector_download_dir}".execute().getText()
- logger.info("finish create dir, result: ${mkdir_connectors_tar} ...")
-
-
- def plugins_compression = "${trino_connector_download_dir}/trino-connectors.tar.gz"
- def plugins_dir = "${trino_connector_download_dir}/connectors"
-
- // download trino-connectors.tar.gz
- File path = new File("${plugins_compression}")
- if (path.exists() && path.isFile()) {
- logger.info("${plugins_compression} has been downloaded")
- } else {
- logger.info("start delete trino-connector plugins dir ...")
- def delete_local_connectors_tar = "rm -r ${plugins_dir}".execute()
- logger.info("start download trino-connector plugins ...")
- def s3_url = getS3Url()
-
- logger.info("getS3Url ==== ${s3_url}")
- def download_connectors_tar = "/usr/bin/curl ${s3_url}/regression/trino-connectors.tar.gz --output ${plugins_compression}"
- logger.info("download cmd : ${download_connectors_tar}")
- def run_download_connectors_cmd = download_connectors_tar.execute().getText()
- logger.info("result: ${run_download_connectors_cmd}")
- logger.info("finish download ${plugins_compression} ...")
+ def host_ips = new ArrayList()
+ String[][] backends = sql """ show backends """
+ for (def b in backends) {
+ host_ips.add(b[1])
}
-
- // decompression trino-plugins.tar.gz
- File dir = new File("${plugins_dir}")
- if (dir.exists() && dir.isDirectory()) {
- logger.info("${plugins_dir} dir has been decompressed")
- } else {
- if (path.exists() && path.isFile()) {
- def run_cmd = "tar -zxvf ${plugins_compression} -C ${trino_connector_download_dir}"
- logger.info("run_cmd : $run_cmd")
- def run_decompress_cmd = run_cmd.execute().getText()
- logger.info("result: $run_decompress_cmd")
- } else {
- logger.info("${plugins_compression} is not exist or is not a file.")
- throw exception
- }
+ String [][] frontends = sql """ show frontends """
+ for (def f in frontends) {
+ host_ips.add(f[1])
}
+ dispatchTrinoConnectors(host_ips.unique())
}
\ No newline at end of file
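Taken together, each suite's former download-and-extract boilerplate collapses into one pattern: collect every FE and BE host and hand the list to the new helper. A minimal sketch of what a new trino_connector suite would contain (the suite name and tag list here are illustrative, not taken from the patch):

    suite("my_trino_connector_case", "p0,external,hive,external_docker,external_docker_hive") {
        // gather every backend and frontend host IP reported by the cluster
        def host_ips = new ArrayList()
        String[][] backends = sql """ show backends """
        for (def b in backends) {
            host_ips.add(b[1])
        }
        String[][] frontends = sql """ show frontends """
        for (def f in frontends) {
            host_ips.add(f[1])
        }
        // download the connector tarball once and scp it to every host
        dispatchTrinoConnectors(host_ips.unique())
        // ... actual test logic ...
    }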
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]