This is an automated email from the ASF dual-hosted git repository.
morningman pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-doris.git
The following commit(s) were added to refs/heads/master by this push:
new 4e3b576 [NewFeature] Support ExternalCatalogResource to simplify
external table manage operation. (#4559)
4e3b576 is described below
commit 4e3b576fd3b09920b65582d8899336bd95fa7cb7
Author: HappenLee <[email protected]>
AuthorDate: Fri Sep 25 10:20:33 2020 +0800
[NewFeature] Support ExternalCatalogResource to simplify external table
manage operation. (#4559)
1. Add new Resource ExternalCatalogResource
```
create external resource "odbc"
properties
(
"type" = "odbc_catalog", (required)
"user" = "test",(required)
"password" = "", (required)
"host" = "192.168.0.1", (required)
"port" = "8086", (required)
"odbc_type" = "oracle", (optional, only odbc external table
use)
"driver" = "Oracle 19 ODBC driver" (optional, only odbc external table
use)
)
```
2. After creating an ExternalCatalogResource, an external table can be created like:
```
CREATE TABLE `test_mysql` (
`k1` tinyint(4) NOT NULL,
`k2` smallint(6) NOT NULL,
`k3` int(11) NOT NULL,
`k4` bigint(20) NOT NULL,
`k5` decimal(9,3) NOT NULL,
`k6` char(5) NOT NULL,
`k10` date DEFAULT NULL,
`k11` datetime DEFAULT NULL,
`k7` varchar(20) NOT NULL,
`k8` double NOT NULL,
`k9` float NOT NULL
) ENGINE=MYSQL
PROPERTIES (
"odbc_catalog_resource" = "odbc",
"database" = "test",
"table" = "test"
);
```
---
.../sql-statements/Data Definition/CREATE TABLE.md | 59 +++++--
.../sql-statements/Data Definition/CREATE TABLE.md | 49 +++++-
.../apache/doris/analysis/ShowResourcesStmt.java | 2 +-
.../java/org/apache/doris/catalog/Catalog.java | 30 ++--
.../java/org/apache/doris/catalog/MysqlTable.java | 186 ++++++++++++++------
.../apache/doris/catalog/OdbcCatalogResource.java | 109 ++++++++++++
.../java/org/apache/doris/catalog/OdbcTable.java | 188 ++++++++++++++-------
.../java/org/apache/doris/catalog/Resource.java | 6 +-
.../java/org/apache/doris/catalog/ResourceMgr.java | 4 +-
.../org/apache/doris/common/FeMetaVersion.java | 4 +-
.../org/apache/doris/persist/gson/GsonUtils.java | 4 +-
.../doris/analysis/CreateResourceStmtTest.java | 21 ++-
.../doris/catalog/OdbcCatalogResourceTest.java | 159 +++++++++++++++++
.../org/apache/doris/planner/QueryPlanTest.java | 4 +-
14 files changed, 671 insertions(+), 154 deletions(-)
diff --git a/docs/en/sql-reference/sql-statements/Data Definition/CREATE
TABLE.md b/docs/en/sql-reference/sql-statements/Data Definition/CREATE TABLE.md
index 1171adc..f8a3188 100644
--- a/docs/en/sql-reference/sql-statements/Data Definition/CREATE TABLE.md
+++ b/docs/en/sql-reference/sql-statements/Data Definition/CREATE TABLE.md
@@ -94,6 +94,7 @@ Syntax:
* BITMAP_UNION: Only for BITMAP type
Allow NULL: Default is NOT NULL. NULL value should be represented as `\N`
in load source file.
Notice:
+
The origin value of BITMAP_UNION column should be TINYINT, SMALLINT,
INT, BIGINT.
2. index_definition
Syntax:
@@ -133,14 +134,14 @@ Syntax:
"line_delimiter" = "value_delimiter"
)
```
-
+
```
BROKER PROPERTIES(
"username" = "name",
"password" = "password"
)
```
-
+
For different broker, the broker properties are different
Notice:
Files name in "path" is separated by ",". If file name includes ",",
use "%2c" instead. If file name includes "%", use "%25" instead.
@@ -220,7 +221,7 @@ Syntax:
["replication_num" = "3"]
)
```
-
+
storage_medium: SSD or HDD, The default initial storage media
can be specified by `default_storage_medium= XXX` in the fe configuration file
`fe.conf`, or, if not, by default, HDD.
Note: when FE configuration
'enable_strict_storage_medium_check' is' True ', if the corresponding storage
medium is not set in the cluster, the construction clause 'Failed to find
enough host in all backends with storage medium is SSD|HDD'.
storage_cooldown_time: If storage_medium is SSD, data will be
automatically moved to HDD when timeout.
@@ -246,9 +247,9 @@ Syntax:
"colocate_with"="table1"
)
```
-
+
4) if you want to use the dynamic partitioning feature, specify it in
properties
-
+
```
PROPERTIES (
"dynamic_partition.enable" = "true|false",
@@ -268,6 +269,7 @@ Syntax:
Dynamic_partition. Prefix: used to specify the partition name prefix to
be created, such as the partition name prefix p, automatically creates the
partition name p20200108
Dynamic_partition. Buckets: specifies the number of partition buckets
that are automatically created
+ ```
8. rollup_index
grammar:
```
@@ -320,6 +322,7 @@ Syntax:
"storage_medium" = "SSD",
"storage_cooldown_time" = "2015-06-04 00:00:00"
);
+ ```
3. Create an olap table, with range partitioned, distributed by hash.
@@ -347,16 +350,16 @@ Syntax:
"storage_medium" = "SSD", "storage_cooldown_time" = "2015-06-04 00:00:00"
);
```
-
+
Explain:
This statement will create 3 partitions:
-
+
```
( { MIN }, {"2014-01-01"} )
[ {"2014-01-01"}, {"2014-06-01"} )
[ {"2014-06-01"}, {"2014-12-01"} )
```
-
+
Data outside these ranges will not be loaded.
2) Fixed Range
@@ -381,8 +384,8 @@ Syntax:
);
4. Create a mysql table
-
- ```
+ 4.1 Create MySQL table directly from external table information
+```
CREATE EXTERNAL TABLE example_db.table_mysql
(
k1 DATE,
@@ -400,8 +403,38 @@ Syntax:
"password" = "mysql_passwd",
"database" = "mysql_db_test",
"table" = "mysql_table_test"
- );
- ```
+ )
+```
+
+ 4.2 Create MySQL table with external ODBC catalog resource
+```
+ CREATE EXTERNAL RESOURCE "mysql_resource"
+ PROPERTIES
+ (
+ "type" = "odbc_catalog",
+ "user" = "mysql_user",
+ "password" = "mysql_passwd",
+ "host" = "127.0.0.1",
+ "port" = "8239"
+ );
+```
+```
+ CREATE EXTERNAL TABLE example_db.table_mysql
+ (
+ k1 DATE,
+ k2 INT,
+ k3 SMALLINT,
+ k4 VARCHAR(2048),
+ k5 DATETIME
+ )
+ ENGINE=mysql
+ PROPERTIES
+ (
+ "odbc_catalog_resource" = "mysql_resource",
+ "database" = "mysql_db_test",
+ "table" = "mysql_table_test"
+ )
+```
5. Create a broker table, with file on HDFS, line delimit by "|", column
separated by "\n"
@@ -549,7 +582,7 @@ Syntax:
"dynamic_partition.prefix" = "p",
"dynamic_partition.buckets" = "32"
);
- ```
+ ```
12. Create a table with rollup index
```
CREATE TABLE example_db.rolup_index_table
diff --git a/docs/zh-CN/sql-reference/sql-statements/Data Definition/CREATE
TABLE.md b/docs/zh-CN/sql-reference/sql-statements/Data Definition/CREATE
TABLE.md
index ca223b0..f1e7c8f 100644
--- a/docs/zh-CN/sql-reference/sql-statements/Data Definition/CREATE TABLE.md
+++ b/docs/zh-CN/sql-reference/sql-statements/Data Definition/CREATE TABLE.md
@@ -151,7 +151,7 @@ under the License.
注意:
"path" 中如果有多个文件,用逗号[,]分割。如果文件名中包含逗号,那么使用 %2c 来替代。如果文件名中包含 %,使用 %25 代替
现在文件内容格式支持CSV,支持GZ,BZ2,LZ4,LZO(LZOP) 压缩格式。
-
+
3) 如果是 hive,则需要在 properties 提供以下信息:
```
PROPERTIES (
@@ -159,7 +159,7 @@ under the License.
"table" = "hive_table_name",
"hive.metastore.uris" = "thrift://127.0.0.1:9083"
)
-
+
```
其中 database 是 hive 表对应的库名字,table 是 hive 表的名字,hive.metastore.uris 是 hive
metastore 服务地址。
注意:目前hive外部表仅用于Spark Load使用,不支持查询。
@@ -193,7 +193,7 @@ under the License.
...
)
```
-
+
说明:
使用指定的 key 列和指定的数值范围进行分区。
1) 分区名称仅支持字母开头,字母、数字和下划线组成
@@ -202,7 +202,7 @@ under the License.
3) 分区为左闭右开区间,首个分区的左边界为做最小值
4) NULL 值只会存放在包含最小值的分区中。当包含最小值的分区被删除后,NULL 值将无法导入。
5) 可以指定一列或多列作为分区列。如果分区值缺省,则会默认填充最小值。
-
+
注意:
1) 分区一般用于时间维度的数据管理
2) 有数据回溯需求的,可以考虑首个分区为空分区,以便后续增加分区
@@ -270,9 +270,9 @@ under the License.
"colocate_with"="table1"
)
```
-
+
4) 如果希望使用动态分区特性,需要在properties 中指定
-
+
```
PROPERTIES (
"dynamic_partition.enable" = "true|false",
@@ -288,7 +288,7 @@ under the License.
dynamic_partition.end: 用于指定提前创建的分区数量。值必须大于0。
dynamic_partition.prefix: 用于指定创建的分区名前缀,例如分区名前缀为p,则自动创建分区名为p20200108
dynamic_partition.buckets: 用于指定自动创建的分区分桶数量
-
+
5) 建表时可以批量创建多个 Rollup
语法:
```
@@ -296,7 +296,7 @@ under the License.
[FROM from_index_name]
[PROPERTIES ("key"="value", ...)],...)
```
-
+
6) 如果希望使用 内存表 特性,需要在 properties 中指定
```
@@ -419,6 +419,7 @@ under the License.
4. 创建一个 mysql 表
+ 4.1 直接通过外表信息创建mysql表
```
CREATE EXTERNAL TABLE example_db.table_mysql
(
@@ -440,6 +441,36 @@ under the License.
)
```
+ 4.2 通过External Catalog Resource创建mysql表
+```
+ CREATE EXTERNAL RESOURCE "mysql_resource"
+ PROPERTIES
+ (
+ "type" = "odbc_catalog",
+ "user" = "mysql_user",
+ "password" = "mysql_passwd",
+ "host" = "127.0.0.1",
+ "port" = "8239"
+ );
+```
+```
+ CREATE EXTERNAL TABLE example_db.table_mysql
+ (
+ k1 DATE,
+ k2 INT,
+ k3 SMALLINT,
+ k4 VARCHAR(2048),
+ k5 DATETIME
+ )
+ ENGINE=mysql
+ PROPERTIES
+ (
+ "odbc_catalog_resource" = "mysql_resource",
+ "database" = "mysql_db_test",
+ "table" = "mysql_table_test"
+ )
+```
+
5. 创建一个数据文件存储在HDFS上的 broker 外部表, 数据使用 "|" 分割,"\n" 换行
```
@@ -650,3 +681,5 @@ under the License.
## keyword
CREATE,TABLE
+
+```
diff --git
a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowResourcesStmt.java
b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowResourcesStmt.java
index 1a1c88d..edb39ff 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowResourcesStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowResourcesStmt.java
@@ -215,7 +215,7 @@ public class ShowResourcesStmt extends ShowStmt {
if (!valid) {
throw new AnalysisException("Where clause should looks like: NAME
= \"your_resource_name\","
- + " or NAME LIKE \"matcher\", " + " or RESOURCETYPE =
\"SPARK\", "
+ + " or NAME LIKE \"matcher\", " + " or RESOURCETYPE =
\"resource_type\", "
+ " or compound predicate with operator AND");
}
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/Catalog.java
b/fe/fe-core/src/main/java/org/apache/doris/catalog/Catalog.java
index 58a864e..8b47b11 100755
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/Catalog.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/Catalog.java
@@ -4059,10 +4059,14 @@ public class Catalog {
}
// properties
sb.append("\nPROPERTIES (\n");
- sb.append("\"host\" =
\"").append(mysqlTable.getHost()).append("\",\n");
- sb.append("\"port\" =
\"").append(mysqlTable.getPort()).append("\",\n");
- sb.append("\"user\" =
\"").append(mysqlTable.getUserName()).append("\",\n");
- sb.append("\"password\" = \"").append(hidePassword ? "" :
mysqlTable.getPasswd()).append("\",\n");
+ if (mysqlTable.getOdbcCatalogResourceName() == null) {
+ sb.append("\"host\" =
\"").append(mysqlTable.getHost()).append("\",\n");
+ sb.append("\"port\" =
\"").append(mysqlTable.getPort()).append("\",\n");
+ sb.append("\"user\" =
\"").append(mysqlTable.getUserName()).append("\",\n");
+ sb.append("\"password\" = \"").append(hidePassword ? "" :
mysqlTable.getPasswd()).append("\",\n");
+ } else {
+ sb.append("\"odbc_catalog_resource\" =
\"").append(mysqlTable.getOdbcCatalogResourceName()).append("\",\n");
+ }
sb.append("\"database\" =
\"").append(mysqlTable.getMysqlDatabaseName()).append("\",\n");
sb.append("\"table\" =
\"").append(mysqlTable.getMysqlTableName()).append("\"\n");
sb.append(")");
@@ -4073,14 +4077,18 @@ public class Catalog {
}
// properties
sb.append("\nPROPERTIES (\n");
- sb.append("\"host\" =
\"").append(odbcTable.getHost()).append("\",\n");
- sb.append("\"port\" =
\"").append(odbcTable.getPort()).append("\",\n");
- sb.append("\"user\" =
\"").append(odbcTable.getUserName()).append("\",\n");
- sb.append("\"password\" = \"").append(hidePassword ? "" :
odbcTable.getPasswd()).append("\",\n");
+ if (odbcTable.getOdbcCatalogResourceName() == null) {
+ sb.append("\"host\" =
\"").append(odbcTable.getHost()).append("\",\n");
+ sb.append("\"port\" =
\"").append(odbcTable.getPort()).append("\",\n");
+ sb.append("\"user\" =
\"").append(odbcTable.getUserName()).append("\",\n");
+ sb.append("\"password\" = \"").append(hidePassword ? "" :
odbcTable.getPasswd()).append("\",\n");
+ sb.append("\"driver\" =
\"").append(odbcTable.getOdbcDriver()).append("\",\n");
+ sb.append("\"odbc_type\" =
\"").append(odbcTable.getOdbcTableTypeName()).append("\",\n");
+ } else {
+ sb.append("\"odbc_catalog_resource\" =
\"").append(odbcTable.getOdbcCatalogResourceName()).append("\",\n");
+ }
sb.append("\"database\" =
\"").append(odbcTable.getOdbcDatabaseName()).append("\",\n");
- sb.append("\"table\" =
\"").append(odbcTable.getOdbcTableName()).append("\",\n");
- sb.append("\"driver\" =
\"").append(odbcTable.getOdbcDriver()).append("\",\n");
- sb.append("\"type\" =
\"").append(odbcTable.getOdbcTableTypeName()).append("\"\n");
+ sb.append("\"table\" =
\"").append(odbcTable.getOdbcTableName()).append("\"\n");
sb.append(")");
} else if (table.getType() == TableType.BROKER) {
BrokerTable brokerTable = (BrokerTable) table;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/MysqlTable.java
b/fe/fe-core/src/main/java/org/apache/doris/catalog/MysqlTable.java
index 7fa733b..be7e49e 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/MysqlTable.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/MysqlTable.java
@@ -17,8 +17,12 @@
package org.apache.doris.catalog;
+import com.google.common.collect.Maps;
import org.apache.doris.common.DdlException;
+import org.apache.doris.common.FeMetaVersion;
import org.apache.doris.common.io.Text;
+import org.apache.doris.mysql.privilege.PrivPredicate;
+import org.apache.doris.qe.ConnectContext;
import org.apache.doris.thrift.TMySQLTable;
import org.apache.doris.thrift.TTableDescriptor;
import org.apache.doris.thrift.TTableType;
@@ -39,6 +43,7 @@ import java.util.zip.Adler32;
public class MysqlTable extends Table {
private static final Logger LOG = LogManager.getLogger(OlapTable.class);
+ private static final String ODBC_CATALOG_RESOURCE =
"odbc_catalog_resource";
private static final String MYSQL_HOST = "host";
private static final String MYSQL_PORT = "port";
private static final String MYSQL_USER = "user";
@@ -46,6 +51,7 @@ public class MysqlTable extends Table {
private static final String MYSQL_DATABASE = "database";
private static final String MYSQL_TABLE = "table";
+ private String odbcCatalogResourceName;
private String host;
private String port;
private String userName;
@@ -66,43 +72,62 @@ public class MysqlTable extends Table {
private void validate(Map<String, String> properties) throws DdlException {
if (properties == null) {
throw new DdlException("Please set properties of mysql table, "
- + "they are: host, port, user, password, database and
table");
+ + "they are: odbc_catalog_resource or [host, port, user,
password] and database and table");
}
- // Set up
- host = properties.get(MYSQL_HOST);
- if (Strings.isNullOrEmpty(host)) {
- throw new DdlException("Host of MySQL table is null. "
- + "Please add properties('host'='xxx.xxx.xxx.xxx') when
create table");
- }
+ if (properties.containsKey(ODBC_CATALOG_RESOURCE)) {
+ odbcCatalogResourceName = properties.get(ODBC_CATALOG_RESOURCE);
+
+ // 1. check whether resource exist
+ Resource oriResource =
Catalog.getCurrentCatalog().getResourceMgr().getResource(odbcCatalogResourceName);
+ if (oriResource == null) {
+ throw new DdlException("Resource does not exist. name: " +
odbcCatalogResourceName);
+ }
- port = properties.get(MYSQL_PORT);
- if (Strings.isNullOrEmpty(port)) {
- // Maybe null pointer or number convert
- throw new DdlException("Port of MySQL table is null. "
- + "Please add properties('port'='3306') when create
table");
+ // 2. check resource usage privilege
+ if
(!Catalog.getCurrentCatalog().getAuth().checkResourcePriv(ConnectContext.get(),
+ odbcCatalogResourceName,
+ PrivPredicate.USAGE)) {
+ throw new DdlException("USAGE denied to user '" +
ConnectContext.get().getQualifiedUser()
+ + "'@'" + ConnectContext.get().getRemoteIP()
+ + "' for resource '" + odbcCatalogResourceName + "'");
+ }
} else {
- try {
- Integer.valueOf(port);
- } catch (Exception e) {
- throw new DdlException("Port of MySQL table must be a number."
- + "Please add properties('port'='3306') when create
table");
+ // Set up
+ host = properties.get(MYSQL_HOST);
+ if (Strings.isNullOrEmpty(host)) {
+ throw new DdlException("Host of MySQL table is null. "
+ + "Please set proper resource or add
properties('host'='xxx.xxx.xxx.xxx') when create table");
+ }
+ port = properties.get(MYSQL_PORT);
+ if (Strings.isNullOrEmpty(port)) {
+ // Maybe null pointer or number convert
+ throw new DdlException("Port of MySQL table is null. "
+ + "Please set proper resource or add
properties('port'='3306') when create table");
+ } else {
+ try {
+ Integer.valueOf(port);
+ } catch (Exception e) {
+ throw new DdlException("Port of MySQL table must be a
number."
+ + "Please set proper resource or add
properties('port'='3306') when create table");
+
+ }
}
- }
- userName = properties.get(MYSQL_USER);
- if (Strings.isNullOrEmpty(userName)) {
- throw new DdlException("User of MySQL table is null. "
- + "Please add properties('user'='root') when create
table");
- }
+ userName = properties.get(MYSQL_USER);
+ if (Strings.isNullOrEmpty(userName)) {
+ throw new DdlException("User of MySQL table is null. "
+ + "Please set proper resource or add
properties('user'='root') when create table");
+ }
- passwd = properties.get(MYSQL_PASSWORD);
- if (passwd == null) {
- throw new DdlException("Password of MySQL table is null. "
- + "Please add properties('password'='xxxx') when create
table");
+ passwd = properties.get(MYSQL_PASSWORD);
+ if (passwd == null) {
+ throw new DdlException("Password of MySQL table is null. "
+ + "Please set proper resource or add
properties('password'='xxxx') when create table");
+ }
}
-
+
mysqlDatabaseName = properties.get(MYSQL_DATABASE);
if (Strings.isNullOrEmpty(mysqlDatabaseName)) {
throw new DdlException("Database of MySQL table is null. "
@@ -115,21 +140,51 @@ public class MysqlTable extends Table {
+ "Please add properties('table'='xxxx') when create
table");
}
}
+
+ private String getPropertyFromResource(String propertyName) {
+ OdbcCatalogResource odbcCatalogResource = (OdbcCatalogResource)
+
(Catalog.getCurrentCatalog().getResourceMgr().getResource(odbcCatalogResourceName));
+ if (odbcCatalogResource == null) {
+ throw new RuntimeException("Resource does not exist. name: " +
odbcCatalogResourceName);
+ }
+
+ String property = odbcCatalogResource.getProperties(propertyName);
+ if (property == null) {
+ throw new RuntimeException("The property:" + propertyName + " do
not set in resource " + odbcCatalogResourceName);
+ }
+ return property;
+ }
+
+ public String getOdbcCatalogResourceName() {
+ return odbcCatalogResourceName;
+ }
public String getHost() {
- return host;
+ if (host != null) {
+ return host;
+ }
+ return getPropertyFromResource(MYSQL_HOST);
}
public String getPort() {
- return port;
+ if (port != null) {
+ return port;
+ }
+ return getPropertyFromResource(MYSQL_PORT);
}
public String getUserName() {
- return userName;
+ if (userName != null) {
+ return userName;
+ }
+ return getPropertyFromResource(MYSQL_USER);
}
public String getPasswd() {
- return passwd;
+ if (passwd != null) {
+ return passwd;
+ }
+ return getPropertyFromResource(MYSQL_PASSWORD);
}
public String getMysqlDatabaseName() {
@@ -142,7 +197,7 @@ public class MysqlTable extends Table {
public TTableDescriptor toThrift() {
TMySQLTable tMySQLTable =
- new TMySQLTable(host, port, userName, passwd,
mysqlDatabaseName, mysqlTableName);
+ new TMySQLTable(getHost(), getPort(), getUserName(),
getPasswd(), mysqlDatabaseName, mysqlTableName);
TTableDescriptor tTableDescriptor = new TTableDescriptor(getId(),
TTableType.MYSQL_TABLE,
fullSchema.size(), 0, getName(), "");
tTableDescriptor.setMysqlTable(tMySQLTable);
@@ -161,13 +216,13 @@ public class MysqlTable extends Table {
// type
adler32.update(type.name().getBytes(charsetName));
// host
- adler32.update(host.getBytes(charsetName));
+ adler32.update(getHost().getBytes(charsetName));
// port
- adler32.update(port.getBytes(charsetName));
+ adler32.update(getPort().getBytes(charsetName));
// username
- adler32.update(userName.getBytes(charsetName));
+ adler32.update(getUserName().getBytes(charsetName));
// passwd
- adler32.update(passwd.getBytes(charsetName));
+ adler32.update(getPasswd().getBytes(charsetName));
// mysql db
adler32.update(mysqlDatabaseName.getBytes(charsetName));
// mysql table
@@ -185,23 +240,54 @@ public class MysqlTable extends Table {
public void write(DataOutput out) throws IOException {
super.write(out);
- Text.writeString(out, host);
- Text.writeString(out, port);
- Text.writeString(out, userName);
- Text.writeString(out, passwd);
- Text.writeString(out, mysqlDatabaseName);
- Text.writeString(out, mysqlTableName);
+ Map<String, String> serializeMap = Maps.newHashMap();
+ serializeMap.put(ODBC_CATALOG_RESOURCE, odbcCatalogResourceName);
+ serializeMap.put(MYSQL_HOST, host);
+ serializeMap.put(MYSQL_PORT, port);
+ serializeMap.put(MYSQL_USER, userName);
+ serializeMap.put(MYSQL_PASSWORD, passwd);
+ serializeMap.put(MYSQL_DATABASE, mysqlDatabaseName);
+ serializeMap.put(MYSQL_TABLE, mysqlTableName);
+
+ int size = (int) serializeMap.values().stream().filter(v -> {
+ return v != null;
+ }).count();
+ out.writeInt(size);
+ for (Map.Entry<String, String> kv : serializeMap.entrySet()) {
+ if (kv.getValue() != null) {
+ Text.writeString(out, kv.getKey());
+ Text.writeString(out, kv.getValue());
+ }
+ }
}
public void readFields(DataInput in) throws IOException {
super.readFields(in);
- // Read MySQL meta
- host = Text.readString(in);
- port = Text.readString(in);
- userName = Text.readString(in);
- passwd = Text.readString(in);
- mysqlDatabaseName = Text.readString(in);
- mysqlTableName = Text.readString(in);
+ if (Catalog.getCurrentCatalogJournalVersion() >=
FeMetaVersion.VERSION_92) {
+ // Read MySQL meta
+ int size = in.readInt();
+ Map<String, String> serializeMap = Maps.newHashMap();
+ for (int i = 0; i < size; i++) {
+ String key = Text.readString(in);
+ String value = Text.readString(in);
+ serializeMap.put(key, value);
+ }
+
+ odbcCatalogResourceName = serializeMap.get(ODBC_CATALOG_RESOURCE);
+ host = serializeMap.get(MYSQL_HOST);
+ port = serializeMap.get(MYSQL_PORT);
+ userName = serializeMap.get(MYSQL_USER);
+ passwd = serializeMap.get(MYSQL_PASSWORD);
+ mysqlDatabaseName = serializeMap.get(MYSQL_DATABASE);
+ mysqlTableName = serializeMap.get(MYSQL_TABLE);
+ } else {
+ host = Text.readString(in);
+ port = Text.readString(in);
+ userName = Text.readString(in);
+ passwd = Text.readString(in);
+ mysqlDatabaseName = Text.readString(in);
+ mysqlTableName = Text.readString(in);
+ }
}
}
diff --git
a/fe/fe-core/src/main/java/org/apache/doris/catalog/OdbcCatalogResource.java
b/fe/fe-core/src/main/java/org/apache/doris/catalog/OdbcCatalogResource.java
new file mode 100644
index 0000000..4bf547d
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/OdbcCatalogResource.java
@@ -0,0 +1,109 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.catalog;
+
+import org.apache.doris.common.DdlException;
+import org.apache.doris.common.proc.BaseProcResult;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.gson.annotations.SerializedName;
+
+import java.util.Map;
+
+/**
+ * External ODBC Catalog resource for external table query.
+ *
+ * External ODBC Catalog resource example:
+ * CREATE EXTERNAL RESOURCE "odbc_mysql"
+ * PROPERTIES
+ * (
+ * "type" = "external_odbc", [required]
+ * "user" = "root", [required]
+ * "password" = "root", [required]
+ * "host" = "192.168.1.1", [required]
+ * "port" = "8086", [required]
+ * "odbc_type" = "mysql", [optional, external table of ODBC should set]
+ * "driver" = "MySQL driver" [optional, external table of ODBC should set]
+ * );
+ *
+ * DROP RESOURCE "odbc_mysql";
+ */
+public class OdbcCatalogResource extends Resource {
+ // required
+ private static final String HOST = "host";
+ private static final String PORT = "port";
+ private static final String USER = "user";
+ private static final String PASSWORD = "password";
+
+ // optional
+ private static final String TYPE = "odbc_type";
+ private static final String DRIVER = "driver";
+
+ @SerializedName(value = "configs")
+ private Map<String, String> configs;
+
+ public OdbcCatalogResource(String name) {
+ this(name, Maps.newHashMap());
+ }
+
+ private OdbcCatalogResource(String name, Map<String, String> configs) {
+ super(name, ResourceType.ODBC_CATALOG);
+ this.configs = configs;
+ }
+
+ public OdbcCatalogResource getCopiedResource() {
+ return new OdbcCatalogResource(name, Maps.newHashMap(configs));
+ }
+
+ private void checkProperties(String propertieKey) throws DdlException {
+ // check the propertie key
+ String value = configs.get(propertieKey);
+ if (value == null) {
+ throw new DdlException("Missing " + propertieKey + " in
properties");
+ }
+
+ }
+
+ public String getProperties(String propertieKey) {
+ // check the propertie key
+ String value = configs.get(propertieKey);
+ return value;
+ }
+
+ @Override
+ protected void setProperties(Map<String, String> properties) throws
DdlException {
+ Preconditions.checkState(properties != null);
+
+ configs = properties;
+
+ checkProperties(HOST);
+ checkProperties(PORT);
+ checkProperties(USER);
+ checkProperties(PASSWORD);
+ }
+
+ @Override
+ protected void getProcNodeData(BaseProcResult result) {
+ String lowerCaseType = type.name().toLowerCase();
+ for (Map.Entry<String, String> entry : configs.entrySet()) {
+ result.addRow(Lists.newArrayList(name, lowerCaseType,
entry.getKey(), entry.getValue()));
+ }
+ }
+}
+
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/OdbcTable.java
b/fe/fe-core/src/main/java/org/apache/doris/catalog/OdbcTable.java
index dfd9a67..ee38279 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/OdbcTable.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/OdbcTable.java
@@ -20,6 +20,8 @@ package org.apache.doris.catalog;
import com.google.common.collect.Maps;
import org.apache.doris.common.DdlException;
import org.apache.doris.common.io.Text;
+import org.apache.doris.mysql.privilege.PrivPredicate;
+import org.apache.doris.qe.ConnectContext;
import org.apache.doris.thrift.TOdbcTable;
import org.apache.doris.thrift.TTableDescriptor;
import org.apache.doris.thrift.TTableType;
@@ -43,6 +45,7 @@ import java.util.zip.Adler32;
public class OdbcTable extends Table {
private static final Logger LOG = LogManager.getLogger(OlapTable.class);
+ private static final String ODBC_CATALOG_RESOURCE =
"odbc_catalog_resource";
private static final String ODBC_HOST = "host";
private static final String ODBC_PORT = "port";
private static final String ODBC_USER = "user";
@@ -50,7 +53,7 @@ public class OdbcTable extends Table {
private static final String ODBC_DATABASE = "database";
private static final String ODBC_TABLE = "table";
private static final String ODBC_DRIVER = "driver";
- private static final String ODBC_TYPE = "type";
+ private static final String ODBC_TYPE = "odbc_type";
private static Map<String, TOdbcTableType> TABLE_TYPE_MAP;
static {
@@ -62,6 +65,7 @@ public class OdbcTable extends Table {
TABLE_TYPE_MAP = Collections.unmodifiableMap(tempMap);
}
+ private String odbcCatalogResourceName;
private String host;
private String port;
private String userName;
@@ -84,41 +88,79 @@ public class OdbcTable extends Table {
private void validate(Map<String, String> properties) throws DdlException {
if (properties == null) {
throw new DdlException("Please set properties of odbc table, "
- + "they are: host, port, user, password, database and
table");
+ + "they are: odbc_catalog_resource or [host, port, user,
password, driver, odbc_type]" +
+ " and database and table");
}
- // Set up
- host = properties.get(ODBC_HOST);
- if (Strings.isNullOrEmpty(host)) {
- throw new DdlException("Host of Odbc table is null. "
- + "Please add properties('host'='xxx.xxx.xxx.xxx') when
create table");
- }
+ if (properties.containsKey(ODBC_CATALOG_RESOURCE)) {
+ odbcCatalogResourceName = properties.get(ODBC_CATALOG_RESOURCE);
+
+ // 1. check whether resource exist
+ Resource oriResource =
Catalog.getCurrentCatalog().getResourceMgr().getResource(odbcCatalogResourceName);
+ if (oriResource == null) {
+ throw new DdlException("Resource does not exist. name: " +
odbcCatalogResourceName);
+ }
- port = properties.get(ODBC_PORT);
- if (Strings.isNullOrEmpty(port)) {
- // Maybe null pointer or number convert
- throw new DdlException("Port of Odbc table is null. "
- + "Please add properties('port'='3306') when create
table");
+ // 2. check resource usage privilege
+ if
(!Catalog.getCurrentCatalog().getAuth().checkResourcePriv(ConnectContext.get(),
+ odbcCatalogResourceName,
+ PrivPredicate.USAGE)) {
+ throw new DdlException("USAGE denied to user '" +
ConnectContext.get().getQualifiedUser()
+ + "'@'" + ConnectContext.get().getRemoteIP()
+ + "' for resource '" + odbcCatalogResourceName + "'");
+ }
} else {
- try {
- Integer.valueOf(port);
- } catch (Exception e) {
- throw new DdlException("Port of Odbc table must be a number."
- + "Please add properties('port'='3306') when create
table");
+ // Set up
+ host = properties.get(ODBC_HOST);
+ if (Strings.isNullOrEmpty(host)) {
+ throw new DdlException("Host of Odbc table is null. "
+ + "Please set proper resource or add
properties('host'='xxx.xxx.xxx.xxx') when create table");
+ }
+ port = properties.get(ODBC_PORT);
+ if (Strings.isNullOrEmpty(port)) {
+ // Maybe null pointer or number convert
+ throw new DdlException("Port of Odbc table is null. "
+ + "Please set odbc_catalog_resource or add
properties('port'='3306') when create table");
+ } else {
+ try {
+ Integer.valueOf(port);
+ } catch (Exception e) {
+ throw new DdlException("Port of Odbc table must be a
number."
+ + "Please set odbc_catalog_resource or add
properties('port'='3306') when create table");
+
+ }
}
- }
- userName = properties.get(ODBC_USER);
- if (Strings.isNullOrEmpty(userName)) {
- throw new DdlException("User of Odbc table is null. "
- + "Please add properties('user'='root') when create
table");
- }
+ userName = properties.get(ODBC_USER);
+ if (Strings.isNullOrEmpty(userName)) {
+ throw new DdlException("User of Odbc table is null. "
+ + "Please set odbc_catalog_resource or add
properties('user'='root') when create table");
+ }
+
+ passwd = properties.get(ODBC_PASSWORD);
+ if (passwd == null) {
+ throw new DdlException("Password of Odbc table is null. "
+ + "Please set odbc_catalog_resource or add
properties('password'='xxxx') when create table");
+ }
- passwd = properties.get(ODBC_PASSWORD);
- if (passwd == null) {
- throw new DdlException("Password of Odbc table is null. "
- + "Please add properties('password'='xxxx') when create
table");
+ driver = properties.get(ODBC_DRIVER);
+ if (Strings.isNullOrEmpty(driver)) {
+ throw new DdlException("Driver of Odbc table is null. "
+ + "Please set odbc_catalog_resource or add
properties('diver'='xxxx') when create table");
+ }
+
+ String tableType = properties.get(ODBC_TYPE);
+ if (Strings.isNullOrEmpty(tableType)) {
+ throw new DdlException("Type of Odbc table is null. "
+ + "Please set odbc_catalog_resource or add
properties('odbc_type'='xxxx') when create table");
+ } else {
+ odbcTableTypeName = tableType.toLowerCase();
+ if (!TABLE_TYPE_MAP.containsKey(odbcTableTypeName)) {
+ throw new DdlException("Invaild Odbc table type:" +
tableType
+ + " Now Odbc table type only support:" +
supportTableType());
+ }
+ }
}
odbcDatabaseName = properties.get(ODBC_DATABASE);
@@ -132,40 +174,52 @@ public class OdbcTable extends Table {
throw new DdlException("Database of Odbc table is null. "
+ "Please add properties('table'='xxxx') when create
table");
}
+ }
- driver = properties.get(ODBC_DRIVER);
- if (Strings.isNullOrEmpty(driver)) {
- throw new DdlException("Driver of Odbc table is null. "
- + "Please add properties('diver'='xxxx') when create
table");
+ private String getPropertyFromResource(String propertyName) {
+ OdbcCatalogResource odbcCatalogResource = (OdbcCatalogResource)
+
(Catalog.getCurrentCatalog().getResourceMgr().getResource(odbcCatalogResourceName));
+ if (odbcCatalogResource == null) {
+ throw new RuntimeException("Resource does not exist. name: " +
odbcCatalogResourceName);
}
- String tableType = properties.get(ODBC_TYPE);
- if (Strings.isNullOrEmpty(tableType)) {
- throw new DdlException("Type of Odbc table is null. "
- + "Please add properties('type'='xxxx') when create
table");
- } else {
- odbcTableTypeName = tableType.toLowerCase();
- if (!TABLE_TYPE_MAP.containsKey(odbcTableTypeName)) {
- throw new DdlException("Invaild Odbc table type:" + tableType
- + " Now Odbc table type only support:" + supportTableType());
- }
+ String property = odbcCatalogResource.getProperties(propertyName);
+ if (property == null) {
+            throw new RuntimeException("The property:" + propertyName + " is
not set in resource " + odbcCatalogResourceName);
}
+ return property;
+ }
+
+ public String getOdbcCatalogResourceName() {
+ return odbcCatalogResourceName;
}
public String getHost() {
- return host;
+ if (host != null) {
+ return host;
+ }
+ return getPropertyFromResource(ODBC_HOST);
}
public String getPort() {
- return port;
+ if (port != null) {
+ return port;
+ }
+ return getPropertyFromResource(ODBC_PORT);
}
public String getUserName() {
- return userName;
+ if (userName != null) {
+ return userName;
+ }
+ return getPropertyFromResource(ODBC_USER);
}
public String getPasswd() {
- return passwd;
+ if (passwd != null) {
+ return passwd;
+ }
+ return getPropertyFromResource(ODBC_PASSWORD);
}
public String getOdbcDatabaseName() {
@@ -177,27 +231,33 @@ public class OdbcTable extends Table {
}
public String getOdbcDriver() {
- return driver;
+ if (driver != null) {
+ return driver;
+ }
+ return getPropertyFromResource(ODBC_DRIVER);
}
public String getOdbcTableTypeName() {
- return odbcTableTypeName;
+ if (odbcTableTypeName != null) {
+ return odbcTableTypeName;
+ }
+ return getPropertyFromResource(ODBC_TYPE);
}
public TOdbcTableType getOdbcTableType() {
- return TABLE_TYPE_MAP.get(odbcTableTypeName);
+ return TABLE_TYPE_MAP.get(getOdbcTableTypeName());
}
public TTableDescriptor toThrift() {
TOdbcTable tOdbcTable = new TOdbcTable();
- tOdbcTable.setHost(host);
- tOdbcTable.setPort(port);
- tOdbcTable.setUser(userName);
- tOdbcTable.setPasswd(passwd);
- tOdbcTable.setDb(odbcDatabaseName);
- tOdbcTable.setTable(odbcTableName);
- tOdbcTable.setDriver(driver);
+ tOdbcTable.setHost(getHost());
+ tOdbcTable.setPort(getPort());
+ tOdbcTable.setUser(getUserName());
+ tOdbcTable.setPasswd(getPasswd());
+ tOdbcTable.setDb(getOdbcDatabaseName());
+ tOdbcTable.setTable(getOdbcTableName());
+ tOdbcTable.setDriver(getOdbcDriver());
tOdbcTable.setType(getOdbcTableType());
TTableDescriptor tTableDescriptor = new TTableDescriptor(getId(),
TTableType.ODBC_TABLE,
@@ -213,6 +273,8 @@ public class OdbcTable extends Table {
String charsetName = "UTF-8";
try {
+ // resource name
+ adler32.update(odbcCatalogResourceName.getBytes(charsetName));
// name
adler32.update(name.getBytes(charsetName));
// type
@@ -246,6 +308,8 @@ public class OdbcTable extends Table {
super.write(out);
Map<String, String> serializeMap = Maps.newHashMap();
+
+ serializeMap.put(ODBC_CATALOG_RESOURCE, odbcCatalogResourceName);
serializeMap.put(ODBC_HOST, host);
serializeMap.put(ODBC_PORT, port);
serializeMap.put(ODBC_USER, userName);
@@ -255,11 +319,16 @@ public class OdbcTable extends Table {
serializeMap.put(ODBC_DRIVER, driver);
serializeMap.put(ODBC_TYPE, odbcTableTypeName);
- int size = serializeMap.size();
+ int size = (int) serializeMap.values().stream().filter(v -> {
+ return v != null;
+ }).count();
out.writeInt(size);
+
for (Map.Entry<String, String> kv : serializeMap.entrySet()) {
- Text.writeString(out, kv.getKey());
- Text.writeString(out, kv.getValue());
+ if (kv.getValue() != null) {
+ Text.writeString(out, kv.getKey());
+ Text.writeString(out, kv.getValue());
+ }
}
}
@@ -275,6 +344,7 @@ public class OdbcTable extends Table {
serializeMap.put(key, value);
}
+ odbcCatalogResourceName = serializeMap.get(ODBC_CATALOG_RESOURCE);
host = serializeMap.get(ODBC_HOST);
port = serializeMap.get(ODBC_PORT);
userName = serializeMap.get(ODBC_USER);
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/Resource.java
b/fe/fe-core/src/main/java/org/apache/doris/catalog/Resource.java
index e140b9f..fed15e6 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/Resource.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/Resource.java
@@ -34,7 +34,8 @@ import java.util.Map;
public abstract class Resource implements Writable {
public enum ResourceType {
UNKNOWN,
- SPARK;
+ SPARK,
+ ODBC_CATALOG;
public static ResourceType fromString(String resourceType) {
for (ResourceType type : ResourceType.values()) {
@@ -63,6 +64,9 @@ public abstract class Resource implements Writable {
case SPARK:
resource = new SparkResource(stmt.getResourceName());
break;
+ case ODBC_CATALOG:
+ resource = new OdbcCatalogResource(stmt.getResourceName());
+ break;
default:
throw new DdlException("Only support Spark resource.");
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/ResourceMgr.java
b/fe/fe-core/src/main/java/org/apache/doris/catalog/ResourceMgr.java
index e8aa5d1..d112b28 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/ResourceMgr.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/ResourceMgr.java
@@ -67,8 +67,8 @@ public class ResourceMgr implements Writable {
}
public void createResource(CreateResourceStmt stmt) throws DdlException {
- if (stmt.getResourceType() != ResourceType.SPARK) {
- throw new DdlException("Only support Spark Resource.");
+ if (stmt.getResourceType() != ResourceType.SPARK &&
stmt.getResourceType() != ResourceType.ODBC_CATALOG) {
+ throw new DdlException("Only support SPARK and ODBC_CATALOG
resource.");
}
String resourceName = stmt.getResourceName();
diff --git
a/fe/fe-core/src/main/java/org/apache/doris/common/FeMetaVersion.java
b/fe/fe-core/src/main/java/org/apache/doris/common/FeMetaVersion.java
index db3f051..5f22b9f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/common/FeMetaVersion.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/common/FeMetaVersion.java
@@ -194,6 +194,8 @@ public final class FeMetaVersion {
public static final int VERSION_90 = 90;
// sparkLoadAppHandle
public static final int VERSION_91 = 91;
+    // for mysql external table resource support
+ public static final int VERSION_92 = 92;
// note: when increment meta version, should assign the latest version to
VERSION_CURRENT
- public static final int VERSION_CURRENT = VERSION_91;
+ public static final int VERSION_CURRENT = VERSION_92;
}
diff --git
a/fe/fe-core/src/main/java/org/apache/doris/persist/gson/GsonUtils.java
b/fe/fe-core/src/main/java/org/apache/doris/persist/gson/GsonUtils.java
index e455448..575385b 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/persist/gson/GsonUtils.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/persist/gson/GsonUtils.java
@@ -21,6 +21,7 @@ import org.apache.doris.alter.AlterJobV2;
import org.apache.doris.alter.RollupJobV2;
import org.apache.doris.alter.SchemaChangeJobV2;
import org.apache.doris.catalog.DistributionInfo;
+import org.apache.doris.catalog.OdbcCatalogResource;
import org.apache.doris.catalog.HashDistributionInfo;
import org.apache.doris.catalog.RandomDistributionInfo;
import org.apache.doris.catalog.Resource;
@@ -96,7 +97,8 @@ public class GsonUtils {
// runtime adapter for class "Resource"
private static RuntimeTypeAdapterFactory<Resource>
resourceTypeAdapterFactory = RuntimeTypeAdapterFactory
.of(Resource.class, "clazz")
- .registerSubtype(SparkResource.class,
SparkResource.class.getSimpleName());
+ .registerSubtype(SparkResource.class,
SparkResource.class.getSimpleName())
+ .registerSubtype(OdbcCatalogResource.class,
OdbcCatalogResource.class.getSimpleName());
// runtime adapter for class "AlterJobV2"
private static RuntimeTypeAdapterFactory<AlterJobV2>
alterJobV2TypeAdapterFactory = RuntimeTypeAdapterFactory
diff --git
a/fe/fe-core/src/test/java/org/apache/doris/analysis/CreateResourceStmtTest.java
b/fe/fe-core/src/test/java/org/apache/doris/analysis/CreateResourceStmtTest.java
index f26797c..5fc91f7 100644
---
a/fe/fe-core/src/test/java/org/apache/doris/analysis/CreateResourceStmtTest.java
+++
b/fe/fe-core/src/test/java/org/apache/doris/analysis/CreateResourceStmtTest.java
@@ -37,12 +37,14 @@ import java.util.Map;
public class CreateResourceStmtTest {
private Analyzer analyzer;
- private String resourceName;
+ private String resourceName1;
+ private String resourceName2;
@Before()
public void setUp() {
analyzer = AccessTestUtil.fetchAdminAnalyzer(true);
- resourceName = "spark0";
+ resourceName1 = "spark0";
+ resourceName2 = "odbc";
}
@Test
@@ -58,11 +60,20 @@ public class CreateResourceStmtTest {
Map<String, String> properties = Maps.newHashMap();
properties.put("type", "spark");
- CreateResourceStmt stmt = new CreateResourceStmt(true, resourceName,
properties);
+ CreateResourceStmt stmt = new CreateResourceStmt(true, resourceName1,
properties);
stmt.analyze(analyzer);
- Assert.assertEquals(resourceName, stmt.getResourceName());
+ Assert.assertEquals(resourceName1, stmt.getResourceName());
Assert.assertEquals(Resource.ResourceType.SPARK,
stmt.getResourceType());
Assert.assertEquals("CREATE EXTERNAL RESOURCE 'spark0'
PROPERTIES(\"type\" = \"spark\")", stmt.toSql());
+
+ properties = Maps.newHashMap();
+ properties.put("type", "odbc_catalog");
+ stmt = new CreateResourceStmt(true, resourceName2, properties);
+ stmt.analyze(analyzer);
+ Assert.assertEquals(resourceName2, stmt.getResourceName());
+ Assert.assertEquals(Resource.ResourceType.ODBC_CATALOG,
stmt.getResourceType());
+ Assert.assertEquals("CREATE EXTERNAL RESOURCE 'odbc'
PROPERTIES(\"type\" = \"odbc_catalog\")", stmt.toSql());
+
}
@Test(expected = AnalysisException.class)
@@ -78,7 +89,7 @@ public class CreateResourceStmtTest {
Map<String, String> properties = Maps.newHashMap();
properties.put("type", "hadoop");
- CreateResourceStmt stmt = new CreateResourceStmt(true, resourceName,
properties);
+ CreateResourceStmt stmt = new CreateResourceStmt(true, resourceName1,
properties);
stmt.analyze(analyzer);
}
}
\ No newline at end of file
diff --git
a/fe/fe-core/src/test/java/org/apache/doris/catalog/OdbcCatalogResourceTest.java
b/fe/fe-core/src/test/java/org/apache/doris/catalog/OdbcCatalogResourceTest.java
new file mode 100644
index 0000000..9f21f7f
--- /dev/null
+++
b/fe/fe-core/src/test/java/org/apache/doris/catalog/OdbcCatalogResourceTest.java
@@ -0,0 +1,159 @@
+
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.catalog;
+
+import org.apache.doris.analysis.AccessTestUtil;
+import org.apache.doris.analysis.Analyzer;
+import org.apache.doris.analysis.CreateResourceStmt;
+import org.apache.doris.common.FeMetaVersion;
+import org.apache.doris.common.UserException;
+import org.apache.doris.common.proc.BaseProcResult;
+import org.apache.doris.meta.MetaContext;
+import org.apache.doris.mysql.privilege.PaloAuth;
+import org.apache.doris.mysql.privilege.PrivPredicate;
+import org.apache.doris.persist.DropInfo;
+import org.apache.doris.qe.ConnectContext;
+
+import com.google.common.collect.Maps;
+import mockit.Expectations;
+import mockit.Injectable;
+import mockit.Mocked;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.util.HashMap;
+import java.util.Map;
+
+public class OdbcCatalogResourceTest {
+ private String name;
+ private String type;
+
+ private String host;
+ private String port;
+ private String user;
+ private String passwd;
+ private Map<String, String> properties;
+ private Analyzer analyzer;
+
+ @Before
+ public void setUp() {
+ name = "odbc";
+ type = "odbc_catalog";
+ host = "127.0.0.1";
+ port = "7777";
+ user = "doris";
+ passwd = "doris";
+ properties = Maps.newHashMap();
+ properties.put("type", type);
+ properties.put("host", host);
+ properties.put("port", port);
+ properties.put("user", user);
+ properties.put("password", passwd);
+ analyzer = AccessTestUtil.fetchAdminAnalyzer(true);
+ }
+
+ @Test
+ public void testFromStmt(@Mocked Catalog catalog, @Injectable PaloAuth
auth)
+ throws UserException {
+ new Expectations() {
+ {
+ catalog.getAuth();
+ result = auth;
+ auth.checkGlobalPriv((ConnectContext) any,
PrivPredicate.ADMIN);
+ result = true;
+ }
+ };
+
+ // host: 127.0.0.1, port: 7777, without driver and odbc_type
+ CreateResourceStmt stmt = new CreateResourceStmt(true, name,
properties);
+ stmt.analyze(analyzer);
+ OdbcCatalogResource resource = (OdbcCatalogResource)
Resource.fromStmt(stmt);
+ Assert.assertEquals(name, resource.getName());
+ Assert.assertEquals(type, resource.getType().name().toLowerCase());
+ Assert.assertEquals(host, resource.getProperties("host"));
+ Assert.assertEquals(port, resource.getProperties("port"));
+ Assert.assertEquals(user, resource.getProperties("user"));
+ Assert.assertEquals(passwd, resource.getProperties("password"));
+
+ // with driver and odbc_type
+ properties.put("driver", "mysql");
+ properties.put("odbc_type", "mysql");
+ stmt = new CreateResourceStmt(true, name, properties);
+ stmt.analyze(analyzer);
+ resource = (OdbcCatalogResource) Resource.fromStmt(stmt);
+ Assert.assertEquals("mysql", resource.getProperties("driver"));
+ Assert.assertEquals("mysql", resource.getProperties("odbc_type"));
+
+ // test getProcNodeData
+ BaseProcResult result = new BaseProcResult();
+ resource.getProcNodeData(result);
+ Assert.assertEquals(7, result.getRows().size());
+ }
+
+ @Test
+ public void testSerialization() throws Exception {
+ MetaContext metaContext = new MetaContext();
+ metaContext.setMetaVersion(FeMetaVersion.VERSION_92);
+ metaContext.setThreadLocalInfo();
+
+ // 1. Write objects to file
+ File file = new File("./odbcCatalogResource");
+ file.createNewFile();
+ DataOutputStream dos = new DataOutputStream(new
FileOutputStream(file));
+
+ OdbcCatalogResource odbcCatalogResource1 = new
OdbcCatalogResource("odbc1");
+ odbcCatalogResource1.write(dos);
+
+ Map<String, String> configs = new HashMap<>();
+ configs.put("host", "host");
+ configs.put("port", "port");
+ configs.put("user", "user");
+ configs.put("password", "password");
+ OdbcCatalogResource odbcCatalogResource2 = new
OdbcCatalogResource("odbc2");
+ odbcCatalogResource2.setProperties(configs);
+ odbcCatalogResource2.write(dos);
+
+ dos.flush();
+ dos.close();
+
+ // 2. Read objects from file
+ DataInputStream dis = new DataInputStream(new FileInputStream(file));
+
+ OdbcCatalogResource rOdbcCatalogResource1 = (OdbcCatalogResource)
OdbcCatalogResource.read(dis);
+ OdbcCatalogResource rOdbcCatalogResource2 = (OdbcCatalogResource)
OdbcCatalogResource.read(dis);
+
+ Assert.assertEquals("odbc1", rOdbcCatalogResource1.getName());
+ Assert.assertEquals("odbc2", rOdbcCatalogResource2.getName());
+
+ Assert.assertEquals(rOdbcCatalogResource2.getProperties("host"),
"host");
+ Assert.assertEquals(rOdbcCatalogResource2.getProperties("port"),
"port");
+ Assert.assertEquals(rOdbcCatalogResource2.getProperties("user"),
"user");
+ Assert.assertEquals(rOdbcCatalogResource2.getProperties("password"),
"password");
+
+ // 3. delete files
+ dis.close();
+ file.delete();
+ }
+}
\ No newline at end of file
diff --git
a/fe/fe-core/src/test/java/org/apache/doris/planner/QueryPlanTest.java
b/fe/fe-core/src/test/java/org/apache/doris/planner/QueryPlanTest.java
index 75720cb..bc702bd 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/planner/QueryPlanTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/planner/QueryPlanTest.java
@@ -346,7 +346,7 @@ public class QueryPlanTest {
"\"database\" = \"db1\",\n" +
"\"table\" = \"tbl1\",\n" +
"\"driver\" = \"Oracle Driver\",\n" +
- "\"type\" = \"oracle\"\n" +
+ "\"odbc_type\" = \"oracle\"\n" +
");");
createTable("create external table test.odbc_mysql\n" +
@@ -360,7 +360,7 @@ public class QueryPlanTest {
"\"database\" = \"db1\",\n" +
"\"table\" = \"tbl1\",\n" +
"\"driver\" = \"Oracle Driver\",\n" +
- "\"type\" = \"mysql\"\n" +
+ "\"odbc_type\" = \"mysql\"\n" +
");");
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]