This is an automated email from the ASF dual-hosted git repository.
casion pushed a commit to branch dev
in repository https://gitbox.apache.org/repos/asf/incubator-linkis-website.git
The following commit(s) were added to refs/heads/dev by this push:
new 74853ac9a update metadatasource docs (#366)
74853ac9a is described below
commit 74853ac9ac71b11363a0f3b887a5bcd61da7150a
Author: Casion <[email protected]>
AuthorDate: Tue Jun 21 11:22:03 2022 +0800
update metadatasource docs (#366)
---
.../current/deployment/start_metadatasource.md | 38 ++++++++++++++++++----
.../deployment/start_metadatasource.md | 37 +++++++++++++++++----
.../deployment/start_metadatasource.md | 38 ++++++++++++++++++----
3 files changed, 92 insertions(+), 21 deletions(-)
diff --git
a/i18n/zh-CN/docusaurus-plugin-content-docs/current/deployment/start_metadatasource.md
b/i18n/zh-CN/docusaurus-plugin-content-docs/current/deployment/start_metadatasource.md
index 3edf76af8..457446b9c 100644
---
a/i18n/zh-CN/docusaurus-plugin-content-docs/current/deployment/start_metadatasource.md
+++
b/i18n/zh-CN/docusaurus-plugin-content-docs/current/deployment/start_metadatasource.md
@@ -306,24 +306,48 @@ object TestMysqlClient {
>只能创建配置数据源,以及测试数据源是否能正常连接,无法直接进行元数据查询
先需要进行集群环境信息的配置
+
表`linkis_ps_dm_datasource_env`
```roomsql
-INSERT INTO `linkis_ps_dm_datasource_env`
-(`env_name`, `env_desc`, `datasource_type_id`, `parameter`, `create_user`,
`modify_user`)
-VALUES
-('testEnv', '测试环境', 4, '{\r\n "keytab":
"4dd408ad-a2f9-4501-83b3-139290977ca2",\r\n "uris":
"thrift://clustername:9083",\r\n "principle":"[email protected]"\r\n}',
'user','user');
-
+INSERT INTO `linkis_ps_dm_datasource_env`
+(`env_name`, `env_desc`, `datasource_type_id`,
`parameter`,`create_user`,`modify_user`)
+VALUES
+('testEnv', '测试环境', 4,
+'{\r\n "uris": "thrift://clustername:9083",\r\n "keytab":
"4dd408ad-a2f9-4501-83b3-139290977ca2",\r\n
"principle":"[email protected]",\r\n
"hadoopConf":{"hive.metastore.execute.setugi":"true"}\r\n}',
+'user','user');
```
-主键id,作为envId,在建立连接时,需要通过此envId参数,获取集群配置相关信息。
+主键id作为envId,在建立连接时,需要通过此envId参数,获取集群配置相关信息。
配置字段解释:
```
{
+ "uris": "thrift://clustername:9083", # 必选 如果未开启kerberos认证
下列[keytab][principle]参数可以为空
"keytab": "bml resource id",//keytab 存储在物料库中的resourceId,目前需要通过http接口手动上传。
- "uris": "thrift://clustername:9083",
"principle":"[email protected]" //认证的principle
+ "hadoopConf":{} //额外的连接参数 可选
}
```
+keytab的resourceId获取方式,目前基础数据管理功能还在规划中,可以通过http接口请求获取到
+参考示例
+```shell script
+curl --form "file=@文件路径" \
+--form system=子系统名 \
+-H "Token-Code:认证token" \
+-H "Token-User:认证用户名" \
+http://linkis-gatewayip:port/api/rest_j/v1/bml/upload
+
+示例:
+curl --form "file=@/appcom/keytab/hadoop.keytab" \
+--form system=ABCD \
+-H "Token-Code:QML-AUTH" \
+-H "Token-User:hadoop" \
+http://127.0.0.1:9001/api/rest_j/v1/bml/upload
+
+请求结果中的resourceId 即为对应的`bml resource id`值
+{"method":"/bml/upload","status":0,"message":"The task of submitting and
uploading resources was
successful(提交上传资源任务成功)","data":{"resourceId":"6e4e54fc-cc97-4d0d-8d5e-a311129ec84e","version":"v000001","taskId":35}}
+```
+
+
web端创建:

diff --git
a/i18n/zh-CN/docusaurus-plugin-content-docs/version-1.1.0/deployment/start_metadatasource.md
b/i18n/zh-CN/docusaurus-plugin-content-docs/version-1.1.0/deployment/start_metadatasource.md
index 3edf76af8..7444d00b7 100644
---
a/i18n/zh-CN/docusaurus-plugin-content-docs/version-1.1.0/deployment/start_metadatasource.md
+++
b/i18n/zh-CN/docusaurus-plugin-content-docs/version-1.1.0/deployment/start_metadatasource.md
@@ -306,24 +306,47 @@ object TestMysqlClient {
>只能创建配置数据源,以及测试数据源是否能正常连接,无法直接进行元数据查询
先需要进行集群环境信息的配置
+
表`linkis_ps_dm_datasource_env`
```roomsql
-INSERT INTO `linkis_ps_dm_datasource_env`
-(`env_name`, `env_desc`, `datasource_type_id`, `parameter`, `create_user`,
`modify_user`)
-VALUES
-('testEnv', '测试环境', 4, '{\r\n "keytab":
"4dd408ad-a2f9-4501-83b3-139290977ca2",\r\n "uris":
"thrift://clustername:9083",\r\n "principle":"[email protected]"\r\n}',
'user','user');
-
+INSERT INTO `linkis_ps_dm_datasource_env`
+(`env_name`, `env_desc`, `datasource_type_id`,
`parameter`,`create_user`,`modify_user`)
+VALUES
+('testEnv', '测试环境', 4,
+'{\r\n "uris": "thrift://clustername:9083",\r\n "keytab":
"4dd408ad-a2f9-4501-83b3-139290977ca2",\r\n
"principle":"[email protected]",\r\n
"hadoopConf":{"hive.metastore.execute.setugi":"true"}\r\n}',
+'user','user');
```
-主键id,作为envId,在建立连接时,需要通过此envId参数,获取集群配置相关信息。
+主键id作为envId,在建立连接时,需要通过此envId参数,获取集群配置相关信息。
配置字段解释:
```
{
+ "uris": "thrift://clustername:9083", # 必选 如果未开启kerberos认证
下列[keytab][principle]参数可以为空
"keytab": "bml resource id",//keytab 存储在物料库中的resourceId,目前需要通过http接口手动上传。
- "uris": "thrift://clustername:9083",
"principle":"[email protected]" //认证的principle
+ "hadoopConf":{} //额外的连接参数 可选
}
```
+keytab的resourceId获取方式,目前基础数据管理功能还在规划中,可以通过http接口请求获取到
+参考示例
+```shell script
+curl --form "file=@文件路径" \
+--form system=子系统名 \
+-H "Token-Code:认证token" \
+-H "Token-User:认证用户名" \
+http://linkis-gatewayip:port/api/rest_j/v1/bml/upload
+
+示例:
+curl --form "file=@/appcom/keytab/hadoop.keytab" \
+--form system=ABCD \
+-H "Token-Code:QML-AUTH" \
+-H "Token-User:hadoop" \
+http://127.0.0.1:9001/api/rest_j/v1/bml/upload
+
+请求结果中的resourceId 即为对应的`bml resource id`值
+{"method":"/bml/upload","status":0,"message":"The task of submitting and
uploading resources was
successful(提交上传资源任务成功)","data":{"resourceId":"6e4e54fc-cc97-4d0d-8d5e-a311129ec84e","version":"v000001","taskId":35}}
+```
+
web端创建:

diff --git
a/i18n/zh-CN/docusaurus-plugin-content-docs/version-1.1.1/deployment/start_metadatasource.md
b/i18n/zh-CN/docusaurus-plugin-content-docs/version-1.1.1/deployment/start_metadatasource.md
index 3edf76af8..457446b9c 100644
---
a/i18n/zh-CN/docusaurus-plugin-content-docs/version-1.1.1/deployment/start_metadatasource.md
+++
b/i18n/zh-CN/docusaurus-plugin-content-docs/version-1.1.1/deployment/start_metadatasource.md
@@ -306,24 +306,48 @@ object TestMysqlClient {
>只能创建配置数据源,以及测试数据源是否能正常连接,无法直接进行元数据查询
先需要进行集群环境信息的配置
+
表`linkis_ps_dm_datasource_env`
```roomsql
-INSERT INTO `linkis_ps_dm_datasource_env`
-(`env_name`, `env_desc`, `datasource_type_id`, `parameter`, `create_user`,
`modify_user`)
-VALUES
-('testEnv', '测试环境', 4, '{\r\n "keytab":
"4dd408ad-a2f9-4501-83b3-139290977ca2",\r\n "uris":
"thrift://clustername:9083",\r\n "principle":"[email protected]"\r\n}',
'user','user');
-
+INSERT INTO `linkis_ps_dm_datasource_env`
+(`env_name`, `env_desc`, `datasource_type_id`,
`parameter`,`create_user`,`modify_user`)
+VALUES
+('testEnv', '测试环境', 4,
+'{\r\n "uris": "thrift://clustername:9083",\r\n "keytab":
"4dd408ad-a2f9-4501-83b3-139290977ca2",\r\n
"principle":"[email protected]",\r\n
"hadoopConf":{"hive.metastore.execute.setugi":"true"}\r\n}',
+'user','user');
```
-主键id,作为envId,在建立连接时,需要通过此envId参数,获取集群配置相关信息。
+主键id作为envId,在建立连接时,需要通过此envId参数,获取集群配置相关信息。
配置字段解释:
```
{
+ "uris": "thrift://clustername:9083", # 必选 如果未开启kerberos认证
下列[keytab][principle]参数可以为空
"keytab": "bml resource id",//keytab 存储在物料库中的resourceId,目前需要通过http接口手动上传。
- "uris": "thrift://clustername:9083",
"principle":"[email protected]" //认证的principle
+ "hadoopConf":{} //额外的连接参数 可选
}
```
+keytab的resourceId获取方式,目前基础数据管理功能还在规划中,可以通过http接口请求获取到
+参考示例
+```shell script
+curl --form "file=@文件路径" \
+--form system=子系统名 \
+-H "Token-Code:认证token" \
+-H "Token-User:认证用户名" \
+http://linkis-gatewayip:port/api/rest_j/v1/bml/upload
+
+示例:
+curl --form "file=@/appcom/keytab/hadoop.keytab" \
+--form system=ABCD \
+-H "Token-Code:QML-AUTH" \
+-H "Token-User:hadoop" \
+http://127.0.0.1:9001/api/rest_j/v1/bml/upload
+
+请求结果中的resourceId 即为对应的`bml resource id`值
+{"method":"/bml/upload","status":0,"message":"The task of submitting and
uploading resources was
successful(提交上传资源任务成功)","data":{"resourceId":"6e4e54fc-cc97-4d0d-8d5e-a311129ec84e","version":"v000001","taskId":35}}
+```
+
+
web端创建:

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]