This is an automated email from the ASF dual-hosted git repository.
sjwiesman pushed a commit to branch release-1.13
in repository https://gitbox.apache.org/repos/asf/flink.git
The following commit(s) were added to refs/heads/release-1.13 by this push:
new 9710139 [hotfix][docs] Fixed erroneous references for 'site.scala
version suffix' and 'site.version'
9710139 is described below
commit 97101390755639f0f014794f726a25136637ee58
Author: Roc Marshal <[email protected]>
AuthorDate: Thu Jul 1 18:36:04 2021 +0800
[hotfix][docs] Fixed erroneous references for 'site.scala version suffix' and
'site.version'
This closes #16340
---
docs/content.zh/docs/connectors/dataset.md | 6 +++---
docs/content.zh/docs/connectors/datastream/kafka.md | 4 ++--
docs/content.zh/docs/connectors/table/hive/overview.md | 8 ++++----
docs/content.zh/docs/deployment/filesystems/overview.md | 2 +-
.../docs/deployment/resource-providers/standalone/docker.md | 4 ++--
.../docs/dev/datastream/fault-tolerance/queryable_state.md | 2 +-
docs/content.zh/docs/dev/table/sql/queries/match_recognize.md | 4 ++--
docs/content.zh/docs/ops/state/state_backends.md | 4 ++--
docs/content.zh/docs/try-flink/local_installation.md | 6 +++---
.../docs/deployment/resource-providers/standalone/docker.md | 4 ++--
.../docs/dev/datastream/fault-tolerance/queryable_state.md | 2 +-
docs/content/docs/dev/table/sql/queries/match_recognize.md | 4 ++--
docs/data/sql_connectors.yml | 2 +-
13 files changed, 26 insertions(+), 26 deletions(-)
diff --git a/docs/content.zh/docs/connectors/dataset.md
b/docs/content.zh/docs/connectors/dataset.md
index 2f6bd97..fd7765e 100644
--- a/docs/content.zh/docs/connectors/dataset.md
+++ b/docs/content.zh/docs/connectors/dataset.md
@@ -55,7 +55,7 @@ Also, the serialization framework of Flink is able to handle
classes generated f
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-avro</artifactId>
- <version>{{ site.version }}</version>
+ <version>{{< version >}}</version>
</dependency>
```
@@ -108,8 +108,8 @@ curl https://flink.apache.org/q/quickstart.sh | bash
```xml
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-hadoop-compatibility{{ site.scala_version_suffix
}}</artifactId>
- <version>{{site.version}}</version>
+ <artifactId>flink-hadoop-compatibility{{< scala_version >}}</artifactId>
+ <version>{{< version >}}</version>
</dependency>
<dependency>
<groupId>com.microsoft.hadoop</groupId>
diff --git a/docs/content.zh/docs/connectors/datastream/kafka.md
b/docs/content.zh/docs/connectors/datastream/kafka.md
index c8d5c30..048fb97 100644
--- a/docs/content.zh/docs/connectors/datastream/kafka.md
+++ b/docs/content.zh/docs/connectors/datastream/kafka.md
@@ -106,7 +106,7 @@ Flink Kafka Consumer 需要知道如何将 Kafka 中的二进制数据转换为
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-avro</artifactId>
- <version>{{site.version }}</version>
+ <version>{{< version >}}</version>
</dependency>
```
{{< /tab >}}
@@ -115,7 +115,7 @@ Flink Kafka Consumer 需要知道如何将 Kafka 中的二进制数据转换为
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-avro-confluent-registry</artifactId>
- <version>{{site.version }}</version>
+ <version>{{< version >}}</version>
</dependency>
```
{{< /tab >}}
diff --git a/docs/content.zh/docs/connectors/table/hive/overview.md
b/docs/content.zh/docs/connectors/table/hive/overview.md
index a32a3ff..bc685f2 100644
--- a/docs/content.zh/docs/connectors/table/hive/overview.md
+++ b/docs/content.zh/docs/connectors/table/hive/overview.md
@@ -278,15 +278,15 @@ export HADOOP_CLASSPATH=`hadoop classpath`
<!-- Flink Dependency -->
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-connector-hive{{ site.scala_version_suffix }}</artifactId>
- <version>{{site.version}}</version>
+ <artifactId>flink-connector-hive{{< scala_version >}}</artifactId>
+ <version>{{< version >}}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-table-api-java-bridge{{ site.scala_version_suffix
}}</artifactId>
- <version>{{site.version}}</version>
+ <artifactId>flink-table-api-java-bridge{{< scala_version >}}</artifactId>
+ <version>{{< version >}}</version>
<scope>provided</scope>
</dependency>
diff --git a/docs/content.zh/docs/deployment/filesystems/overview.md
b/docs/content.zh/docs/deployment/filesystems/overview.md
index 4eff30d..1636e80 100644
--- a/docs/content.zh/docs/deployment/filesystems/overview.md
+++ b/docs/content.zh/docs/deployment/filesystems/overview.md
@@ -59,7 +59,7 @@ Apache Flink 支持下列文件系统:
```bash
mkdir ./plugins/s3-fs-hadoop
-cp ./opt/flink-s3-fs-hadoop-{{ site.version }}.jar ./plugins/s3-fs-hadoop/
+cp ./opt/flink-s3-fs-hadoop-{{< version >}}.jar ./plugins/s3-fs-hadoop/
```
<span class="label label-danger">注意</span> 文件系统的[插件]({{< ref
"docs/deployment/filesystems/plugins" >}})机制在 Flink 版本 1.9 中引入,以支持每个插件专有 Java
类加载器,并避免类隐藏机制。您仍然可以通过旧机制使用文件系统,即将对应的 JAR 文件复制到 `lib` 目录中,或使用您自己的实现方式,但是从版本 1.10
开始,**S3 插件必须通过插件机制加载**,因为这些插件不再被隐藏(版本 1.10 之后类不再被重定位),旧机制不再可用。
diff --git
a/docs/content.zh/docs/deployment/resource-providers/standalone/docker.md
b/docs/content.zh/docs/deployment/resource-providers/standalone/docker.md
index 82fc95b..11962cb 100644
--- a/docs/content.zh/docs/deployment/resource-providers/standalone/docker.md
+++ b/docs/content.zh/docs/deployment/resource-providers/standalone/docker.md
@@ -295,7 +295,7 @@ As described in the [plugins]({{< ref
"docs/deployment/filesystems/plugins" >}})
copied to the correct location in the Flink installation in the Docker
container for them to work.
If you want to enable plugins provided with Flink (in the `opt/` directory of
the Flink distribution), you can pass the environment variable
`ENABLE_BUILT_IN_PLUGINS` when you run the Flink image.
-The `ENABLE_BUILT_IN_PLUGINS` should contain a list of plugin jar file names
separated by `;`. A valid plugin name is for example
`flink-s3-fs-hadoop-{{site.version}}.jar`
+The `ENABLE_BUILT_IN_PLUGINS` should contain a list of plugin jar file names
separated by `;`. A valid plugin name is for example `flink-s3-fs-hadoop-{{<
version >}}.jar`
```sh
$ docker run \
@@ -323,7 +323,7 @@ RUN ln -s /usr/bin/python3 /usr/bin/python
# install Python Flink
{{< stable >}}
-RUN pip3 install apache-flink[=={{site.version}}]
+RUN pip3 install apache-flink[=={{< version >}}]
{{< /stable >}}
{{< unstable >}}
RUN pip3 install apache-flink
diff --git
a/docs/content.zh/docs/dev/datastream/fault-tolerance/queryable_state.md
b/docs/content.zh/docs/dev/datastream/fault-tolerance/queryable_state.md
index e930f0f..779f89b 100644
--- a/docs/content.zh/docs/dev/datastream/fault-tolerance/queryable_state.md
+++ b/docs/content.zh/docs/dev/datastream/fault-tolerance/queryable_state.md
@@ -55,7 +55,7 @@ under the License.
为了在 Flink 集群上使用 queryable state,需要进行以下操作:
- 1. 将 `flink-queryable-state-runtime{{ site.scala_version_suffix
}}-{{site.version }}.jar`
+ 1. 将 `flink-queryable-state-runtime{{< scala_version >}}-{{< version >}}.jar`
从 [Flink distribution](https://flink.apache.org/downloads.html "Apache Flink:
Downloads") 的 `opt/` 目录拷贝到 `lib/` 目录;
2. 将参数 `queryable-state.enable` 设置为 `true`。详细信息以及其它配置可参考文档
[Configuration]({{< ref "docs/deployment/config" >}}#queryable-state)。
diff --git a/docs/content.zh/docs/dev/table/sql/queries/match_recognize.md
b/docs/content.zh/docs/dev/table/sql/queries/match_recognize.md
index 7008de4..fb7ce39 100644
--- a/docs/content.zh/docs/dev/table/sql/queries/match_recognize.md
+++ b/docs/content.zh/docs/dev/table/sql/queries/match_recognize.md
@@ -78,8 +78,8 @@ FROM MyTable
```xml
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-cep{{ site.scala_version_suffix }}</artifactId>
- <version>{{ site.version }}</version>
+ <artifactId>flink-cep{{< scala_version >}}</artifactId>
+ <version>{{< version >}}</version>
</dependency>
```
diff --git a/docs/content.zh/docs/ops/state/state_backends.md
b/docs/content.zh/docs/ops/state/state_backends.md
index 02bbdf0..c7ca59a 100644
--- a/docs/content.zh/docs/ops/state/state_backends.md
+++ b/docs/content.zh/docs/ops/state/state_backends.md
@@ -159,8 +159,8 @@ env.setStateBackend(new
FsStateBackend("hdfs://namenode:40010/flink/checkpoints"
```xml
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-statebackend-rocksdb{{ site.scala_version_suffix
}}</artifactId>
- <version>{{ site.version }}</version>
+ <artifactId>flink-statebackend-rocksdb{{< scala_version >}}</artifactId>
+ <version>{{< version >}}</version>
<scope>provided</scope>
</dependency>
```
diff --git a/docs/content.zh/docs/try-flink/local_installation.md
b/docs/content.zh/docs/try-flink/local_installation.md
index 2182c4f..19c25b5 100644
--- a/docs/content.zh/docs/try-flink/local_installation.md
+++ b/docs/content.zh/docs/try-flink/local_installation.md
@@ -48,11 +48,11 @@ under the License.
java -version
```
-[下载](https://flink.apache.org/downloads.html) release {{ site.version }} 并解压。
+[下载](https://flink.apache.org/downloads.html) release {{< version >}} 并解压。
```bash
-$ tar -xzf flink-{{ site.version }}-bin-scala{{< scala_version >}}.tgz
-$ cd flink-{{ site.version }}-bin-scala{{< scala_version >}}
+$ tar -xzf flink-{{< version >}}-bin-scala{{< scala_version >}}.tgz
+$ cd flink-{{< version >}}-bin-scala{{< scala_version >}}
```
<a name="step-2-start-a-cluster"></a>
diff --git
a/docs/content/docs/deployment/resource-providers/standalone/docker.md
b/docs/content/docs/deployment/resource-providers/standalone/docker.md
index 3576b11..8707f2a 100644
--- a/docs/content/docs/deployment/resource-providers/standalone/docker.md
+++ b/docs/content/docs/deployment/resource-providers/standalone/docker.md
@@ -295,7 +295,7 @@ As described in the [plugins]({{< ref
"docs/deployment/filesystems/plugins" >}})
copied to the correct location in the Flink installation in the Docker
container for them to work.
If you want to enable plugins provided with Flink (in the `opt/` directory of
the Flink distribution), you can pass the environment variable
`ENABLE_BUILT_IN_PLUGINS` when you run the Flink image.
-The `ENABLE_BUILT_IN_PLUGINS` should contain a list of plugin jar file names
separated by `;`. A valid plugin name is for example
`flink-s3-fs-hadoop-{{site.version}}.jar`
+The `ENABLE_BUILT_IN_PLUGINS` should contain a list of plugin jar file names
separated by `;`. A valid plugin name is for example `flink-s3-fs-hadoop-{{<
version >}}.jar`
```sh
$ docker run \
@@ -323,7 +323,7 @@ RUN ln -s /usr/bin/python3 /usr/bin/python
# install Python Flink
{{< stable >}}
-RUN pip3 install apache-flink[=={{site.version}}]
+RUN pip3 install apache-flink[=={{< version >}}]
{{< /stable >}}
{{< unstable >}}
RUN pip3 install apache-flink
diff --git
a/docs/content/docs/dev/datastream/fault-tolerance/queryable_state.md
b/docs/content/docs/dev/datastream/fault-tolerance/queryable_state.md
index 299bb52..b7f9472 100644
--- a/docs/content/docs/dev/datastream/fault-tolerance/queryable_state.md
+++ b/docs/content/docs/dev/datastream/fault-tolerance/queryable_state.md
@@ -71,7 +71,7 @@ response back to the client.
To enable queryable state on your Flink cluster, you need to do the following:
- 1. copy the `flink-queryable-state-runtime{{ site.scala_version_suffix
}}-{{site.version }}.jar`
+ 1. copy the `flink-queryable-state-runtime{{< scala_version >}}-{{< version
>}}.jar`
from the `opt/` folder of your [Flink
distribution](https://flink.apache.org/downloads.html "Apache Flink:
Downloads"),
to the `lib/` folder.
2. set the property `queryable-state.enable` to `true`. See the
[Configuration]({{< ref "docs/deployment/config" >}}#queryable-state)
documentation for details and additional parameters.
diff --git a/docs/content/docs/dev/table/sql/queries/match_recognize.md
b/docs/content/docs/dev/table/sql/queries/match_recognize.md
index c6fd0dc..dcc0370 100644
--- a/docs/content/docs/dev/table/sql/queries/match_recognize.md
+++ b/docs/content/docs/dev/table/sql/queries/match_recognize.md
@@ -92,8 +92,8 @@ project.
```xml
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-cep{{ site.scala_version_suffix }}</artifactId>
- <version>{{ site.version }}</version>
+ <artifactId>flink-cep{{< scala_version >}}</artifactId>
+ <version>{{< version >}}</version>
</dependency>
```
diff --git a/docs/data/sql_connectors.yml b/docs/data/sql_connectors.yml
index 7e20da0..989b5f2 100644
--- a/docs/data/sql_connectors.yml
+++ b/docs/data/sql_connectors.yml
@@ -149,6 +149,6 @@ upsert-kafka:
kinesis:
name: Kinesis
category: connector
- maven: flink-connector-kinesis{{ site.scala_version_suffix }}
+ maven: flink-connector-kinesis$scala_version
sql_url:
https://repo.maven.apache.org/maven2/org/apache/flink/flink-sql-connector-kinesis$scala_version/$version/flink-sql-connector-kinesis$scala_version-$version.jar