This is an automated email from the ASF dual-hosted git repository.
tangyun pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git
The following commit(s) were added to refs/heads/master by this push:
new 35c5f674041 [hotfix][doc] Refine code examples in data_stream_api
35c5f674041 is described below
commit 35c5f674041bcefea93e1de459cea0d1789f98e0
Author: Mingliang Liu <[email protected]>
AuthorDate: Wed Oct 26 10:04:25 2022 -0700
[hotfix][doc] Refine code examples in data_stream_api
---
docs/content.zh/docs/dev/table/data_stream_api.md | 9 +++++++--
docs/content/docs/dev/table/data_stream_api.md | 10 ++++++++--
2 files changed, 15 insertions(+), 4 deletions(-)
diff --git a/docs/content.zh/docs/dev/table/data_stream_api.md
b/docs/content.zh/docs/dev/table/data_stream_api.md
index 5aa882cdca6..991e105f7fd 100644
--- a/docs/content.zh/docs/dev/table/data_stream_api.md
+++ b/docs/content.zh/docs/dev/table/data_stream_api.md
@@ -975,6 +975,7 @@ import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.util.Collector;
+import java.time.LocalDateTime;
// setup DataStream API
StreamExecutionEnvironment env =
StreamExecutionEnvironment.getExecutionEnvironment();
@@ -1716,7 +1717,7 @@ table.printSchema();
// data types can be extracted reflectively as above or explicitly defined
-Table table3 = tableEnv
+Table table = tableEnv
.fromDataStream(
dataStream,
Schema.newBuilder()
@@ -1758,6 +1759,7 @@ The following code shows how to use `createTemporaryView`
for different scenarios
```java
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.table.api.Schema;
// create some DataStream
DataStream<Tuple2<Long, String>> dataStream = env.fromElements(
@@ -2016,6 +2018,8 @@ DataStream<Row> dataStream = tableEnv.toDataStream(table);
DataStream<User> dataStream = tableEnv.toDataStream(table, User.class);
+// === EXAMPLE 3 ===
+
// data types can be extracted reflectively as above or explicitly defined
DataStream<User> dataStream =
@@ -2071,6 +2075,8 @@ val dataStream: DataStream[Row] =
tableEnv.toDataStream(table)
val dataStream: DataStream[User] = tableEnv.toDataStream(table, classOf[User])
+// === EXAMPLE 3 ===
+
// data types can be extracted reflectively as above or explicitly defined
val dataStream: DataStream[User] =
@@ -3058,7 +3064,6 @@ Afterward, the type information semantics of the
DataStream API need to be considered
{{< top >}}
-
Legacy Conversion
-----------------
diff --git a/docs/content/docs/dev/table/data_stream_api.md
b/docs/content/docs/dev/table/data_stream_api.md
index ba0753c04bb..9010c990dd2 100644
--- a/docs/content/docs/dev/table/data_stream_api.md
+++ b/docs/content/docs/dev/table/data_stream_api.md
@@ -973,6 +973,7 @@ import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.util.Collector;
+import java.time.LocalDateTime;
// setup DataStream API
StreamExecutionEnvironment env =
StreamExecutionEnvironment.getExecutionEnvironment();
@@ -1714,7 +1715,7 @@ table.printSchema();
// data types can be extracted reflectively as above or explicitly defined
-Table table3 = tableEnv
+Table table = tableEnv
.fromDataStream(
dataStream,
Schema.newBuilder()
@@ -1756,6 +1757,7 @@ The following code shows how to use `createTemporaryView`
for different scenarios
```java
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.table.api.Schema;
// create some DataStream
DataStream<Tuple2<Long, String>> dataStream = env.fromElements(
@@ -2014,6 +2016,8 @@ DataStream<Row> dataStream = tableEnv.toDataStream(table);
DataStream<User> dataStream = tableEnv.toDataStream(table, User.class);
+// === EXAMPLE 3 ===
+
// data types can be extracted reflectively as above or explicitly defined
DataStream<User> dataStream =
@@ -2069,6 +2073,8 @@ val dataStream: DataStream[Row] =
tableEnv.toDataStream(table)
val dataStream: DataStream[User] = tableEnv.toDataStream(table, classOf[User])
+// === EXAMPLE 3 ===
+
// data types can be extracted reflectively as above or explicitly defined
val dataStream: DataStream[User] =
@@ -2892,7 +2898,7 @@ from pyflink.common import Encoder
from pyflink.datastream import StreamExecutionEnvironment
from pyflink.datastream.connectors.file_system import FileSink
from pyflink.table import StreamTableEnvironment, TableDescriptor, Schema,
DataTypes
-
+
env = StreamExecutionEnvironment.get_execution_environment()
table_env = StreamTableEnvironment.create(env)