Hi,
After changing the source definition as you suggested, the previous error went away, but the window step still fails. The traceback is below, followed by my current code.
Traceback (most recent call last):
  File "tou.py", line 71, in <module>
    from_kafka_to_kafka_demo()
  File "tou.py", line 21, in from_kafka_to_kafka_demo
    .select(" id, time1 , time1 ")\
  File "/usr/local/lib/python3.7/site-packages/pyflink/table/table.py", line 907, in select
    return Table(self._j_table.select(fields), self._t_env)
  File "/usr/local/lib/python3.7/site-packages/py4j/java_gateway.py", line 1286, in __call__
    answer, self.gateway_client, self.target_id, self.name)
  File "/usr/local/lib/python3.7/site-packages/pyflink/util/exceptions.py", line 147, in deco
    return f(*a, **kw)
  File "/usr/local/lib/python3.7/site-packages/py4j/protocol.py", line 328, in get_return_value
    format(target_id, ".", name), value)
py4j.protocol.Py4JJavaError: An error occurred while calling o26.select.
: org.apache.flink.table.api.ValidationException: A tumble window expects a size value literal.
    at org.apache.flink.table.operations.utils.AggregateOperationFactory.getAsValueLiteral(AggregateOperationFactory.java:384)
    at org.apache.flink.table.operations.utils.AggregateOperationFactory.validateAndCreateTumbleWindow(AggregateOperationFactory.java:302)
    at org.apache.flink.table.operations.utils.AggregateOperationFactory.createResolvedWindow(AggregateOperationFactory.java:236)
    at org.apache.flink.table.operations.utils.OperationTreeBuilder.windowAggregate(OperationTreeBuilder.java:250)
    at org.apache.flink.table.api.internal.TableImpl$WindowGroupedTableImpl.select(TableImpl.java:794)
    at org.apache.flink.table.api.internal.TableImpl$WindowGroupedTableImpl.select(TableImpl.java:781)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.flink.api.python.shaded.py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
    at org.apache.flink.api.python.shaded.py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
    at org.apache.flink.api.python.shaded.py4j.Gateway.invoke(Gateway.java:282)
    at org.apache.flink.api.python.shaded.py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    at org.apache.flink.api.python.shaded.py4j.commands.CallCommand.execute(CallCommand.java:79)
    at org.apache.flink.api.python.shaded.py4j.GatewayConnection.run(GatewayConnection.java:238)
    at java.lang.Thread.run(Thread.java:748)
def register_rides_source(st_env):
    source_ddl = """
    create table source1(
        id int,
        time1 timestamp,
        type string,
        WATERMARK FOR time1 as time1 - INTERVAL '2' SECOND
    ) with (
        'connector.type' = 'kafka',
        'update-mode' = 'append',
        'connector.topic' = 'tp1',
        'connector.properties.bootstrap.servers' = 'localhost:9092',
        'connector.properties.zookeeper.connect' = 'localhost:2181',
        'format.type' = 'json',
        'format.derive-schema' = 'true',
        'connector.version' = 'universal'
    )
    """
    st_env.sql_update(source_ddl)
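(Side note: in Flink 1.11 sql_update still works but is deprecated in favor of execute_sql, so the registration can equivalently be written as:

    st_env.execute_sql(source_ddl)
)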
s_env = StreamExecutionEnvironment.get_execution_environment()
s_env.set_parallelism(1)
st_env = StreamTableEnvironment.create(s_env)
register_rides_source(st_env)
register_rides_sink(st_env)

st_env.from_path("source1")\
    .window(Tumble.over("2.secends").on("time1").alias("w")) \
    .group_by("w") \
    .select(" id, time1 , time1 ")\
    .insert_into("sink1")
st_env.execute("2-from_kafka_to_kafka")
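Note: the ValidationException above is not about the watermark. "2.secends" is a misspelling; the expression parser does not recognize "secends" as a time unit, so it cannot turn the expression into the size value literal a tumble window expects. Also, after group_by("w") the select may only reference grouping keys, aggregates, and window properties such as w.start/w.end, so " id, time1 , time1 " would be rejected next. A minimal corrected sketch (assuming id is meant to be a grouping key and the sink's time1/time2 columns should hold the window bounds):

    st_env.from_path("source1")\
        .window(Tumble.over("2.seconds").on("time1").alias("w")) \
        .group_by("w, id") \
        .select("id, w.start as time1, w.end as time2") \
        .insert_into("sink1")
    st_env.execute("2-from_kafka_to_kafka")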
Thanks!
------------------ Original Message ------------------
From: "user-zh" <[email protected]>;
Date: Fri, Jul 10, 2020 9:17 PM
To: "user-zh" <[email protected]>;
Subject: Re: pyflink1.11.0window
Hi,
You need to define time1 in the source ddl as a time attribute, e.g.:
create table source1(
    id int,
    time1 timestamp,
    type string,
    WATERMARK FOR time1 as time1 - INTERVAL '2' SECOND
) with (...)
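One caveat worth checking: in Flink 1.11 the watermark column must be of type TIMESTAMP(3), while a bare timestamp defaults to a higher precision, so the column declaration may need an explicit precision, e.g.:

    create table source1(
        id int,
        time1 timestamp(3),
        type string,
        WATERMARK FOR time1 as time1 - INTERVAL '2' SECOND
    ) with (...)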
<[email protected]> wrote on Fri, Jul 10, 2020 at 8:43 PM:
> ------------------ Original Message ------------------
> From: <[email protected]>;
> Date: Thu, Jul 9, 2020 5:08 PM
> To: "godfrey he" <[email protected]>;
> Subject: pyflink1.11.0window
>
> Hi,
>     When I run a window aggregation with pyflink 1.11, it fails with:
> : org.apache.flink.table.api.ValidationException: A group window expects a
> time attribute for grouping in a stream environment.
>
> I could not work out from the error message how the time attribute should
> be declared. Could you point me to what is wrong? My code is below.
> Thanks.
>
> def from_kafka_to_kafka_demo():
>     s_env = StreamExecutionEnvironment.get_execution_environment()
>     s_env.set_parallelism(1)
>
>     # use blink table planner
>     st_env = StreamTableEnvironment.create(s_env)
>
>     # register source and sink
>     register_rides_source(st_env)
>     register_rides_sink(st_env)
>
>     st_env.from_path("source1")\
>         .window(Tumble.over("1.secends").on("time1").alias("w")) \
>         .group_by("w") \
>         .select(" id, time1 , time1 ")\
>         .insert_into("sink1")
>
>     st_env.execute("2-from_kafka_to_kafka")
>
>
>
>
> def register_rides_source(st_env):
>     source_ddl = '''
>     create table source1(
>         id int,
>         time1 timestamp,
>         type string
>     ) with (
>         'connector.type' = 'kafka',
>         'update-mode' = 'append',
>         'connector.topic' = 'tp1',
>         'connector.properties.bootstrap.servers' = 'localhost:9092'
>     )
>     '''
>     st_env.sql_update(source_ddl)
>
>
>
>
> def register_rides_sink(st_env):
>     sink_ddl = '''
>     create table sink1(
>         id int,
>         time1 timestamp,
>         time2 timestamp
>     ) with (
>         'connector.type' = 'kafka',
>         'update-mode' = 'append',
>         'connector.topic' = 'tp3',
>         'connector.properties.bootstrap.servers' = 'localhost:9092'
>     )
>     '''
>     st_env.sql_update(sink_ddl)
>
>
>
>
> if __name__ == '__main__':
>     from_kafka_to_kafka_demo()