??????
    
 ??????????????????????source??????????????????????????????????????????????????window??????????????????????????????
Traceback (most recent call last):
  File "tou.py", line 71, in <module>
    from_kafka_to_kafka_demo()
  File "tou.py", line 21, in from_kafka_to_kafka_demo
    .select(" id, time1 , time1 ")\
  File "/usr/local/lib/python3.7/site-packages/pyflink/table/table.py", line 907, in select
    return Table(self._j_table.select(fields), self._t_env)
  File "/usr/local/lib/python3.7/site-packages/py4j/java_gateway.py", line 1286, in __call__
    answer, self.gateway_client, self.target_id, self.name)
  File "/usr/local/lib/python3.7/site-packages/pyflink/util/exceptions.py", line 147, in deco
    return f(*a, **kw)
  File "/usr/local/lib/python3.7/site-packages/py4j/protocol.py", line 328, in get_return_value
    format(target_id, ".", name), value)
py4j.protocol.Py4JJavaError: An error occurred while calling o26.select.
: org.apache.flink.table.api.ValidationException: A tumble window expects a size value literal.
        at org.apache.flink.table.operations.utils.AggregateOperationFactory.getAsValueLiteral(AggregateOperationFactory.java:384)
        at org.apache.flink.table.operations.utils.AggregateOperationFactory.validateAndCreateTumbleWindow(AggregateOperationFactory.java:302)
        at org.apache.flink.table.operations.utils.AggregateOperationFactory.createResolvedWindow(AggregateOperationFactory.java:236)
        at org.apache.flink.table.operations.utils.OperationTreeBuilder.windowAggregate(OperationTreeBuilder.java:250)
        at org.apache.flink.table.api.internal.TableImpl$WindowGroupedTableImpl.select(TableImpl.java:794)
        at org.apache.flink.table.api.internal.TableImpl$WindowGroupedTableImpl.select(TableImpl.java:781)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.flink.api.python.shaded.py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
        at org.apache.flink.api.python.shaded.py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
        at org.apache.flink.api.python.shaded.py4j.Gateway.invoke(Gateway.java:282)
        at org.apache.flink.api.python.shaded.py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
        at org.apache.flink.api.python.shaded.py4j.commands.CallCommand.execute(CallCommand.java:79)
        at org.apache.flink.api.python.shaded.py4j.GatewayConnection.run(GatewayConnection.java:238)
        at java.lang.Thread.run(Thread.java:748)























def register_rides_source(st_env):
    """Register the Kafka-backed table ``source1`` on *st_env*.

    The DDL declares ``time1`` as an event-time (rowtime) attribute via the
    WATERMARK clause so that downstream group windows can be defined on it.

    :param st_env: a pyflink ``StreamTableEnvironment`` (Flink 1.11 style,
        uses the deprecated ``sql_update`` API — matches the rest of the file).
    """
    source_ddl = """
    create table source1(
        id int,
        -- A rowtime attribute must be TIMESTAMP(3); a bare TIMESTAMP
        -- (= TIMESTAMP(6)) is rejected for WATERMARK declarations and
        -- leads to "A group window expects a time attribute" downstream.
        time1 TIMESTAMP(3),
        type string,
        WATERMARK FOR time1 as time1 - INTERVAL '2' SECOND
    ) with (
        'connector.type' = 'kafka',
        'update-mode' = 'append',
        'connector.topic' = 'tp1',
        'connector.properties.bootstrap.servers' = 'localhost:9092',
        'connector.properties.zookeeper.connect' = 'localhost:2181',
        'format.type' = 'json',
        'format.derive-schema' = 'true',
        'connector.version' = 'universal'
    )
    """
    st_env.sql_update(source_ddl)

def from_kafka_to_kafka_demo():
    """Read from Kafka table ``source1``, aggregate over a 2-second tumbling
    event-time window on ``time1``, and write the result to Kafka table
    ``sink1`` (schema: id, time1, time2).
    """
    s_env = StreamExecutionEnvironment.get_execution_environment()
    s_env.set_parallelism(1)

    st_env = StreamTableEnvironment.create(s_env)

    register_rides_source(st_env)
    register_rides_sink(st_env)

    # "2.seconds" (not "2.secends"): the misspelled unit is parsed as a
    # field reference instead of an interval literal, which raises
    # "ValidationException: A tumble window expects a size value literal".
    # Grouping by "w, id" lets us select the raw `id` column; a plain
    # group_by("w") only permits aggregates and window properties in select().
    # w.start / w.end map onto the sink's time1 / time2 timestamp columns.
    st_env.from_path("source1") \
        .window(Tumble.over("2.seconds").on("time1").alias("w")) \
        .group_by("w, id") \
        .select("id, w.start, w.end") \
        .insert_into("sink1")

    st_env.execute("2-from_kafka_to_kafka")


????????








------------------&nbsp;????????&nbsp;------------------
??????:                                                                         
                                               "user-zh"                        
                                                            
<[email protected]&gt;;
????????:&nbsp;2020??7??10??(??????) ????9:17
??????:&nbsp;"user-zh"<[email protected]&gt;;

????:&nbsp;Re: pyflink1.11.0window



??????????

You can declare time1 as a time attribute in the source DDL, for example:
create table source1(
        id int,
        time1 timestamp,
        type string,
        WATERMARK FOR time1 as time1 - INTERVAL '2' SECOND
) with (...)

?????????????? <[email protected]&gt; ??2020??7??10?????? ????8:43??????

&gt; ------------------&amp;nbsp;????????&amp;nbsp;------------------
&gt; ??????:
&gt;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
 "??????????????"
&gt;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
 <
&gt; [email protected]&amp;gt;;
&gt; ????????:&amp;nbsp;2020??7??9??(??????) ????5:08
&gt; ??????:&amp;nbsp;"godfrey he"<[email protected]&amp;gt;;
&gt;
&gt; ????:&amp;nbsp;pyflink1.11.0window
&gt;
&gt;
&gt;
&gt; ??????
&gt; &amp;nbsp; &amp;nbsp;????????pyflink1.11????????window????????????
&gt; : org.apache.flink.table.api.ValidationException: A group window expects a
&gt; time attribute for grouping in a stream environment.
&gt;
&gt; 
????????????????????????????????????????????????????????????????????????????????????????
&gt; ????????
&gt; ????
&gt;
&gt;
&gt; def from_kafka_to_kafka_demo():
&gt;     s_env = StreamExecutionEnvironment.get_execution_environment()
&gt;     s_env.set_parallelism(1)
&gt;
&gt;
&gt;     # use blink table planner
&gt;     st_env = StreamTableEnvironment.create(s_env)
&gt;
&gt;
&gt;     # register source and sink
&gt;     register_rides_source(st_env)
&gt;     register_rides_sink(st_env)
&gt;
&gt;
&gt;     st_env.from_path("source1")\
&gt;         .window(Tumble.over("1.secends").on("time1").alias("w")) \
&gt;         .group_by("w") \
&gt;         .select(" id, time1 , time1 ")\
&gt;         .insert_into("sink1")
&gt;
&gt;     st_env.execute("2-from_kafka_to_kafka")
&gt;
&gt;
&gt;
&gt;
&gt; def register_rides_source(st_env):
&gt;     source_ddl = \
&gt;     '''
&gt;     create table source1(
&gt;         id int,
&gt;         time1 timestamp,
&gt;         type string
&gt;     ) with (
&gt;     'connector.type' = 'kafka',
&gt;     'update-mode' = 'append',
&gt;     'connector.topic' = 'tp1',
&gt;     'connector.properties.bootstrap.servers' = 'localhost:9092'
&gt;     )
&gt;     '''
&gt;     st_env.sql_update(source_ddl)
&gt;
&gt;
&gt;
&gt;
&gt; def register_rides_sink(st_env):
&gt;     sink_ddl = \
&gt;     '''
&gt;     create table sink1(
&gt;         id int,
&gt;         time1 timestamp,
&gt;         time2 timestamp
&gt;     ) with (
&gt;     'connector.type' = 'kafka',
&gt;     'update-mode' = 'append',
&gt;     'connector.topic' = 'tp3',
&gt;     'connector.properties.bootstrap.servers' = 'localhost:9092'
&gt;     )
&gt;     '''
&gt;     st_env.sql_update(sink_ddl)
&gt;
&gt;
&gt;
&gt;
&gt; if __name__ == '__main__':
&gt; &amp;nbsp; &amp;nbsp; from_kafka_to_kafka_demo()
&gt;
&gt;
&gt; &amp;nbsp; &amp;nbsp;

回复