[
https://issues.apache.org/jira/browse/FLINK-21302?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
]
Flink Jira Bot updated FLINK-21302:
-----------------------------------
Labels: pull-request-available stale-assigned (was: pull-request-available)
I am the [Flink Jira Bot|https://github.com/apache/flink-jira-bot/] and I help
the community manage its development. I see this issue is assigned but has not
received an update in 30 days, so it has been labeled "stale-assigned".
If you are still working on the issue, please remove the label and add a
comment updating the community on your progress. If this issue is waiting on
feedback, please consider this a reminder to the committer/reviewer. Flink is a
very active project, and so we appreciate your patience.
If you are no longer working on the issue, please unassign yourself so someone
else may work on it.
> Fix NPE when using ROW_NUMBER() in OVER aggregate
> -------------------------------------------------
>
> Key: FLINK-21302
> URL: https://issues.apache.org/jira/browse/FLINK-21302
> Project: Flink
> Issue Type: Bug
> Components: Table SQL / Planner
> Reporter: JING ZHANG
> Assignee: JING ZHANG
> Priority: Major
> Labels: pull-request-available, stale-assigned
> Fix For: 1.13.0, 1.14.0
>
>
> A `NullPointerException` is thrown if the SQL contains ROW_NUMBER() in an OVER
> aggregate.
> {code:scala}
> @Test
> def testRowNumberOnOver(): Unit = {
>   val t = failingDataSource(TestData.tupleData5)
>     .toTable(tEnv, 'a, 'b, 'c, 'd, 'e, 'proctime.proctime)
>   tEnv.registerTable("MyTable", t)
>   val sqlQuery = "SELECT a, ROW_NUMBER() OVER (PARTITION BY a ORDER BY proctime()) FROM MyTable"
>   val sink = new TestingAppendSink
>   tEnv.sqlQuery(sqlQuery).toAppendStream[Row].addSink(sink)
>   env.execute()
> }
> {code}
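>
> Outside the IT-case harness (failingDataSource, TestingAppendSink), the same planning
> failure can presumably be reproduced with the plain DataStream/Table Scala bridge API.
> A minimal standalone sketch, assuming the Flink 1.12.x Scala bridge; the sample rows,
> table name, and print() sink are illustrative stand-ins, not taken from the original test:
> {code:scala}
> // Standalone sketch of the repro above (assumption: Flink 1.12.x Scala bridge API;
> // the sample rows and the print() sink are stand-ins, not from the original test).
> import org.apache.flink.streaming.api.scala._
> import org.apache.flink.table.api._
> import org.apache.flink.table.api.bridge.scala._
> import org.apache.flink.types.Row
>
> object RowNumberOverRepro {
>   def main(args: Array[String]): Unit = {
>     val env = StreamExecutionEnvironment.getExecutionEnvironment
>     val tEnv = StreamTableEnvironment.create(env)
>
>     // Small in-memory source with a processing-time attribute, mirroring the test table.
>     val t = env
>       .fromElements((1, 1L, 0, "Hallo", 1L), (2, 2L, 1, "Hallo Welt", 2L))
>       .toTable(tEnv, 'a, 'b, 'c, 'd, 'e, 'proctime.proctime)
>     tEnv.createTemporaryView("MyTable", t)
>
>     // Planning this query is what hits the NullPointerException during code generation.
>     val result = tEnv.sqlQuery(
>       "SELECT a, ROW_NUMBER() OVER (PARTITION BY a ORDER BY proctime()) FROM MyTable")
>
>     result.toAppendStream[Row].print()
>     env.execute("row-number-over-repro")
>   }
> }
> {code}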
>
> The following exception is thrown:
> {code:java}
> java.lang.NullPointerException
>     at scala.collection.mutable.ArrayOps$ofInt$.length$extension(ArrayOps.scala:240)
>     at scala.collection.mutable.ArrayOps$ofInt.length(ArrayOps.scala:240)
>     at scala.collection.SeqLike$class.size(SeqLike.scala:106)
>     at scala.collection.mutable.ArrayOps$ofInt.size(ArrayOps.scala:234)
>     at scala.collection.mutable.Builder$class.sizeHint(Builder.scala:69)
>     at scala.collection.mutable.ArrayBuilder.sizeHint(ArrayBuilder.scala:22)
>     at scala.collection.TraversableLike$class.builder$1(TraversableLike.scala:230)
>     at scala.collection.TraversableLike$class.map(TraversableLike.scala:233)
>     at scala.collection.mutable.ArrayOps$ofInt.map(ArrayOps.scala:234)
>     at org.apache.flink.table.planner.codegen.agg.DeclarativeAggCodeGen.<init>(DeclarativeAggCodeGen.scala:82)
>     at org.apache.flink.table.planner.codegen.agg.AggsHandlerCodeGenerator$$anonfun$3.apply(AggsHandlerCodeGenerator.scala:222)
>     at org.apache.flink.table.planner.codegen.agg.AggsHandlerCodeGenerator$$anonfun$3.apply(AggsHandlerCodeGenerator.scala:214)
>     at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
>     at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
>     at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
>     at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
>     at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
>     at scala.collection.mutable.ArrayOps$ofRef.map(ArrayOps.scala:186)
>     at org.apache.flink.table.planner.codegen.agg.AggsHandlerCodeGenerator.initialAggregateInformation(AggsHandlerCodeGenerator.scala:214)
>     at org.apache.flink.table.planner.codegen.agg.AggsHandlerCodeGenerator.generateAggsHandler(AggsHandlerCodeGenerator.scala:325)
>     at org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecOverAggregate.createUnboundedOverProcessFunction(StreamExecOverAggregate.java:262)
>     at org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecOverAggregate.translateToPlanInternal(StreamExecOverAggregate.java:154)
>     at org.apache.flink.table.planner.plan.nodes.exec.ExecNodeBase.translateToPlan(ExecNodeBase.java:128)
>     at org.apache.flink.table.planner.plan.nodes.exec.ExecEdge.translateToPlan(ExecEdge.java:247)
>     at org.apache.flink.table.planner.plan.nodes.exec.common.CommonExecCalc.translateToPlanInternal(CommonExecCalc.java:65)
>     at org.apache.flink.table.planner.plan.nodes.exec.ExecNodeBase.translateToPlan(ExecNodeBase.java:128)
>     at org.apache.flink.table.planner.plan.nodes.exec.ExecEdge.translateToPlan(ExecEdge.java:247)
>     at org.apache.flink.table.planner.plan.nodes.exec.common.CommonExecLegacySink.translateToTransformation(CommonExecLegacySink.java:167)
>     at org.apache.flink.table.planner.plan.nodes.exec.common.CommonExecLegacySink.translateToPlanInternal(CommonExecLegacySink.java:136)
>     at org.apache.flink.table.planner.plan.nodes.exec.ExecNodeBase.translateToPlan(ExecNodeBase.java:128)
>     at org.apache.flink.table.planner.delegation.StreamPlanner$$anonfun$translateToPlan$1.apply(StreamPlanner.scala:67)
>     at org.apache.flink.table.planner.delegation.StreamPlanner$$anonfun$translateToPlan$1.apply(StreamPlanner.scala:66)
>     at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
>     at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
>     at scala.collection.Iterator$class.foreach(Iterator.scala:891)
>     at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
>     at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
>     at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
>     at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
>     at scala.collection.AbstractTraversable.map(Traversable.scala:104)
>     at org.apache.flink.table.planner.delegation.StreamPlanner.translateToPlan(StreamPlanner.scala:66)
>     at org.apache.flink.table.planner.delegation.PlannerBase.translate(PlannerBase.scala:163)
>     at org.apache.flink.table.api.bridge.scala.internal.StreamTableEnvironmentImpl.toDataStream(StreamTableEnvironmentImpl.scala:178)
>     at org.apache.flink.table.api.bridge.scala.internal.StreamTableEnvironmentImpl.toAppendStream(StreamTableEnvironmentImpl.scala:103)
>     at org.apache.flink.table.api.bridge.scala.TableConversions.toAppendStream(TableConversions.scala:78)
>     at org.apache.flink.table.planner.runtime.stream.sql.OverAggregateITCase.testRowNumberOnOver(OverAggregateITCase.scala:67)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.lang.reflect.Method.invoke(Method.java:498)
>     at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
>     at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
>     at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
>     at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
>     at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26)
>     at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
>     at org.junit.rules.ExpectedException$ExpectedExceptionStatement.evaluate(ExpectedException.java:239)
>     at org.junit.rules.ExternalResource$1.evaluate(ExternalResource.java:48)
>     at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:55)
>     at org.junit.rules.RunRules.evaluate(RunRules.java:20)
>     at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
>     at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
>     at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
>     at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
>     at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
>     at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
>     at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
>     at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
>     at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
>     at org.junit.runners.Suite.runChild(Suite.java:128)
>     at org.junit.runners.Suite.runChild(Suite.java:27)
>     at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
>     at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
>     at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
>     at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
>     at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
>     at org.junit.rules.ExternalResource$1.evaluate(ExternalResource.java:48)
>     at org.junit.rules.ExternalResource$1.evaluate(ExternalResource.java:48)
>     at org.junit.rules.RunRules.evaluate(RunRules.java:20)
>     at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
>     at org.junit.runner.JUnitCore.run(JUnitCore.java:137)
>     at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:68)
>     at com.intellij.rt.junit.IdeaTestRunner$Repeater.startRunnerWithArgs(IdeaTestRunner.java:33)
>     at com.intellij.rt.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:230)
>     at com.intellij.rt.junit.JUnitStarter.main(JUnitStarter.java:58)
> {code}
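>
> Judging from the top frames (ArrayOps$ofInt.map -> Builder.sizeHint -> length$extension,
> called from DeclarativeAggCodeGen.<init>), the trace has the shape of a null Array[Int]
> being mapped over during aggregate code generation. A minimal Scala sketch of that
> failure mode, purely for illustration; the name argIndexes is a hypothetical stand-in,
> not the actual field in DeclarativeAggCodeGen:
> {code:scala}
> // Minimal sketch of the NPE shape seen above (assumption: some argument-index
> // array is null when DeclarativeAggCodeGen maps over it; `argIndexes` is a
> // hypothetical stand-in name).
> object NullIntArrayMapNpe {
>   def main(args: Array[String]): Unit = {
>     val argIndexes: Array[Int] = null
>     // The implicit ArrayOps wrapper accepts the null array, but map() pre-sizes
>     // its builder via sizeHint, which calls length on the null array and throws
>     // the NullPointerException at ArrayOps$ofInt$.length$extension.
>     argIndexes.map(i => i + 1)
>   }
> }
> {code}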
--
This message was sent by Atlassian Jira
(v8.3.4#803005)