xiaojin.wy created FLINK-23188:
----------------------------------

             Summary: Unsupported function definition: IFNULL. Only user defined functions are supported as inline functions
                 Key: FLINK-23188
                 URL: https://issues.apache.org/jira/browse/FLINK-23188
             Project: Flink
          Issue Type: Bug
          Components: Table SQL / Planner
    Affects Versions: 1.14.0
            Reporter: xiaojin.wy


CREATE TABLE database0_t0(
  c0 FLOAT
) WITH (
  'connector' = 'filesystem',
  'path' = 'hdfs:///tmp/database0_t0.csv',
  'format' = 'csv'
);

INSERT OVERWRITE database0_t0(c0) VALUES(0.40445197);

SELECT database0_t0.c0 AS ref0 FROM database0_t0
WHERE ((IFNULL(database0_t0.c1, database0_t0.c1)) IS NULL);
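
For reference, a minimal, untested Java sketch of how the statements above can be replayed against the batch planner through the Table API (the stack trace below goes through the Ververica SQL gateway, but the failure happens in the planner itself). Note that the reported filter references database0_t0.c1 while the DDL above only declares c0; the sketch adds a c1 column as an assumption so the statement validates. The exception is thrown while planning the SELECT, before any data is read:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class IfNullPushdownRepro {
    public static void main(String[] args) {
        // Batch mode, matching the BatchCommonSubGraphBasedOptimizer in the stack trace.
        TableEnvironment tEnv = TableEnvironment.create(
                EnvironmentSettings.newInstance().inBatchMode().build());

        // c1 is added here only so the reported query validates;
        // the DDL in this ticket declares c0 alone.
        tEnv.executeSql(
                "CREATE TABLE database0_t0 ("
                        + "  c0 FLOAT,"
                        + "  c1 FLOAT"
                        + ") WITH ("
                        + "  'connector' = 'filesystem',"
                        + "  'path' = 'hdfs:///tmp/database0_t0.csv',"
                        + "  'format' = 'csv')");

        // Planning this query pushes the IFNULL predicate into the filesystem source
        // (PushFilterIntoTableSourceScanRule), which is where the TableException below is raised.
        tEnv.executeSql(
                "SELECT database0_t0.c0 AS ref0 FROM database0_t0 "
                        + "WHERE ((IFNULL(database0_t0.c1, database0_t0.c1)) IS NULL)")
            .print();
    }
}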

The error:
"<Exception on server side: org.apache.flink.table.api.TableException: 
Unsupported function definition: IFNULL. Only user defined functions are 
supported as inline functions.  at 
org.apache.flink.table.planner.functions.bridging.BridgingUtils.lambda$createInlineFunctionName$0(BridgingUtils.java:81)
  at java.util.Optional.orElseThrow(Optional.java:290)  at 
org.apache.flink.table.planner.functions.bridging.BridgingUtils.createInlineFunctionName(BridgingUtils.java:78)
  at 
org.apache.flink.table.planner.functions.bridging.BridgingUtils.createName(BridgingUtils.java:58)
  at 
org.apache.flink.table.planner.functions.bridging.BridgingSqlFunction.<init>(BridgingSqlFunction.java:76)
  at 
org.apache.flink.table.planner.functions.bridging.BridgingSqlFunction.of(BridgingSqlFunction.java:116)
  at 
org.apache.flink.table.planner.expressions.converter.FunctionDefinitionConvertRule.convert(FunctionDefinitionConvertRule.java:65)
  at 
org.apache.flink.table.planner.expressions.converter.ExpressionConverter.visit(ExpressionConverter.java:97)
  at 
org.apache.flink.table.planner.expressions.converter.ExpressionConverter.visit(ExpressionConverter.java:71)
  at 
org.apache.flink.table.expressions.CallExpression.accept(CallExpression.java:134)
  at 
org.apache.flink.table.planner.expressions.converter.ExpressionConverter$1.toRexNode(ExpressionConverter.java:247)
  at java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:193)  
at 
java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1374)  
at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:481)  at 
java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:471)  
at java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:708)  
at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)  at 
java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:499)  at 
org.apache.flink.table.planner.expressions.converter.ExpressionConverter.toRexNodes(ExpressionConverter.java:240)
  at 
org.apache.flink.table.planner.expressions.converter.DirectConvertRule.lambda$convert$0(DirectConvertRule.java:220)
  at java.util.Optional.map(Optional.java:215)  at 
org.apache.flink.table.planner.expressions.converter.DirectConvertRule.convert(DirectConvertRule.java:217)
  at 
org.apache.flink.table.planner.expressions.converter.ExpressionConverter.visit(ExpressionConverter.java:97)
  at 
org.apache.flink.table.planner.expressions.converter.ExpressionConverter.visit(ExpressionConverter.java:71)
  at 
org.apache.flink.table.expressions.CallExpression.accept(CallExpression.java:134)
  at 
org.apache.flink.table.planner.plan.rules.logical.PushFilterIntoSourceScanRuleBase.lambda$convertExpressionToRexNode$0(PushFilterIntoSourceScanRuleBase.java:73)
  at java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:193)  
at 
java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1374)  
at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:481)  at 
java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:471)  
at java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:708)  
at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)  at 
java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:499)  at 
org.apache.flink.table.planner.plan.rules.logical.PushFilterIntoSourceScanRuleBase.convertExpressionToRexNode(PushFilterIntoSourceScanRuleBase.java:73)
  at 
org.apache.flink.table.planner.plan.rules.logical.PushFilterIntoSourceScanRuleBase.resolveFiltersAndCreateTableSourceTable(PushFilterIntoSourceScanRuleBase.java:116)
  at 
org.apache.flink.table.planner.plan.rules.logical.PushFilterIntoTableSourceScanRule.pushFilterIntoScan(PushFilterIntoTableSourceScanRule.java:95)
  at 
org.apache.flink.table.planner.plan.rules.logical.PushFilterIntoTableSourceScanRule.onMatch(PushFilterIntoTableSourceScanRule.java:70)
  at 
org.apache.calcite.plan.AbstractRelOptPlanner.fireRule(AbstractRelOptPlanner.java:333)
  at org.apache.calcite.plan.hep.HepPlanner.applyRule(HepPlanner.java:542)  at 
org.apache.calcite.plan.hep.HepPlanner.applyRules(HepPlanner.java:407)  at 
org.apache.calcite.plan.hep.HepPlanner.executeInstruction(HepPlanner.java:243)  
at 
org.apache.calcite.plan.hep.HepInstruction$RuleInstance.execute(HepInstruction.java:127)
  at org.apache.calcite.plan.hep.HepPlanner.executeProgram(HepPlanner.java:202) 
 at org.apache.calcite.plan.hep.HepPlanner.findBestExp(HepPlanner.java:189)  at 
org.apache.flink.table.planner.plan.optimize.program.FlinkHepProgram.optimize(FlinkHepProgram.scala:69)
  at 
org.apache.flink.table.planner.plan.optimize.program.FlinkHepRuleSetProgram.optimize(FlinkHepRuleSetProgram.scala:87)
  at 
org.apache.flink.table.planner.plan.optimize.program.FlinkGroupProgram$$anonfun$optimize$1$$anonfun$apply$1.apply(FlinkGroupProgram.scala:63)
  at 
org.apache.flink.table.planner.plan.optimize.program.FlinkGroupProgram$$anonfun$optimize$1$$anonfun$apply$1.apply(FlinkGroupProgram.scala:60)
  at 
scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157)
  at 
scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157)
  at scala.collection.Iterator$class.foreach(Iterator.scala:891)  at 
scala.collection.AbstractIterator.foreach(Iterator.scala:1334)  at 
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)  at 
scala.collection.AbstractIterable.foreach(Iterable.scala:54)  at 
scala.collection.TraversableOnce$class.foldLeft(TraversableOnce.scala:157)  at 
scala.collection.AbstractTraversable.foldLeft(Traversable.scala:104)  at 
org.apache.flink.table.planner.plan.optimize.program.FlinkGroupProgram$$anonfun$optimize$1.apply(FlinkGroupProgram.scala:60)
  at 
org.apache.flink.table.planner.plan.optimize.program.FlinkGroupProgram$$anonfun$optimize$1.apply(FlinkGroupProgram.scala:55)
  at 
scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157)
  at 
scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157)
  at scala.collection.immutable.Range.foreach(Range.scala:160)  at 
scala.collection.TraversableOnce$class.foldLeft(TraversableOnce.scala:157)  at 
scala.collection.AbstractTraversable.foldLeft(Traversable.scala:104)  at 
org.apache.flink.table.planner.plan.optimize.program.FlinkGroupProgram.optimize(FlinkGroupProgram.scala:55)
  at 
org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram$$anonfun$optimize$1.apply(FlinkChainedProgram.scala:62)
  at 
org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram$$anonfun$optimize$1.apply(FlinkChainedProgram.scala:58)
  at 
scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157)
  at 
scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157)
  at scala.collection.Iterator$class.foreach(Iterator.scala:891)  at 
scala.collection.AbstractIterator.foreach(Iterator.scala:1334)  at 
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)  at 
scala.collection.AbstractIterable.foreach(Iterable.scala:54)  at 
scala.collection.TraversableOnce$class.foldLeft(TraversableOnce.scala:157)  at 
scala.collection.AbstractTraversable.foldLeft(Traversable.scala:104)  at 
org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram.optimize(FlinkChainedProgram.scala:57)
  at 
org.apache.flink.table.planner.plan.optimize.BatchCommonSubGraphBasedOptimizer.optimizeTree(BatchCommonSubGraphBasedOptimizer.scala:87)
  at 
org.apache.flink.table.planner.plan.optimize.BatchCommonSubGraphBasedOptimizer.org$apache$flink$table$planner$plan$optimize$BatchCommonSubGraphBasedOptimizer$$optimizeBlock(BatchCommonSubGraphBasedOptimizer.scala:58)
  at 
org.apache.flink.table.planner.plan.optimize.BatchCommonSubGraphBasedOptimizer$$anonfun$doOptimize$1.apply(BatchCommonSubGraphBasedOptimizer.scala:46)
  at 
org.apache.flink.table.planner.plan.optimize.BatchCommonSubGraphBasedOptimizer$$anonfun$doOptimize$1.apply(BatchCommonSubGraphBasedOptimizer.scala:46)
  at 
scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)  
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)  at 
org.apache.flink.table.planner.plan.optimize.BatchCommonSubGraphBasedOptimizer.doOptimize(BatchCommonSubGraphBasedOptimizer.scala:46)
  at 
org.apache.flink.table.planner.plan.optimize.CommonSubGraphBasedOptimizer.optimize(CommonSubGraphBasedOptimizer.scala:93)
  at 
org.apache.flink.table.planner.delegation.PlannerBase.optimize(PlannerBase.scala:310)
  at 
org.apache.flink.table.planner.delegation.PlannerBase.translate(PlannerBase.scala:172)
  at 
com.ververica.flink.table.gateway.operation.SelectOperation.lambda$executeQueryInternal$0(SelectOperation.java:183)
  at 
com.ververica.flink.table.gateway.context.ExecutionContext.wrapClassLoader(ExecutionContext.java:130)
  at 
com.ververica.flink.table.gateway.operation.SelectOperation.executeQueryInternal(SelectOperation.java:182)
  at 
com.ververica.flink.table.gateway.operation.SelectOperation.execute(SelectOperation.java:82)
  at 
com.ververica.flink.table.gateway.operation.executor.OneByOneOperationExecutor.execute(OneByOneOperationExecutor.java:57)
  at 
com.ververica.flink.table.gateway.rest.session.Session.lambda$runStatement$1(Session.java:115)
  at 
com.ververica.flink.table.gateway.utils.EnvironmentUtil.lambda$wrapWithHadoopUsernameIfNeeded$0(EnvironmentUtil.java:57)
  at 
com.ververica.flink.table.gateway.utils.EnvironmentUtil.wrapWithHadoopUsernameIfNeeded(EnvironmentUtil.java:65)
  at 
com.ververica.flink.table.gateway.utils.EnvironmentUtil.wrapWithHadoopUsernameIfNeeded(EnvironmentUtil.java:56)
  at 
com.ververica.flink.table.gateway.rest.session.Session.runStatement(Session.java:114)
  at 
com.ververica.flink.table.gateway.rest.handler.StatementExecuteHandler.handleRequest(StatementExecuteHandler.java:83)
  at 
com.ververica.flink.table.gateway.rest.handler.AbstractRestHandler.respondToRequest(AbstractRestHandler.java:85)
  at 
com.ververica.flink.table.gateway.rest.handler.AbstractHandler.channelRead0(AbstractHandler.java:184)
  at 
com.ververica.flink.table.gateway.rest.handler.AbstractHandler.channelRead0(AbstractHandler.java:76)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)
  at 
org.apache.flink.runtime.rest.handler.router.RouterHandler.routed(RouterHandler.java:115)
  at 
org.apache.flink.runtime.rest.handler.router.RouterHandler.channelRead0(RouterHandler.java:94)
  at 
org.apache.flink.runtime.rest.handler.router.RouterHandler.channelRead0(RouterHandler.java:55)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)
  at 
org.apache.flink.shaded.netty4.io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)
  at 
org.apache.flink.runtime.rest.FileUploadHandler.channelRead0(FileUploadHandler.java:208)
  at 
org.apache.flink.runtime.rest.FileUploadHandler.channelRead0(FileUploadHandler.java:69)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.CombinedChannelDuplexHandler$DelegatingChannelHandlerContext.fireChannelRead(CombinedChannelDuplexHandler.java:436)
  at 
org.apache.flink.shaded.netty4.io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:324)
  at 
org.apache.flink.shaded.netty4.io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:296)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.CombinedChannelDuplexHandler.channelRead(CombinedChannelDuplexHandler.java:251)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)
  at 
org.apache.flink.shaded.netty4.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)
  at 
org.apache.flink.shaded.netty4.io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)
  at 
org.apache.flink.shaded.netty4.io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
  at java.lang.Thread.run(Thread.java:834) End of exception on server side>"
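
A possible workaround until this is fixed, continuing the repro sketch above with the same tEnv, is to express the two-argument IFNULL as COALESCE. This is an untested assumption that COALESCE is translated to the corresponding built-in Calcite operator and therefore does not go through the inline-function bridging path that fails here:

        // Hypothetical rewrite of the failing predicate; for two arguments,
        // COALESCE(x, y) returns y when x is NULL, like the IFNULL call in the report.
        tEnv.executeSql(
                "SELECT database0_t0.c0 AS ref0 FROM database0_t0 "
                        + "WHERE COALESCE(database0_t0.c1, database0_t0.c1) IS NULL")
            .print();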



--
This message was sent by Atlassian Jira
(v8.3.4#803005)
