[ 
https://issues.apache.org/jira/browse/FLINK-19887?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17336053#comment-17336053
 ] 

Flink Jira Bot commented on FLINK-19887:
----------------------------------------

This issue was labeled "stale-major" 7 days ago and has not received any updates so 
it is being deprioritized. If this ticket is actually Major, please raise the 
priority and ask a committer to assign you the issue or revive the public 
discussion.


> Table program cannot be compiled when using Scala package object
> ----------------------------------------------------------------
>
>                 Key: FLINK-19887
>                 URL: https://issues.apache.org/jira/browse/FLINK-19887
>             Project: Flink
>          Issue Type: Bug
>          Components: Table SQL / Runtime
>    Affects Versions: 1.11.2
>         Environment: <flink.version>1.11.2</flink.version>
> <scala.binary.version>2.12</scala.binary.version>
> jdk:1.8
>  
>            Reporter: 谢波
>            Priority: Major
>              Labels: stale-major
>             Fix For: 1.11.4
>
>
> {code:scala}
> package object analysis {
>  case class UserBehavior(userId: Long, productId: Long, categoryId: Long, 
> behavior: String, ts: Long)
>  case class ItemViewCount(var windowEnd: Long,var itemId: Long,var count: 
> Long)
> }
>  
> def main(args: Array[String]): Unit = {
>  val env = StreamExecutionEnvironment.getExecutionEnvironment
>  val tableEnv = StreamTableEnvironment.create(env)
>  env.setParallelism(1)
>  env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
>  val input = env.readTextFile("mock-data/UserBehavior.csv")
>  .map(e => {
>  val split = e.split(",")
>  UserBehavior(split(0).toLong, split(1).toLong, split(2).toLong, split(3), 
> split(4).toLong)
>  })
>  .assignAscendingTimestamps(_.ts * 1000L)
>  // dataStreamApi(input)
>  //包对象下的样例类会导致编译错误,这是一个BUG
>  val table = tableEnv.fromDataStream(input, $"productId", $"behavior", 
> $"ts".rowtime)
>  table.printSchema()
>  table
>  .window(Slide over 1.hour every 5.minutes on $"ts" as $"w")
>  .groupBy($"w", $"productId")
>  .select($"w".end, $"productId", $"productId".count)
>  .toAppendStream[Row]
>  .print("table ")
>  // table.toAppendStream[Row].print("table")
> // tableEnv.execute("table")
>  env.execute("hot item analysis")
>  }
> {code}
>  
>  
> {code}
> rootroot |-- productId: BIGINT |-- behavior: STRING |-- ts: TIMESTAMP(3) 
> *ROWTIME*
> /* 1 *//* 2 */      public class SourceConversion$4 extends 
> org.apache.flink.table.runtime.operators.AbstractProcessStreamOperator/* 3 */ 
>          implements 
> org.apache.flink.streaming.api.operators.OneInputStreamOperator \{/* 4 *//* 5 
> */        private final Object[] references;/* 6 */        private transient 
> org.apache.flink.table.data.util.DataFormatConverters.CaseClassConverter 
> converter$0;/* 7 */        org.apache.flink.table.data.GenericRowData out = 
> new org.apache.flink.table.data.GenericRowData(3);/* 8 */        private 
> final org.apache.flink.streaming.runtime.streamrecord.StreamRecord outElement 
> = new org.apache.flink.streaming.runtime.streamrecord.StreamRecord(null);/* 9 
> *//* 10 */        public SourceConversion$4(/* 11 */            Object[] 
> references,/* 12 */            
> org.apache.flink.streaming.runtime.tasks.StreamTask task,/* 13 */            
> org.apache.flink.streaming.api.graph.StreamConfig config,/* 14 */            
> org.apache.flink.streaming.api.operators.Output output,/* 15 */            
> org.apache.flink.streaming.runtime.tasks.ProcessingTimeService 
> processingTimeService) throws Exception {/* 16 */          this.references = 
> references;/* 17 */          converter$0 = 
> (((org.apache.flink.table.data.util.DataFormatConverters.CaseClassConverter) 
> references[0]));/* 18 */          this.setup(task, config, output);/* 19 */   
>        if (this instanceof 
> org.apache.flink.streaming.api.operators.AbstractStreamOperator) {/* 20 */    
>         ((org.apache.flink.streaming.api.operators.AbstractStreamOperator) 
> this)/* 21 */              
> .setProcessingTimeService(processingTimeService);/* 22 */          }/* 23 */  
>       }/* 24 *//* 25 */        @Override/* 26 */        public void open() 
> throws Exception \{/* 27 */          super.open();/* 28 */          /* 29 */  
>       }/* 30 *//* 31 */        @Override/* 32 */        public void 
> processElement(org.apache.flink.streaming.runtime.streamrecord.StreamRecord 
> element) throws Exception \{/* 33 */          
> org.apache.flink.table.data.RowData in1 = 
> (org.apache.flink.table.data.RowData) (org.apache.flink.table.data.RowData) 
> converter$0.toInternal((com.hiscat.flink.user.behavior.analysis.package.UserBehavior)
>  element.getValue());/* 34 */          /* 35 */          long field$1;/* 36 
> */          boolean isNull$1;/* 37 */          
> org.apache.flink.table.data.binary.BinaryStringData field$2;/* 38 */          
> boolean isNull$2;/* 39 */          org.apache.flink.table.data.TimestampData 
> result$3;/* 40 */          boolean isNull$3;/* 41 */          isNull$1 = 
> in1.isNullAt(1);/* 42 */          field$1 = -1L;/* 43 */          if 
> (!isNull$1) {/* 44 */            field$1 = in1.getLong(1);/* 45 */          
> }/* 46 */          isNull$2 = in1.isNullAt(3);/* 47 */          field$2 = 
> org.apache.flink.table.data.binary.BinaryStringData.EMPTY_UTF8;/* 48 */       
>    if (!isNull$2) \{/* 49 */            field$2 = 
> ((org.apache.flink.table.data.binary.BinaryStringData) in1.getString(3));/* 
> 50 */          }/* 51 */          /* 52 */          ctx.element = element;/* 
> 53 */          /* 54 */          /* 55 */          /* 56 */          /* 57 */ 
>          if (isNull$1) \{/* 58 */            out.setField(0, null);/* 59 */   
>        } else \{/* 60 */            out.setField(0, field$1);/* 61 */         
>  }/* 62 */                    /* 63 */          /* 64 */          /* 65 */    
>       if (isNull$2) \{/* 66 */            out.setField(1, null);/* 67 */      
>     } else \{/* 68 */            out.setField(1, field$2);/* 69 */          
> }/* 70 */                    /* 71 */          /* 72 */          result$3 = 
> org.apache.flink.table.data.TimestampData.fromEpochMillis(ctx.timestamp());/* 
> 73 */          if (result$3 == null) \{/* 74 */            throw new 
> RuntimeException("Rowtime timestamp is null. Please make sure that a " +/* 75 
> */              "proper TimestampAssigner is defined and the stream 
> environment uses the EventTime " +/* 76 */              "time 
> characteristic.");/* 77 */          }/* 78 */          isNull$3 = false;/* 79 
> */          if (isNull$3) \{/* 80 */            out.setField(2, null);/* 81 
> */          } else \{/* 82 */            out.setField(2, result$3);/* 83 */   
>        }/* 84 */                    /* 85 */                  /* 86 */        
>   output.collect(outElement.replace(out));/* 87 */          ctx.element = 
> null;/* 88 */          /* 89 */        }/* 90 *//* 91 */        /* 92 *//* 93 
> */        @Override/* 94 */        public void close() throws Exception \{/* 
> 95 */           super.close();/* 96 */          /* 97 */        }/* 98 *//* 
> 99 */        /* 100 */      }/* 101 */    
> 10:17:58,568 ERROR org.apache.flink.streaming.runtime.tasks.StreamTask        
>   [] - Error during disposal of stream 
> operator.java.lang.NullPointerException: null at 
> org.apache.flink.table.runtime.operators.window.WindowOperator.dispose(WindowOperator.java:318)
>  ~[flink-table-runtime-blink_2.12-1.11.2.jar:1.11.2] at 
> org.apache.flink.streaming.runtime.tasks.StreamTask.disposeAllOperators(StreamTask.java:729)
>  [flink-streaming-java_2.12-1.11.2.jar:1.11.2] at 
> org.apache.flink.streaming.runtime.tasks.StreamTask.cleanUpInvoke(StreamTask.java:645)
>  [flink-streaming-java_2.12-1.11.2.jar:1.11.2] at 
> org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:549)
>  [flink-streaming-java_2.12-1.11.2.jar:1.11.2] at 
> org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:721) 
> [flink-runtime_2.12-1.11.2.jar:1.11.2] at 
> org.apache.flink.runtime.taskmanager.Task.run(Task.java:546) 
> [flink-runtime_2.12-1.11.2.jar:1.11.2] at 
> java.lang.Thread.run(Thread.java:748) [?:1.8.0_202]Exception in thread "main" 
> java.util.concurrent.ExecutionException: 
> org.apache.flink.runtime.client.JobExecutionException: Job execution failed. 
> at 
> java.util.concurrent.CompletableFuture.reportGet(CompletableFuture.java:357) 
> at java.util.concurrent.CompletableFuture.get(CompletableFuture.java:1895) at 
> org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.execute(StreamExecutionEnvironment.java:1717)
>  at 
> org.apache.flink.streaming.api.environment.LocalStreamEnvironment.execute(LocalStreamEnvironment.java:74)
>  at 
> org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.execute(StreamExecutionEnvironment.java:1697)
>  at 
> org.apache.flink.streaming.api.scala.StreamExecutionEnvironment.execute(StreamExecutionEnvironment.scala:699)
>  at 
> com.hiscat.flink.user.behavior.analysis.HotItemAnalysis$.main(HotItemAnalysis.scala:57)
>  at 
> com.hiscat.flink.user.behavior.analysis.HotItemAnalysis.main(HotItemAnalysis.scala)Caused
>  by: org.apache.flink.runtime.client.JobExecutionException: Job execution 
> failed. at 
> org.apache.flink.runtime.jobmaster.JobResult.toJobExecutionResult(JobResult.java:147)
>  at 
> org.apache.flink.client.program.PerJobMiniClusterFactory$PerJobMiniClusterJobClient.lambda$getJobExecutionResult$2(PerJobMiniClusterFactory.java:186)
>  at 
> java.util.concurrent.CompletableFuture.uniApply(CompletableFuture.java:602) 
> at 
> java.util.concurrent.CompletableFuture$UniApply.tryFire(CompletableFuture.java:577)
>  at 
> java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:474)
>  at 
> java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1962) 
> at 
> org.apache.flink.runtime.rpc.akka.AkkaInvocationHandler.lambda$invokeRpc$0(AkkaInvocationHandler.java:229)
>  at 
> java.util.concurrent.CompletableFuture.uniWhenComplete(CompletableFuture.java:760)
>  at 
> java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:736)
>  at 
> java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:474)
>  at 
> java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1962) 
> at 
> org.apache.flink.runtime.concurrent.FutureUtils$1.onComplete(FutureUtils.java:892)
>  at akka.dispatch.OnComplete.internal(Future.scala:264) at 
> akka.dispatch.OnComplete.internal(Future.scala:261) at 
> akka.dispatch.japi$CallbackBridge.apply(Future.scala:191) at 
> akka.dispatch.japi$CallbackBridge.apply(Future.scala:188) at 
> scala.concurrent.impl.CallbackRunnable.run$$$capture(Promise.scala:60) at 
> scala.concurrent.impl.CallbackRunnable.run(Promise.scala) at 
> org.apache.flink.runtime.concurrent.Executors$DirectExecutionContext.execute(Executors.java:74)
>  at scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:68) 
> at 
> scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:284)
>  at 
> scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:284)
>  at 
> scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284) 
> at akka.pattern.PromiseActorRef.$bang(AskSupport.scala:573) at 
> akka.pattern.PipeToSupport$PipeableFuture$$anonfun$pipeTo$1.applyOrElse(PipeToSupport.scala:22)
>  at 
> akka.pattern.PipeToSupport$PipeableFuture$$anonfun$pipeTo$1.applyOrElse(PipeToSupport.scala:21)
>  at scala.concurrent.Future.$anonfun$andThen$1(Future.scala:532) at 
> scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:29) at 
> scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:29) at 
> scala.concurrent.impl.CallbackRunnable.run$$$capture(Promise.scala:60) at 
> scala.concurrent.impl.CallbackRunnable.run(Promise.scala) at 
> akka.dispatch.BatchingExecutor$AbstractBatch.processBatch(BatchingExecutor.scala:55)
>  at 
> akka.dispatch.BatchingExecutor$BlockableBatch.$anonfun$run$1(BatchingExecutor.scala:91)
>  at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:12) at 
> scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:81) at 
> akka.dispatch.BatchingExecutor$BlockableBatch.run(BatchingExecutor.scala:91) 
> at akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:40) at 
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(ForkJoinExecutorConfigurator.scala:44)
>  at akka.dispatch.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260) at 
> akka.dispatch.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
> at akka.dispatch.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) at 
> akka.dispatch.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)Caused
>  by: org.apache.flink.runtime.JobException: Recovery is suppressed by 
> NoRestartBackoffTimeStrategy at 
> org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.handleFailure(ExecutionFailureHandler.java:116)
>  at 
> org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.getFailureHandlingResult(ExecutionFailureHandler.java:78)
>  at 
> org.apache.flink.runtime.scheduler.DefaultScheduler.handleTaskFailure(DefaultScheduler.java:192)
>  at 
> org.apache.flink.runtime.scheduler.DefaultScheduler.maybeHandleTaskFailure(DefaultScheduler.java:185)
>  at 
> org.apache.flink.runtime.scheduler.DefaultScheduler.updateTaskExecutionStateInternal(DefaultScheduler.java:179)
>  at 
> org.apache.flink.runtime.scheduler.SchedulerBase.updateTaskExecutionState(SchedulerBase.java:503)
>  at 
> org.apache.flink.runtime.jobmaster.JobMaster.updateTaskExecutionState(JobMaster.java:386)
>  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 
> at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>  at java.lang.reflect.Method.invoke(Method.java:498) at 
> org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcInvocation(AkkaRpcActor.java:284)
>  at 
> org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcMessage(AkkaRpcActor.java:199)
>  at 
> org.apache.flink.runtime.rpc.akka.FencedAkkaRpcActor.handleRpcMessage(FencedAkkaRpcActor.java:74)
>  at 
> org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleMessage(AkkaRpcActor.java:152)
>  at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:26) at 
> akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:21) at 
> scala.PartialFunction.applyOrElse(PartialFunction.scala:123) at 
> scala.PartialFunction.applyOrElse$(PartialFunction.scala:122) at 
> akka.japi.pf.UnitCaseStatement.applyOrElse(CaseStatements.scala:21) at 
> scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:171) at 
> scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:172) at 
> scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:172) at 
> akka.actor.Actor.aroundReceive(Actor.scala:517) at 
> akka.actor.Actor.aroundReceive$(Actor.scala:515) at 
> akka.actor.AbstractActor.aroundReceive(AbstractActor.scala:225) at 
> akka.actor.ActorCell.receiveMessage(ActorCell.scala:592) at 
> akka.actor.ActorCell.invoke(ActorCell.scala:561) at 
> akka.dispatch.Mailbox.processMailbox(Mailbox.scala:258) at 
> akka.dispatch.Mailbox.run(Mailbox.scala:225) at 
> akka.dispatch.Mailbox.exec(Mailbox.scala:235) ... 4 moreCaused by: 
> java.lang.RuntimeException: Could not instantiate generated class 
> 'SourceConversion$4' at 
> org.apache.flink.table.runtime.generated.GeneratedClass.newInstance(GeneratedClass.java:67)
>  at 
> org.apache.flink.table.runtime.operators.CodeGenOperatorFactory.createStreamOperator(CodeGenOperatorFactory.java:40)
>  at 
> org.apache.flink.streaming.api.operators.StreamOperatorFactoryUtil.createOperator(StreamOperatorFactoryUtil.java:70)
>  at 
> org.apache.flink.streaming.runtime.tasks.OperatorChain.createChainedOperator(OperatorChain.java:470)
>  at 
> org.apache.flink.streaming.runtime.tasks.OperatorChain.createOutputCollector(OperatorChain.java:393)
>  at 
> org.apache.flink.streaming.runtime.tasks.OperatorChain.createChainedOperator(OperatorChain.java:459)
>  at 
> org.apache.flink.streaming.runtime.tasks.OperatorChain.createOutputCollector(OperatorChain.java:393)
>  at 
> org.apache.flink.streaming.runtime.tasks.OperatorChain.createChainedOperator(OperatorChain.java:459)
>  at 
> org.apache.flink.streaming.runtime.tasks.OperatorChain.createOutputCollector(OperatorChain.java:393)
>  at 
> org.apache.flink.streaming.runtime.tasks.OperatorChain.<init>(OperatorChain.java:155)
>  at 
> org.apache.flink.streaming.runtime.tasks.StreamTask.beforeInvoke(StreamTask.java:459)
>  at 
> org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:528)
>  at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:721) at 
> org.apache.flink.runtime.taskmanager.Task.run(Task.java:546) at 
> java.lang.Thread.run(Thread.java:748)Caused by: 
> org.apache.flink.util.FlinkRuntimeException: 
> org.apache.flink.api.common.InvalidProgramException: Table program cannot be 
> compiled. This is a bug. Please file an issue. at 
> org.apache.flink.table.runtime.generated.CompileUtils.compile(CompileUtils.java:68)
>  at 
> org.apache.flink.table.runtime.generated.GeneratedClass.compile(GeneratedClass.java:78)
>  at 
> org.apache.flink.table.runtime.generated.GeneratedClass.newInstance(GeneratedClass.java:65)
>  ... 14 moreCaused by: 
> org.apache.flink.shaded.guava18.com.google.common.util.concurrent.UncheckedExecutionException:
>  org.apache.flink.api.common.InvalidProgramException: Table program cannot be 
> compiled. This is a bug. Please file an issue. at 
> org.apache.flink.shaded.guava18.com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2203)
>  at 
> org.apache.flink.shaded.guava18.com.google.common.cache.LocalCache.get(LocalCache.java:3937)
>  at 
> org.apache.flink.shaded.guava18.com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4739)
>  at 
> org.apache.flink.table.runtime.generated.CompileUtils.compile(CompileUtils.java:66)
>  ... 16 moreCaused by: org.apache.flink.api.common.InvalidProgramException: 
> Table program cannot be compiled. This is a bug. Please file an issue. at 
> org.apache.flink.table.runtime.generated.CompileUtils.doCompile(CompileUtils.java:81)
>  at 
> org.apache.flink.table.runtime.generated.CompileUtils.lambda$compile$1(CompileUtils.java:66)
>  at 
> org.apache.flink.shaded.guava18.com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4742)
>  at 
> org.apache.flink.shaded.guava18.com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3527)
>  at 
> org.apache.flink.shaded.guava18.com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2319)
>  at 
> org.apache.flink.shaded.guava18.com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2282)
>  at 
> org.apache.flink.shaded.guava18.com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2197)
>  ... 19 moreCaused by: org.codehaus.commons.compiler.CompileException: Line 
> 33, Column 200: Unexpected selector 'package' after "." at 
> org.codehaus.janino.Parser.compileException(Parser.java:3482) at 
> org.codehaus.janino.Parser.parseSelector(Parser.java:3147) at 
> org.codehaus.janino.Parser.parseUnaryExpression(Parser.java:2761) at 
> org.codehaus.janino.Parser.parseMultiplicativeExpression(Parser.java:2717) at 
> org.codehaus.janino.Parser.parseAdditiveExpression(Parser.java:2696) at 
> org.codehaus.janino.Parser.parseShiftExpression(Parser.java:2675) at 
> org.codehaus.janino.Parser.parseRelationalExpression(Parser.java:2599) at 
> org.codehaus.janino.Parser.parseEqualityExpression(Parser.java:2573) at 
> org.codehaus.janino.Parser.parseAndExpression(Parser.java:2552) at 
> org.codehaus.janino.Parser.parseExclusiveOrExpression(Parser.java:2531) at 
> org.codehaus.janino.Parser.parseInclusiveOrExpression(Parser.java:2510) at 
> org.codehaus.janino.Parser.parseConditionalAndExpression(Parser.java:2489) at 
> org.codehaus.janino.Parser.parseConditionalOrExpression(Parser.java:2468) at 
> org.codehaus.janino.Parser.parseConditionalExpression(Parser.java:2449) at 
> org.codehaus.janino.Parser.parseAssignmentExpression(Parser.java:2428) at 
> org.codehaus.janino.Parser.parseExpression(Parser.java:2413) at 
> org.codehaus.janino.Parser.parsePrimary(Parser.java:2828) at 
> org.codehaus.janino.Parser.parseUnaryExpression(Parser.java:2758) at 
> org.codehaus.janino.Parser.parseMultiplicativeExpression(Parser.java:2717) at 
> org.codehaus.janino.Parser.parseAdditiveExpression(Parser.java:2696) at 
> org.codehaus.janino.Parser.parseShiftExpression(Parser.java:2675) at 
> org.codehaus.janino.Parser.parseRelationalExpression(Parser.java:2599) at 
> org.codehaus.janino.Parser.parseEqualityExpression(Parser.java:2573) at 
> org.codehaus.janino.Parser.parseAndExpression(Parser.java:2552) at 
> org.codehaus.janino.Parser.parseExclusiveOrExpression(Parser.java:2531) at 
> org.codehaus.janino.Parser.parseInclusiveOrExpression(Parser.java:2510) at 
> org.codehaus.janino.Parser.parseConditionalAndExpression(Parser.java:2489) at 
> org.codehaus.janino.Parser.parseConditionalOrExpression(Parser.java:2468) at 
> org.codehaus.janino.Parser.parseConditionalExpression(Parser.java:2449) at 
> org.codehaus.janino.Parser.parseAssignmentExpression(Parser.java:2428) at 
> org.codehaus.janino.Parser.parseExpression(Parser.java:2413) at 
> org.codehaus.janino.Parser.parseArgumentList(Parser.java:3214) at 
> org.codehaus.janino.Parser.parseArguments(Parser.java:3200) at 
> org.codehaus.janino.Parser.parsePrimary(Parser.java:2868) at 
> org.codehaus.janino.Parser.parseUnaryExpression(Parser.java:2758) at 
> org.codehaus.janino.Parser.parsePrimary(Parser.java:2841) at 
> org.codehaus.janino.Parser.parseUnaryExpression(Parser.java:2758) at 
> org.codehaus.janino.Parser.parsePrimary(Parser.java:2841) at 
> org.codehaus.janino.Parser.parseUnaryExpression(Parser.java:2758) at 
> org.codehaus.janino.Parser.parseMultiplicativeExpression(Parser.java:2717) at 
> org.codehaus.janino.Parser.parseAdditiveExpression(Parser.java:2696) at 
> org.codehaus.janino.Parser.parseShiftExpression(Parser.java:2675) at 
> org.codehaus.janino.Parser.parseRelationalExpression(Parser.java:2599) at 
> org.codehaus.janino.Parser.parseEqualityExpression(Parser.java:2573) at 
> org.codehaus.janino.Parser.parseAndExpression(Parser.java:2552) at 
> org.codehaus.janino.Parser.parseExclusiveOrExpression(Parser.java:2531) at 
> org.codehaus.janino.Parser.parseInclusiveOrExpression(Parser.java:2510) at 
> org.codehaus.janino.Parser.parseConditionalAndExpression(Parser.java:2489) at 
> org.codehaus.janino.Parser.parseConditionalOrExpression(Parser.java:2468) at 
> org.codehaus.janino.Parser.parseConditionalExpression(Parser.java:2449) at 
> org.codehaus.janino.Parser.parseAssignmentExpression(Parser.java:2428) at 
> org.codehaus.janino.Parser.parseExpression(Parser.java:2413) at 
> org.codehaus.janino.Parser.parseVariableInitializer(Parser.java:1412) at 
> org.codehaus.janino.Parser.parseVariableDeclaratorRest(Parser.java:1695) at 
> org.codehaus.janino.Parser.parseVariableDeclarator(Parser.java:1678) at 
> org.codehaus.janino.Parser.parseVariableDeclarators(Parser.java:1640) at 
> org.codehaus.janino.Parser.parseBlockStatement(Parser.java:1625) at 
> org.codehaus.janino.Parser.parseBlockStatements(Parser.java:1544) at 
> org.codehaus.janino.Parser.parseMethodDeclarationRest(Parser.java:1381) at 
> org.codehaus.janino.Parser.parseClassBodyDeclaration(Parser.java:834) at 
> org.codehaus.janino.Parser.parseClassBody(Parser.java:732) at 
> org.codehaus.janino.Parser.parseClassDeclarationRest(Parser.java:638) at 
> org.codehaus.janino.Parser.parsePackageMemberTypeDeclarationRest(Parser.java:366)
>  at org.codehaus.janino.Parser.parseCompilationUnit(Parser.java:237) at 
> org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:216) at 
> org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:207) at 
> org.codehaus.commons.compiler.Cookable.cook(Cookable.java:80) at 
> org.codehaus.commons.compiler.Cookable.cook(Cookable.java:75) at 
> org.apache.flink.table.runtime.generated.CompileUtils.doCompile(CompileUtils.java:78)
>  ... 25 more
> {code}



--
This message was sent by Atlassian Jira
(v8.3.4#803005)

Reply via email to