[ https://issues.apache.org/jira/browse/HIVE-24104?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Zoltan Haindrich resolved HIVE-24104.
-------------------------------------
    Fix Version/s: 4.0.0
       Resolution: Fixed

Merged into master. Thank you [~zabetak]!

> NPE due to null key columns in ReduceSink after deduplication
> -------------------------------------------------------------
>
>                 Key: HIVE-24104
>                 URL: https://issues.apache.org/jira/browse/HIVE-24104
>             Project: Hive
>          Issue Type: Bug
>          Components: Physical Optimizer
>            Reporter: Stamatis Zampetakis
>            Assignee: Stamatis Zampetakis
>            Priority: Major
>              Labels: pull-request-available
>             Fix For: 4.0.0
>
>          Time Spent: 40m
>  Remaining Estimate: 0h
>
> In some cases, the {{ReduceSinkDeDuplication}} optimization creates ReduceSink 
> operators whose key columns are null. This can lead to an NPE in various 
> places in the code.
> The following stack traces show some of the places where the NPE surfaces. Note 
> that the two traces do not come from the same query.
> +NPE during planning+
> {noformat}
> java.lang.NullPointerException
>       at 
> org.apache.hadoop.hive.ql.plan.ExprNodeDesc$ExprNodeDescEqualityWrapper.equals(ExprNodeDesc.java:141)
>       at java.util.AbstractList.equals(AbstractList.java:523)
>       at 
> org.apache.hadoop.hive.ql.optimizer.SetReducerParallelism.process(SetReducerParallelism.java:101)
>       at 
> org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher.dispatch(DefaultRuleDispatcher.java:90)
>       at 
> org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.dispatchAndReturn(DefaultGraphWalker.java:105)
>       at 
> org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.dispatch(DefaultGraphWalker.java:89)
>       at 
> org.apache.hadoop.hive.ql.lib.ForwardWalker.walk(ForwardWalker.java:74)
>       at 
> org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.startWalking(DefaultGraphWalker.java:120)
>       at 
> org.apache.hadoop.hive.ql.parse.TezCompiler.runStatsDependentOptimizations(TezCompiler.java:492)
>       at 
> org.apache.hadoop.hive.ql.parse.TezCompiler.optimizeOperatorPlan(TezCompiler.java:226)
>       at 
> org.apache.hadoop.hive.ql.parse.TaskCompiler.compile(TaskCompiler.java:161)
>       at 
> org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeInternal(SemanticAnalyzer.java:12643)
>       at 
> org.apache.hadoop.hive.ql.parse.CalcitePlanner.analyzeInternal(CalcitePlanner.java:443)
>       at 
> org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:301)
>       at 
> org.apache.hadoop.hive.ql.parse.ExplainSemanticAnalyzer.analyzeInternal(ExplainSemanticAnalyzer.java:171)
>       at 
> org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:301)
>       at org.apache.hadoop.hive.ql.Compiler.analyze(Compiler.java:220)
>       at org.apache.hadoop.hive.ql.Compiler.compile(Compiler.java:104)
>       at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:173)
>       at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:414)
>       at org.apache.hadoop.hive.ql.Driver.compileAndRespond(Driver.java:363)
>       at org.apache.hadoop.hive.ql.Driver.compileAndRespond(Driver.java:357)
>       at 
> org.apache.hadoop.hive.ql.reexec.ReExecDriver.compileAndRespond(ReExecDriver.java:129)
>       at 
> org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:231)
>       at 
> org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:258)
>       at org.apache.hadoop.hive.cli.CliDriver.processCmd1(CliDriver.java:203)
>       at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:129)
>       at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:424)
>       at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:355)
>       at 
> org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:740)
>       at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:710)
>       at 
> org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:170)
>       at 
> org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:157)
>       at 
> org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver(TestMiniLlapLocalCliDriver.java:62)
>       at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>       at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>       at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>       at java.lang.reflect.Method.invoke(Method.java:498)
>       at 
> org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
>       at 
> org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
>       at 
> org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
>       at 
> org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
>       at 
> org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:135)
>       at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
>       at 
> org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100)
>       at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366)
>       at 
> org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103)
>       at 
> org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63)
>       at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
>       at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
>       at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
>       at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
>       at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
>       at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
>       at org.junit.runners.Suite.runChild(Suite.java:128)
>       at org.junit.runners.Suite.runChild(Suite.java:27)
>       at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
>       at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
>       at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
>       at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
>       at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
>       at 
> org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:95)
>       at org.junit.rules.RunRules.evaluate(RunRules.java:20)
>       at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
>       at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
>       at 
> org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365)
>       at 
> org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273)
>       at 
> org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238)
>       at 
> org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159)
>       at 
> org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:377)
>       at 
> org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:138)
>       at 
> org.apache.maven.surefire.booter.ForkedBooter.run(ForkedBooter.java:465)
>       at 
> org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:451)
> {noformat}
> +NPE at runtime+
> {noformat}
> org.apache.hadoop.hive.ql.metadata.HiveException: Vertex failed, 
> vertexName=Map 1, vertexId=vertex_1598975134540_0001_2_00, diagnostics=[Task 
> failed, taskId=task_1598975134540_0001_2_00_000000, diagnostics=[TaskAttempt 
> 0 failed, info=[Error: Error while running task ( failure ) : 
> attempt_1598975134540_0001_2_00_000000_0:java.lang.RuntimeException: 
> java.lang.RuntimeException: Map operator initialization failed
>       at 
> org.apache.hadoop.hive.ql.exec.tez.TezProcessor.initializeAndRunProcessor(TezProcessor.java:296)
>       at 
> org.apache.hadoop.hive.ql.exec.tez.TezProcessor.run(TezProcessor.java:250)
>       at 
> org.apache.tez.runtime.LogicalIOProcessorRuntimeTask.run(LogicalIOProcessorRuntimeTask.java:374)
>       at 
> org.apache.tez.runtime.task.TaskRunner2Callable$1.run(TaskRunner2Callable.java:73)
>       at 
> org.apache.tez.runtime.task.TaskRunner2Callable$1.run(TaskRunner2Callable.java:61)
>       at java.security.AccessController.doPrivileged(Native Method)
>       at javax.security.auth.Subject.doAs(Subject.java:422)
>       at 
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1682)
>       at 
> org.apache.tez.runtime.task.TaskRunner2Callable.callInternal(TaskRunner2Callable.java:61)
>       at 
> org.apache.tez.runtime.task.TaskRunner2Callable.callInternal(TaskRunner2Callable.java:37)
>       at org.apache.tez.common.CallableWithNdc.call(CallableWithNdc.java:36)
>       at 
> org.apache.hadoop.hive.llap.daemon.impl.StatsRecordingThreadPool$WrappedCallable.call(StatsRecordingThreadPool.java:118)
>       at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>       at 
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
>       at 
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
>       at java.lang.Thread.run(Thread.java:748)
> Caused by: java.lang.RuntimeException: Map operator initialization failed
>       at 
> org.apache.hadoop.hive.ql.exec.tez.MapRecordProcessor.init(MapRecordProcessor.java:351)
>       at 
> org.apache.hadoop.hive.ql.exec.tez.TezProcessor.initializeAndRunProcessor(TezProcessor.java:266)
>       ... 15 more
> Caused by: java.lang.RuntimeException: java.lang.NullPointerException
>       at 
> org.apache.hadoop.hive.ql.exec.ReduceSinkOperator.initializeOp(ReduceSinkOperator.java:242)
>       at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:359)
>       at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:548)
>       at 
> org.apache.hadoop.hive.ql.exec.Operator.initializeChildren(Operator.java:502)
>       at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:368)
>       at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:548)
>       at 
> org.apache.hadoop.hive.ql.exec.Operator.initializeChildren(Operator.java:502)
>       at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:368)
>       at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:548)
>       at 
> org.apache.hadoop.hive.ql.exec.Operator.initializeChildren(Operator.java:502)
>       at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:368)
>       at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:548)
>       at 
> org.apache.hadoop.hive.ql.exec.Operator.initializeChildren(Operator.java:502)
>       at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:368)
>       at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:548)
>       at 
> org.apache.hadoop.hive.ql.exec.Operator.initializeChildren(Operator.java:502)
>       at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:368)
>       at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:548)
>       at 
> org.apache.hadoop.hive.ql.exec.Operator.initializeChildren(Operator.java:502)
>       at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:368)
>       at 
> org.apache.hadoop.hive.ql.exec.MapOperator.initializeMapOperator(MapOperator.java:506)
>       at 
> org.apache.hadoop.hive.ql.exec.tez.MapRecordProcessor.init(MapRecordProcessor.java:314)
>       ... 16 more
> Caused by: java.lang.NullPointerException
>       at 
> org.apache.hadoop.hive.ql.exec.ReduceSinkOperator.initializeOp(ReduceSinkOperator.java:160)
>       ... 37 more
> ], TaskAttempt 1 failed, info=[Error: Error while running task ( failure ) : 
> attempt_1598975134540_0001_2_00_000000_1:java.lang.RuntimeException: 
> java.lang.RuntimeException: Map operator initialization failed
>       at 
> org.apache.hadoop.hive.ql.exec.tez.TezProcessor.initializeAndRunProcessor(TezProcessor.java:296)
>       at 
> org.apache.hadoop.hive.ql.exec.tez.TezProcessor.run(TezProcessor.java:250)
>       at 
> org.apache.tez.runtime.LogicalIOProcessorRuntimeTask.run(LogicalIOProcessorRuntimeTask.java:374)
>       at 
> org.apache.tez.runtime.task.TaskRunner2Callable$1.run(TaskRunner2Callable.java:73)
>       at 
> org.apache.tez.runtime.task.TaskRunner2Callable$1.run(TaskRunner2Callable.java:61)
>       at java.security.AccessController.doPrivileged(Native Method)
>       at javax.security.auth.Subject.doAs(Subject.java:422)
>       at 
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1682)
>       at 
> org.apache.tez.runtime.task.TaskRunner2Callable.callInternal(TaskRunner2Callable.java:61)
>       at 
> org.apache.tez.runtime.task.TaskRunner2Callable.callInternal(TaskRunner2Callable.java:37)
>       at org.apache.tez.common.CallableWithNdc.call(CallableWithNdc.java:36)
>       at 
> org.apache.hadoop.hive.llap.daemon.impl.StatsRecordingThreadPool$WrappedCallable.call(StatsRecordingThreadPool.java:118)
>       at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>       at 
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
>       at 
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
>       at java.lang.Thread.run(Thread.java:748)
> Caused by: java.lang.RuntimeException: Map operator initialization failed
>       at 
> org.apache.hadoop.hive.ql.exec.tez.MapRecordProcessor.init(MapRecordProcessor.java:351)
>       at 
> org.apache.hadoop.hive.ql.exec.tez.TezProcessor.initializeAndRunProcessor(TezProcessor.java:266)
>       ... 15 more
> Caused by: java.lang.RuntimeException: java.lang.NullPointerException
>       at 
> org.apache.hadoop.hive.ql.exec.ReduceSinkOperator.initializeOp(ReduceSinkOperator.java:242)
>       at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:359)
>       at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:548)
>       at 
> org.apache.hadoop.hive.ql.exec.Operator.initializeChildren(Operator.java:502)
>       at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:368)
>       at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:548)
>       at 
> org.apache.hadoop.hive.ql.exec.Operator.initializeChildren(Operator.java:502)
>       at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:368)
>       at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:548)
>       at 
> org.apache.hadoop.hive.ql.exec.Operator.initializeChildren(Operator.java:502)
>       at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:368)
>       at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:548)
>       at 
> org.apache.hadoop.hive.ql.exec.Operator.initializeChildren(Operator.java:502)
>       at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:368)
>       at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:548)
>       at 
> org.apache.hadoop.hive.ql.exec.Operator.initializeChildren(Operator.java:502)
>       at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:368)
>       at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:548)
>       at 
> org.apache.hadoop.hive.ql.exec.Operator.initializeChildren(Operator.java:502)
>       at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:368)
>       at 
> org.apache.hadoop.hive.ql.exec.MapOperator.initializeMapOperator(MapOperator.java:506)
>       at 
> org.apache.hadoop.hive.ql.exec.tez.MapRecordProcessor.init(MapRecordProcessor.java:314)
>       ... 16 more
> Caused by: java.lang.NullPointerException
>       at 
> org.apache.hadoop.hive.ql.exec.ReduceSinkOperator.initializeOp(ReduceSinkOperator.java:160)
>       ... 37 more
> ]], Vertex did not succeed due to OWN_TASK_FAILURE, failedTasks:1 
> killedTasks:0, Vertex vertex_1598975134540_0001_2_00 [Map 1] killed/failed 
> due to:OWN_TASK_FAILURE]Vertex killed, vertexName=Reducer 2, 
> vertexId=vertex_1598975134540_0001_2_01, diagnostics=[Vertex received Kill 
> while in RUNNING state., Vertex did not succeed due to OTHER_VERTEX_FAILURE, 
> failedTasks:0 killedTasks:1, Vertex vertex_1598975134540_0001_2_01 [Reducer 
> 2] killed/failed due to:OTHER_VERTEX_FAILURE]DAG did not succeed due to 
> VERTEX_FAILURE. failedVertices:1 killedVertices:1
>       at org.apache.hadoop.hive.ql.exec.tez.TezTask.execute(TezTask.java:244)
>       at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:213)
>       at 
> org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:105)
>       at org.apache.hadoop.hive.ql.Executor.launchTask(Executor.java:361)
>       at org.apache.hadoop.hive.ql.Executor.launchTasks(Executor.java:334)
>       at org.apache.hadoop.hive.ql.Executor.runTasks(Executor.java:245)
>       at org.apache.hadoop.hive.ql.Executor.execute(Executor.java:108)
>       at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:498)
>       at org.apache.hadoop.hive.ql.Driver.run(Driver.java:307)
>       at org.apache.hadoop.hive.ql.Driver.run(Driver.java:302)
>       at 
> org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:166)
>       at 
> org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:232)
>       at 
> org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:258)
>       at org.apache.hadoop.hive.cli.CliDriver.processCmd1(CliDriver.java:203)
>       at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:129)
>       at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:424)
>       at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:355)
>       at 
> org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:740)
>       at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:710)
>       at 
> org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:170)
>       at 
> org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:157)
>       at 
> org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver(TestMiniLlapLocalCliDriver.java:62)
>       at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>       at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>       at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>       at java.lang.reflect.Method.invoke(Method.java:498)
>       at 
> org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
>       at 
> org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
>       at 
> org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
>       at 
> org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
>       at 
> org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:135)
>       at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
>       at 
> org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100)
>       at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366)
>       at 
> org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103)
>       at 
> org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63)
>       at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
>       at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
>       at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
>       at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
>       at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
>       at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
>       at org.junit.runners.Suite.runChild(Suite.java:128)
>       at org.junit.runners.Suite.runChild(Suite.java:27)
>       at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
>       at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
>       at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
>       at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
>       at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
>       at 
> org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:95)
>       at org.junit.rules.RunRules.evaluate(RunRules.java:20)
>       at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
>       at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
>       at 
> org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365)
>       at 
> org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273)
>       at 
> org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238)
>       at 
> org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159)
>       at 
> org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:377)
>       at 
> org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:138)
>       at 
> org.apache.maven.surefire.booter.ForkedBooter.run(ForkedBooter.java:465)
>       at 
> org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:451)
> {noformat}



--
This message was sent by Atlassian Jira
(v8.3.4#803005)
