Let me guess: your query contains an 'IN' clause with 1 million static values? :)
* The brute-force solution is to set HADOOP_CLIENT_OPTS=-Xmx8G (or whatever) before you run beeline to force a larger memory size (I'm pretty sure Beeline uses that env var, though I didn't actually check the script). * A more scalable solution (if my premise above is correct) would be to join to a table containing your values. But hey, maybe you actually have an 8-million-byte SQL query that doesn't contain static data - that could be a world record! Cheers, Stephen On Thu, Sep 1, 2016 at 6:21 AM, Adam <work....@gmail.com> wrote: > Hive Version: 2.1.0 > I have a very large, multi-line input query (8,668,519 chars) and I have > gone up to 16g heap and still get the same OOM. > > > Error: Error running query: java.lang.OutOfMemoryError: Java heap space > (state=,code=0) > org.apache.hive.service.cli.HiveSQLException: Error running query: > java.lang.OutOfMemoryError: Java heap space > at org.apache.hive.jdbc.Utils.verifySuccess(Utils.java:264) > at org.apache.hive.jdbc.Utils.verifySuccessWithInfo(Utils.java: > 250) > at org.apache.hive.jdbc.HiveStatement.runAsyncOnServer(HiveStat > ement.java:309) > at org.apache.hive.jdbc.HiveStatement.execute(HiveStatement. > java:250) > at org.apache.hive.beeline.Commands.executeInternal(Commands. 
> java:977) > at org.apache.hive.beeline.Commands.execute(Commands.java:1148) > at org.apache.hive.beeline.Commands.sql(Commands.java:1063) > at org.apache.hive.beeline.BeeLine.dispatch(BeeLine.java:1134) > at org.apache.hive.beeline.BeeLine.execute(BeeLine.java:965) > at org.apache.hive.beeline.BeeLine.executeFile(BeeLine.java:940) > at org.apache.hive.beeline.BeeLine.begin(BeeLine.java:867) > at org.apache.hive.beeline.BeeLine.mainWithInputRedirection(Bee > Line.java:499) > at org.apache.hive.beeline.BeeLine.main(BeeLine.java:482) > at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) > at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAcce > ssorImpl.java:62) > at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMe > thodAccessorImpl.java:43) > at java.lang.reflect.Method.invoke(Method.java:498) > at org.apache.hadoop.util.RunJar.run(RunJar.java:221) > at org.apache.hadoop.util.RunJar.main(RunJar.java:136) > Caused by: org.apache.hive.service.cli.HiveSQLException: Error running > query: java.lang.OutOfMemoryError: Java heap space > at org.apache.hive.service.cli.operation.SQLOperation.prepare( > SQLOperation.java:218) > at org.apache.hive.service.cli.operation.SQLOperation.runIntern > al(SQLOperation.java:269) > at org.apache.hive.service.cli.operation.Operation.run(Operatio > n.java:324) > at org.apache.hive.service.cli.session.HiveSessionImpl.executeS > tatementInternal(HiveSessionImpl.java:460) > at org.apache.hive.service.cli.session.HiveSessionImpl.executeS > tatementAsync(HiveSessionImpl.java:447) > at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) > at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAcce > ssorImpl.java:62) > at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMe > thodAccessorImpl.java:43) > at java.lang.reflect.Method.invoke(Method.java:498) > at org.apache.hive.service.cli.session.HiveSessionProxy.invoke( > HiveSessionProxy.java:78) > at 
org.apache.hive.service.cli.session.HiveSessionProxy.access$ > 000(HiveSessionProxy.java:36) > at org.apache.hive.service.cli.session.HiveSessionProxy$1.run( > HiveSessionProxy.java:63) > at java.security.AccessController.doPrivileged(Native Method) > at javax.security.auth.Subject.doAs(Subject.java:422) > at org.apache.hadoop.security.UserGroupInformation.doAs(UserGro > upInformation.java:1657) > at org.apache.hive.service.cli.session.HiveSessionProxy.invoke( > HiveSessionProxy.java:59) > at com.sun.proxy.$Proxy33.executeStatementAsync(Unknown Source) > at org.apache.hive.service.cli.CLIService.executeStatementAsync > (CLIService.java:294) > at org.apache.hive.service.cli.thrift.ThriftCLIService.ExecuteS > tatement(ThriftCLIService.java:497) > at org.apache.hive.service.rpc.thrift.TCLIService$Processor$Exe > cuteStatement.getResult(TCLIService.java:1437) > at org.apache.hive.service.rpc.thrift.TCLIService$Processor$Exe > cuteStatement.getResult(TCLIService.java:1422) > at org.apache.thrift.ProcessFunction.process(ProcessFunction. > java:39) > at org.apache.thrift.TBaseProcessor.process(TBaseProcessor. > java:39) > at org.apache.hive.service.auth.TSetIpAddressProcessor.process( > TSetIpAddressProcessor.java:56) > at org.apache.thrift.server.TThreadPoolServer$WorkerProcess. 
> run(TThreadPoolServer.java:286) > at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPool > Executor.java:1142) > at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoo > lExecutor.java:617) > at java.lang.Thread.run(Thread.java:745) > Caused by: java.lang.OutOfMemoryError: Java heap space > at java.util.Arrays.copyOf(Arrays.java:3332) > at java.lang.AbstractStringBuilder.ensureCapacityInternal(Abstr > actStringBuilder.java:124) > at java.lang.AbstractStringBuilder.append(AbstractStringBuilder > .java:448) > at java.lang.StringBuffer.append(StringBuffer.java:270) > at java.io.StringWriter.write(StringWriter.java:112) > at java.io.PrintWriter.write(PrintWriter.java:456) > at java.io.PrintWriter.write(PrintWriter.java:473) > at org.apache.calcite.rel.AbstractRelNode$1.explain_(AbstractRe > lNode.java:409) > at org.apache.calcite.rel.externalize.RelWriterImpl.done( > RelWriterImpl.java:157) > at org.apache.calcite.rel.AbstractRelNode.explain(AbstractRelNo > de.java:308) > at org.apache.calcite.rel.AbstractRelNode.computeDigest(Abstrac > tRelNode.java:416) > at org.apache.calcite.rel.AbstractRelNode.recomputeDigest(Abstr > actRelNode.java:352) > at org.apache.calcite.plan.hep.HepPlanner.buildFinalPlan(HepPla > nner.java:881) > at org.apache.calcite.plan.hep.HepPlanner.findBestExp(HepPlanne > r.java:199) > at org.apache.hadoop.hive.ql.parse.CalcitePlanner$CalcitePlanne > rAction.hepPlan(CalcitePlanner.java:1309) > at org.apache.hadoop.hive.ql.parse.CalcitePlanner$CalcitePlanne > rAction.hepPlan(CalcitePlanner.java:1263) > at org.apache.hadoop.hive.ql.parse.CalcitePlanner$CalcitePlanne > rAction.applyPreJoinOrderingTransforms(CalcitePlanner.java:1130) > at org.apache.hadoop.hive.ql.parse.CalcitePlanner$CalcitePlanne > rAction.apply(CalcitePlanner.java:962) > at org.apache.hadoop.hive.ql.parse.CalcitePlanner$CalcitePlanne > rAction.apply(CalcitePlanner.java:893) > at org.apache.calcite.tools.Frameworks$1.apply(Frameworks.java: > 113) > at 
org.apache.calcite.prepare.CalcitePrepareImpl.perform(Calcit > ePrepareImpl.java:969) > at org.apache.calcite.tools.Frameworks.withPrepare(Frameworks. > java:149) > at org.apache.calcite.tools.Frameworks.withPlanner(Frameworks. > java:106) > at org.apache.hadoop.hive.ql.parse.CalcitePlanner.getOptimizedA > ST(CalcitePlanner.java:712) > at org.apache.hadoop.hive.ql.parse.CalcitePlanner.genOPTree( > CalcitePlanner.java:280) > at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeInte > rnal(SemanticAnalyzer.java:10755) > at org.apache.hadoop.hive.ql.parse.CalcitePlanner.analyzeIntern > al(CalcitePlanner.java:239) > at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze > (BaseSemanticAnalyzer.java:250) > at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:437) > at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:329) > at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java > :1158) > at org.apache.hadoop.hive.ql.Driver.compileAndRespond(Driver. > java:1145) >