[ 
https://issues.apache.org/jira/browse/PHOENIX-3322?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15514507#comment-15514507
 ] 

James Taylor commented on PHOENIX-3322:
---------------------------------------

Two options to fix this:
- Add the /*+ USE_SORT_MERGE_JOIN */ after SELECT.
- Increase the allowed size of the hash cache via the client-side 
phoenix.query.maxServerCacheBytes setting in your hbase-site.xml.

> TPCH 100 query 2 exceeds size of hash cache
> -------------------------------------------
>
>                 Key: PHOENIX-3322
>                 URL: https://issues.apache.org/jira/browse/PHOENIX-3322
>             Project: Phoenix
>          Issue Type: Bug
>    Affects Versions: 4.8.0
>         Environment: HDP 2.4.2 + 4.8.0 binary download
>            Reporter: Aaron Molitor
>
> Executing TPC-H query 2 results in the following error:
> h5. output from sqlline:
> {noformat}
> SLF4J: Class path contains multiple SLF4J bindings.
> SLF4J: Found binding in 
> [jar:file:/opt/phoenix/apache-phoenix-4.8.0-HBase-1.1-bin/phoenix-4.8.0-HBase-1.1-client.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> SLF4J: Found binding in 
> [jar:file:/usr/hdp/2.4.2.0-258/hadoop/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an 
> explanation.
> 16/09/13 20:35:29 WARN util.NativeCodeLoader: Unable to load native-hadoop 
> library for your platform... using builtin-java classes where applicable
> 16/09/13 20:35:30 WARN shortcircuit.DomainSocketFactory: The short-circuit 
> local reads feature cannot be used because libhadoop cannot be loaded.
> 1/1          SELECT 
> S_ACCTBAL, 
> S_NAME, 
> N_NAME, 
> P_PARTKEY, 
> P_MFGR, 
> S_ADDRESS, 
> S_PHONE, 
> S_COMMENT 
> FROM 
> TPCH.PART, 
> TPCH.SUPPLIER, 
> TPCH.PARTSUPP, 
> TPCH.NATION, 
> TPCH.REGION 
> WHERE 
> P_PARTKEY = PS_PARTKEY 
> AND S_SUPPKEY = PS_SUPPKEY 
> AND P_SIZE = 15  
> AND P_TYPE LIKE '%BRASS' 
> AND S_NATIONKEY = N_NATIONKEY 
> AND N_REGIONKEY = R_REGIONKEY 
> AND R_NAME = 'EUROPE' 
> AND PS_SUPPLYCOST = ( 
> SELECT MIN(PS_SUPPLYCOST) 
> FROM 
> TPCH.PARTSUPP, 
> TPCH.SUPPLIER, 
> TPCH.NATION, 
> TPCH.REGION 
> WHERE 
> P_PARTKEY = PS_PARTKEY 
> AND S_SUPPKEY = PS_SUPPKEY 
> AND S_NATIONKEY = N_NATIONKEY 
> AND N_REGIONKEY = R_REGIONKEY 
> AND R_NAME = 'EUROPE' 
> ) 
> ORDER BY  
> S_ACCTBAL DESC, 
> N_NAME, 
> S_NAME, 
> P_PARTKEY 
> LIMIT 100 
> ;
> Error: Encountered exception in sub plan [0] execution. (state=,code=0)
> java.sql.SQLException: Encountered exception in sub plan [0] execution.
>     at org.apache.phoenix.execute.HashJoinPlan.iterator(HashJoinPlan.java:198)
>     at org.apache.phoenix.execute.HashJoinPlan.iterator(HashJoinPlan.java:143)
>     at org.apache.phoenix.execute.HashJoinPlan.iterator(HashJoinPlan.java:138)
>     at 
> org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:281)
>     at 
> org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:266)
>     at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
>     at 
> org.apache.phoenix.jdbc.PhoenixStatement.executeQuery(PhoenixStatement.java:265)
>     at 
> org.apache.phoenix.jdbc.PhoenixStatement.execute(PhoenixStatement.java:1444)
>     at sqlline.Commands.execute(Commands.java:822)
>     at sqlline.Commands.sql(Commands.java:732)
>     at sqlline.SqlLine.dispatch(SqlLine.java:807)
>     at sqlline.SqlLine.runCommands(SqlLine.java:1710)
>     at sqlline.Commands.run(Commands.java:1285)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>     at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.lang.reflect.Method.invoke(Method.java:606)
>     at 
> sqlline.ReflectiveCommandHandler.execute(ReflectiveCommandHandler.java:36)
>     at sqlline.SqlLine.dispatch(SqlLine.java:803)
>     at sqlline.SqlLine.initArgs(SqlLine.java:613)
>     at sqlline.SqlLine.begin(SqlLine.java:656)
>     at sqlline.SqlLine.start(SqlLine.java:398)
>     at sqlline.SqlLine.main(SqlLine.java:292)
> Caused by: org.apache.phoenix.join.MaxServerCacheSizeExceededException: Size 
> of hash cache (104857615 bytes) exceeds the maximum allowed size (104857600 
> bytes)
>     at 
> org.apache.phoenix.join.HashCacheClient.serialize(HashCacheClient.java:110)
>     at 
> org.apache.phoenix.join.HashCacheClient.addHashCache(HashCacheClient.java:83)
>     at 
> org.apache.phoenix.execute.HashJoinPlan$HashSubPlan.execute(HashJoinPlan.java:385)
>     at org.apache.phoenix.execute.HashJoinPlan$1.call(HashJoinPlan.java:167)
>     at org.apache.phoenix.execute.HashJoinPlan$1.call(HashJoinPlan.java:163)
>     at java.util.concurrent.FutureTask.run(FutureTask.java:262)
>     at 
> org.apache.phoenix.job.JobManager$InstrumentedJobFutureTask.run(JobManager.java:183)
>     at 
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>     at 
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>     at java.lang.Thread.run(Thread.java:745)
> Aborting command set because "force" is false and command failed: "SELECT 
> S_ACCTBAL, 
> S_NAME, 
> N_NAME, 
> P_PARTKEY, 
> P_MFGR, 
> S_ADDRESS, 
> S_PHONE, 
> S_COMMENT 
> FROM 
> TPCH.PART, 
> TPCH.SUPPLIER, 
> TPCH.PARTSUPP, 
> TPCH.NATION, 
> TPCH.REGION 
> WHERE 
> P_PARTKEY = PS_PARTKEY 
> AND S_SUPPKEY = PS_SUPPKEY 
> AND P_SIZE = 15  
> AND P_TYPE LIKE '%BRASS' 
> AND S_NATIONKEY = N_NATIONKEY 
> AND N_REGIONKEY = R_REGIONKEY 
> AND R_NAME = 'EUROPE' 
> AND PS_SUPPLYCOST = ( 
> SELECT MIN(PS_SUPPLYCOST) 
> FROM 
> TPCH.PARTSUPP, 
> TPCH.SUPPLIER, 
> TPCH.NATION, 
> TPCH.REGION 
> WHERE 
> P_PARTKEY = PS_PARTKEY 
> AND S_SUPPKEY = PS_SUPPKEY 
> AND S_NATIONKEY = N_NATIONKEY 
> AND N_REGIONKEY = R_REGIONKEY 
> AND R_NAME = 'EUROPE' 
> ) 
> ORDER BY  
> S_ACCTBAL DESC, 
> N_NAME, 
> S_NAME, 
> P_PARTKEY 
> LIMIT 100 
> ;"
> Closing: org.apache.phoenix.jdbc.PhoenixConnection
> {noformat}
> adjusting the hash cache to 2GB results in the following:
> {noformat}
> SLF4J: Class path contains multiple SLF4J bindings.
> SLF4J: Found binding in 
> [jar:file:/opt/phoenix/apache-phoenix-4.8.0-HBase-1.1-bin/phoenix-4.8.0-HBase-1.1-client.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> SLF4J: Found binding in 
> [jar:file:/usr/hdp/2.4.2.0-258/hadoop/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an 
> explanation.
> 16/09/14 03:58:11 WARN util.NativeCodeLoader: Unable to load native-hadoop 
> library for your platform... using builtin-java classes where applicable
> 16/09/14 03:58:12 WARN shortcircuit.DomainSocketFactory: The short-circuit 
> local reads feature cannot be used because libhadoop cannot be loaded.
> 1/1          SELECT 
> S_ACCTBAL, 
> S_NAME, 
> N_NAME, 
> P_PARTKEY, 
> P_MFGR, 
> S_ADDRESS, 
> S_PHONE, 
> S_COMMENT 
> FROM 
> TPCH.PART, 
> TPCH.SUPPLIER, 
> TPCH.PARTSUPP, 
> TPCH.NATION, 
> TPCH.REGION 
> WHERE 
> P_PARTKEY = PS_PARTKEY 
> AND S_SUPPKEY = PS_SUPPKEY 
> AND P_SIZE = 15  
> AND P_TYPE LIKE '%BRASS' 
> AND S_NATIONKEY = N_NATIONKEY 
> AND N_REGIONKEY = R_REGIONKEY 
> AND R_NAME = 'EUROPE' 
> AND PS_SUPPLYCOST = ( 
> SELECT MIN(PS_SUPPLYCOST) 
> FROM 
> TPCH.PARTSUPP, 
> TPCH.SUPPLIER, 
> TPCH.NATION, 
> TPCH.REGION 
> WHERE 
> P_PARTKEY = PS_PARTKEY 
> AND S_SUPPKEY = PS_SUPPKEY 
> AND S_NATIONKEY = N_NATIONKEY 
> AND N_REGIONKEY = R_REGIONKEY 
> AND R_NAME = 'EUROPE' 
> ) 
> ORDER BY  
> S_ACCTBAL DESC, 
> N_NAME, 
> S_NAME, 
> P_PARTKEY 
> LIMIT 100 
> ;
> 16/09/14 03:59:03 WARN execute.HashJoinPlan: Hash plan [0] execution seems 
> too slow. Earlier hash cache(s) might have expired on servers.
> 16/09/14 03:59:08 WARN execute.HashJoinPlan: Hash plan [0] execution seems 
> too slow. Earlier hash cache(s) might have expired on servers.
> Error: Encountered exception in sub plan [1] execution. (state=,code=0)
> java.sql.SQLException: Encountered exception in sub plan [1] execution.
>     at org.apache.phoenix.execute.HashJoinPlan.iterator(HashJoinPlan.java:198)
>     at org.apache.phoenix.execute.HashJoinPlan.iterator(HashJoinPlan.java:143)
>     at org.apache.phoenix.execute.HashJoinPlan.iterator(HashJoinPlan.java:138)
>     at 
> org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:281)
>     at 
> org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:266)
>     at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
>     at 
> org.apache.phoenix.jdbc.PhoenixStatement.executeQuery(PhoenixStatement.java:265)
>     at 
> org.apache.phoenix.jdbc.PhoenixStatement.execute(PhoenixStatement.java:1444)
>     at sqlline.Commands.execute(Commands.java:822)
>     at sqlline.Commands.sql(Commands.java:732)
>     at sqlline.SqlLine.dispatch(SqlLine.java:807)
>     at sqlline.SqlLine.runCommands(SqlLine.java:1710)
>     at sqlline.Commands.run(Commands.java:1285)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>     at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.lang.reflect.Method.invoke(Method.java:606)
>     at 
> sqlline.ReflectiveCommandHandler.execute(ReflectiveCommandHandler.java:36)
>     at sqlline.SqlLine.dispatch(SqlLine.java:803)
>     at sqlline.SqlLine.initArgs(SqlLine.java:613)
>     at sqlline.SqlLine.begin(SqlLine.java:656)
>     at sqlline.SqlLine.start(SqlLine.java:398)
>     at sqlline.SqlLine.main(SqlLine.java:292)
> Caused by: java.lang.OutOfMemoryError: Requested array size exceeds VM limit
>     at java.util.Arrays.copyOf(Arrays.java:2271)
>     at java.io.ByteArrayOutputStream.grow(ByteArrayOutputStream.java:118)
>     at 
> java.io.ByteArrayOutputStream.ensureCapacity(ByteArrayOutputStream.java:93)
>     at java.io.ByteArrayOutputStream.write(ByteArrayOutputStream.java:153)
>     at java.io.DataOutputStream.write(DataOutputStream.java:107)
>     at org.apache.phoenix.util.TupleUtil.write(TupleUtil.java:153)
>     at 
> org.apache.phoenix.join.HashCacheClient.serialize(HashCacheClient.java:108)
>     at 
> org.apache.phoenix.join.HashCacheClient.addHashCache(HashCacheClient.java:83)
>     at 
> org.apache.phoenix.execute.HashJoinPlan$HashSubPlan.execute(HashJoinPlan.java:385)
>     at org.apache.phoenix.execute.HashJoinPlan$1.call(HashJoinPlan.java:167)
>     at org.apache.phoenix.execute.HashJoinPlan$1.call(HashJoinPlan.java:163)
>     at java.util.concurrent.FutureTask.run(FutureTask.java:262)
>     at 
> org.apache.phoenix.job.JobManager$InstrumentedJobFutureTask.run(JobManager.java:183)
>     at 
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>     at 
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>     at java.lang.Thread.run(Thread.java:745)
> Aborting command set because "force" is false and command failed: "SELECT 
> S_ACCTBAL, 
> S_NAME, 
> N_NAME, 
> P_PARTKEY, 
> P_MFGR, 
> S_ADDRESS, 
> S_PHONE, 
> S_COMMENT 
> FROM 
> TPCH.PART, 
> TPCH.SUPPLIER, 
> TPCH.PARTSUPP, 
> TPCH.NATION, 
> TPCH.REGION 
> WHERE 
> P_PARTKEY = PS_PARTKEY 
> AND S_SUPPKEY = PS_SUPPKEY 
> AND P_SIZE = 15  
> AND P_TYPE LIKE '%BRASS' 
> AND S_NATIONKEY = N_NATIONKEY 
> AND N_REGIONKEY = R_REGIONKEY 
> AND R_NAME = 'EUROPE' 
> AND PS_SUPPLYCOST = ( 
> SELECT MIN(PS_SUPPLYCOST) 
> FROM 
> TPCH.PARTSUPP, 
> TPCH.SUPPLIER, 
> TPCH.NATION, 
> TPCH.REGION 
> WHERE 
> P_PARTKEY = PS_PARTKEY 
> AND S_SUPPKEY = PS_SUPPKEY 
> AND S_NATIONKEY = N_NATIONKEY 
> AND N_REGIONKEY = R_REGIONKEY 
> AND R_NAME = 'EUROPE' 
> ) 
> ORDER BY  
> S_ACCTBAL DESC, 
> N_NAME, 
> S_NAME, 
> P_PARTKEY 
> LIMIT 100 
> ;"
> Closing: org.apache.phoenix.jdbc.PhoenixConnection
> {noformat}



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)

Reply via email to