See <https://builds.apache.org/job/Phoenix-master/644/changes>

Changes:

[samarth.jain] PHOENIX-1722 Speedup CONVERT_TZ function (Vaclav Loffelmann)

------------------------------------------
[...truncated 821 lines...]
        at org.apache.phoenix.end2end.ViewIT.testNonSaltedUpdatableViewWithLocalIndex(ViewIT.java:113)

testNonSaltedUpdatableViewWithIndex(org.apache.phoenix.end2end.ViewIT)  Time elapsed: 1.602 sec  <<< FAILURE!
java.lang.AssertionError: expected:<6> but was:<1>
        at org.junit.Assert.fail(Assert.java:88)
        at org.junit.Assert.failNotEquals(Assert.java:834)
        at org.junit.Assert.assertEquals(Assert.java:645)
        at org.junit.Assert.assertEquals(Assert.java:631)
        at org.apache.phoenix.end2end.BaseViewIT.testUpdatableViewIndex(BaseViewIT.java:121)
        at org.apache.phoenix.end2end.BaseViewIT.testUpdatableViewWithIndex(BaseViewIT.java:54)
        at org.apache.phoenix.end2end.ViewIT.testNonSaltedUpdatableViewWithIndex(ViewIT.java:108)

testReadOnlyOnReadOnlyView(org.apache.phoenix.end2end.ViewIT)  Time elapsed: 0.698 sec  <<< FAILURE!
java.lang.AssertionError: expected:<4> but was:<1>
        at org.junit.Assert.fail(Assert.java:88)
        at org.junit.Assert.failNotEquals(Assert.java:834)
        at org.junit.Assert.assertEquals(Assert.java:645)
        at org.junit.Assert.assertEquals(Assert.java:631)
        at org.apache.phoenix.end2end.ViewIT.testReadOnlyView(ViewIT.java:65)
        at org.apache.phoenix.end2end.ViewIT.testReadOnlyOnReadOnlyView(ViewIT.java:86)

testReadOnlyView(org.apache.phoenix.end2end.ViewIT)  Time elapsed: 0.701 sec  <<< FAILURE!
java.lang.AssertionError: expected:<4> but was:<1>
        at org.junit.Assert.fail(Assert.java:88)
        at org.junit.Assert.failNotEquals(Assert.java:834)
        at org.junit.Assert.assertEquals(Assert.java:645)
        at org.junit.Assert.assertEquals(Assert.java:631)
        at org.apache.phoenix.end2end.ViewIT.testReadOnlyView(ViewIT.java:65)

Tests run: 11, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 49.852 sec - in org.apache.phoenix.hbase.index.covered.example.EndToEndCoveredIndexingIT
Running org.apache.phoenix.end2end.index.MutableIndexFailureIT
Tests run: 16, Failures: 1, Errors: 0, Skipped: 0, Time elapsed: 48.592 sec <<< FAILURE! - in org.apache.phoenix.end2end.TenantSpecificTablesDMLIT
testBasicUpsertSelect2(org.apache.phoenix.end2end.TenantSpecificTablesDMLIT)  Time elapsed: 3.923 sec  <<< FAILURE!
java.lang.AssertionError: expected:<3> but was:<1>
        at org.junit.Assert.fail(Assert.java:88)
        at org.junit.Assert.failNotEquals(Assert.java:834)
        at org.junit.Assert.assertEquals(Assert.java:645)
        at org.junit.Assert.assertEquals(Assert.java:631)
        at org.apache.phoenix.end2end.TenantSpecificTablesDMLIT.testBasicUpsertSelect2(TenantSpecificTablesDMLIT.java:158)

Tests run: 11, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 49.453 sec - in org.apache.phoenix.hbase.index.covered.example.EndtoEndIndexingWithCompressionIT
Running org.apache.phoenix.end2end.TenantSpecificTablesDDLIT
Running org.apache.phoenix.end2end.index.MutableIndexReplicationIT
Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 94.306 sec - in org.apache.phoenix.hbase.index.balancer.IndexLoadBalancerIT
Running org.apache.phoenix.end2end.index.DropIndexDuringUpsertIT
Running org.apache.phoenix.end2end.index.ImmutableIndexWithStatsIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.67 sec - in org.apache.phoenix.end2end.index.ImmutableIndexWithStatsIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 17.265 sec - in org.apache.phoenix.end2end.index.MutableIndexReplicationIT
Running org.apache.phoenix.end2end.KeyOnlyIT
Running org.apache.phoenix.end2end.StatsCollectorIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 12.691 sec - in org.apache.phoenix.end2end.KeyOnlyIT
Tests run: 18, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 52.378 sec - in org.apache.phoenix.end2end.TenantSpecificTablesDDLIT
Running org.apache.phoenix.end2end.QueryTimeoutIT
Running org.apache.phoenix.end2end.ContextClassloaderIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.478 sec - in org.apache.phoenix.end2end.ContextClassloaderIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 12.6 sec - in org.apache.phoenix.end2end.QueryTimeoutIT
Running org.apache.phoenix.end2end.MultiCfQueryExecIT
Running org.apache.phoenix.end2end.QueryWithLimitIT
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 52.114 sec - in org.apache.phoenix.end2end.StatsCollectorIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 13.287 sec - in org.apache.phoenix.end2end.QueryWithLimitIT
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 22.824 sec - in org.apache.phoenix.end2end.MultiCfQueryExecIT
Running org.apache.phoenix.end2end.ParallelIteratorsIT
Running org.apache.phoenix.end2end.SaltedViewIT
Running org.apache.phoenix.end2end.SpillableGroupByIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 11.149 sec - in org.apache.phoenix.end2end.ParallelIteratorsIT
Tests run: 2, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 12.295 sec <<< FAILURE! - in org.apache.phoenix.end2end.SaltedViewIT
testSaltedUpdatableViewWithLocalIndex(org.apache.phoenix.end2end.SaltedViewIT)  Time elapsed: 3.175 sec  <<< ERROR!
org.apache.phoenix.exception.PhoenixIOException: org.apache.phoenix.exception.PhoenixIOException: org.apache.hadoop.hbase.TableNotFoundException: Table '_LOCAL_IDX__LOCAL_IDX_T' was not found, got: _LOCAL_IDX_T.
        at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegionInMeta(ConnectionManager.java:1225)
        at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegion(ConnectionManager.java:1109)
        at org.apache.hadoop.hbase.client.CoprocessorHConnection.locateRegion(CoprocessorHConnection.java:41)
        at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegion(ConnectionManager.java:1093)
        at org.apache.hadoop.hbase.client.CoprocessorHConnection.locateRegion(CoprocessorHConnection.java:41)
        at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegion(ConnectionManager.java:1050)
        at org.apache.hadoop.hbase.client.CoprocessorHConnection.locateRegion(CoprocessorHConnection.java:41)
        at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.getRegionLocation(ConnectionManager.java:885)
        at org.apache.hadoop.hbase.client.CoprocessorHConnection.getRegionLocation(CoprocessorHConnection.java:41)
        at org.apache.hadoop.hbase.client.RegionServerCallable.prepare(RegionServerCallable.java:78)
        at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:124)
        at org.apache.hadoop.hbase.client.HTable.get(HTable.java:881)
        at org.apache.hadoop.hbase.client.HTableWrapper.get(HTableWrapper.java:125)
        at org.apache.phoenix.util.IndexUtil.wrapResultUsingOffset(IndexUtil.java:492)
        at org.apache.phoenix.coprocessor.BaseScannerRegionObserver$2.nextRaw(BaseScannerRegionObserver.java:310)
        at org.apache.phoenix.coprocessor.DelegateRegionScanner.nextRaw(DelegateRegionScanner.java:76)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.scan(RSRpcServices.java:2101)
        at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:31305)
        at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2031)
        at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:107)
        at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
        at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
        at java.lang.Thread.run(Thread.java:724)

        at java.util.concurrent.FutureTask$Sync.innerGet(FutureTask.java:262)
        at java.util.concurrent.FutureTask.get(FutureTask.java:119)
        at org.apache.phoenix.iterate.BaseResultIterators.getIterators(BaseResultIterators.java:536)
        at org.apache.phoenix.iterate.MergeSortResultIterator.getIterators(MergeSortResultIterator.java:48)
        at org.apache.phoenix.iterate.MergeSortResultIterator.minIterator(MergeSortResultIterator.java:84)
        at org.apache.phoenix.iterate.MergeSortResultIterator.next(MergeSortResultIterator.java:111)
        at org.apache.phoenix.jdbc.PhoenixResultSet.next(PhoenixResultSet.java:756)
        at org.apache.phoenix.end2end.BaseViewIT.testUpdatableViewIndex(BaseViewIT.java:125)
        at org.apache.phoenix.end2end.BaseViewIT.testUpdatableViewWithIndex(BaseViewIT.java:54)
        at org.apache.phoenix.end2end.SaltedViewIT.testSaltedUpdatableViewWithLocalIndex(SaltedViewIT.java:39)
Caused by: org.apache.phoenix.exception.PhoenixIOException: org.apache.hadoop.hbase.TableNotFoundException: Table '_LOCAL_IDX__LOCAL_IDX_T' was not found, got: _LOCAL_IDX_T.
        at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegionInMeta(ConnectionManager.java:1225)
        at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegion(ConnectionManager.java:1109)
        at org.apache.hadoop.hbase.client.CoprocessorHConnection.locateRegion(CoprocessorHConnection.java:41)
        at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegion(ConnectionManager.java:1093)
        at org.apache.hadoop.hbase.client.CoprocessorHConnection.locateRegion(CoprocessorHConnection.java:41)
        at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegion(ConnectionManager.java:1050)
        at org.apache.hadoop.hbase.client.CoprocessorHConnection.locateRegion(CoprocessorHConnection.java:41)
        at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.getRegionLocation(ConnectionManager.java:885)
        at org.apache.hadoop.hbase.client.CoprocessorHConnection.getRegionLocation(CoprocessorHConnection.java:41)
        at org.apache.hadoop.hbase.client.RegionServerCallable.prepare(RegionServerCallable.java:78)
        at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:124)
        at org.apache.hadoop.hbase.client.HTable.get(HTable.java:881)
        at org.apache.hadoop.hbase.client.HTableWrapper.get(HTableWrapper.java:125)
        at org.apache.phoenix.util.IndexUtil.wrapResultUsingOffset(IndexUtil.java:492)
        at org.apache.phoenix.coprocessor.BaseScannerRegionObserver$2.nextRaw(BaseScannerRegionObserver.java:310)
        at org.apache.phoenix.coprocessor.DelegateRegionScanner.nextRaw(DelegateRegionScanner.java:76)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.scan(RSRpcServices.java:2101)
        at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:31305)
        at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2031)
        at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:107)
        at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
        at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
        at java.lang.Thread.run(Thread.java:724)

        at org.apache.phoenix.util.ServerUtil.parseServerException(ServerUtil.java:108)
        at org.apache.phoenix.iterate.ScanningResultIterator.next(ScanningResultIterator.java:56)
        at org.apache.phoenix.iterate.TableResultIterator.next(TableResultIterator.java:104)
        at org.apache.phoenix.iterate.ChunkedResultIterator$SingleChunkResultIterator.next(ChunkedResultIterator.java:149)
        at org.apache.phoenix.iterate.SpoolingResultIterator.<init>(SpoolingResultIterator.java:107)
        at org.apache.phoenix.iterate.SpoolingResultIterator.<init>(SpoolingResultIterator.java:74)
        at org.apache.phoenix.iterate.SpoolingResultIterator$SpoolingResultIteratorFactory.newIterator(SpoolingResultIterator.java:68)
        at org.apache.phoenix.iterate.ChunkedResultIterator.<init>(ChunkedResultIterator.java:92)
        at org.apache.phoenix.iterate.ChunkedResultIterator$ChunkedResultIteratorFactory.newIterator(ChunkedResultIterator.java:72)
        at org.apache.phoenix.iterate.ParallelIterators$1.call(ParallelIterators.java:93)
        at org.apache.phoenix.iterate.ParallelIterators$1.call(ParallelIterators.java:84)
        at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:334)
        at java.util.concurrent.FutureTask.run(FutureTask.java:166)
        at org.apache.phoenix.job.JobManager$InstrumentedJobFutureTask.run(JobManager.java:172)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:724)
Caused by: org.apache.hadoop.hbase.TableNotFoundException: org.apache.hadoop.hbase.TableNotFoundException: Table '_LOCAL_IDX__LOCAL_IDX_T' was not found, got: _LOCAL_IDX_T.
        at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegionInMeta(ConnectionManager.java:1225)
        at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegion(ConnectionManager.java:1109)
        at org.apache.hadoop.hbase.client.CoprocessorHConnection.locateRegion(CoprocessorHConnection.java:41)
        at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegion(ConnectionManager.java:1093)
        at org.apache.hadoop.hbase.client.CoprocessorHConnection.locateRegion(CoprocessorHConnection.java:41)
        at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegion(ConnectionManager.java:1050)
        at org.apache.hadoop.hbase.client.CoprocessorHConnection.locateRegion(CoprocessorHConnection.java:41)
        at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.getRegionLocation(ConnectionManager.java:885)
        at org.apache.hadoop.hbase.client.CoprocessorHConnection.getRegionLocation(CoprocessorHConnection.java:41)
        at org.apache.hadoop.hbase.client.RegionServerCallable.prepare(RegionServerCallable.java:78)
        at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:124)
        at org.apache.hadoop.hbase.client.HTable.get(HTable.java:881)
        at org.apache.hadoop.hbase.client.HTableWrapper.get(HTableWrapper.java:125)
        at org.apache.phoenix.util.IndexUtil.wrapResultUsingOffset(IndexUtil.java:492)
        at org.apache.phoenix.coprocessor.BaseScannerRegionObserver$2.nextRaw(BaseScannerRegionObserver.java:310)
        at org.apache.phoenix.coprocessor.DelegateRegionScanner.nextRaw(DelegateRegionScanner.java:76)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.scan(RSRpcServices.java:2101)
        at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:31305)
        at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2031)
        at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:107)
        at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
        at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
        at java.lang.Thread.run(Thread.java:724)

        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
        at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
        at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:95)
        at org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRemoteException(ProtobufUtil.java:313)
        at org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:229)
        at org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:62)
        at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:126)
        at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas$RetryingRPC.call(ScannerCallableWithReplicas.java:294)
        at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas$RetryingRPC.call(ScannerCallableWithReplicas.java:275)
        at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:334)
        at java.util.concurrent.FutureTask.run(FutureTask.java:166)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:724)
Caused by: org.apache.hadoop.hbase.ipc.RemoteWithExtrasException: org.apache.hadoop.hbase.TableNotFoundException: Table '_LOCAL_IDX__LOCAL_IDX_T' was not found, got: _LOCAL_IDX_T.
        at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegionInMeta(ConnectionManager.java:1225)
        at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegion(ConnectionManager.java:1109)
        at org.apache.hadoop.hbase.client.CoprocessorHConnection.locateRegion(CoprocessorHConnection.java:41)
        at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegion(ConnectionManager.java:1093)
        at org.apache.hadoop.hbase.client.CoprocessorHConnection.locateRegion(CoprocessorHConnection.java:41)
        at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegion(ConnectionManager.java:1050)
        at org.apache.hadoop.hbase.client.CoprocessorHConnection.locateRegion(CoprocessorHConnection.java:41)
        at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.getRegionLocation(ConnectionManager.java:885)
        at org.apache.hadoop.hbase.client.CoprocessorHConnection.getRegionLocation(CoprocessorHConnection.java:41)
        at org.apache.hadoop.hbase.client.RegionServerCallable.prepare(RegionServerCallable.java:78)
        at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:124)
        at org.apache.hadoop.hbase.client.HTable.get(HTable.java:881)
        at org.apache.hadoop.hbase.client.HTableWrapper.get(HTableWrapper.java:125)
        at org.apache.phoenix.util.IndexUtil.wrapResultUsingOffset(IndexUtil.java:492)
        at org.apache.phoenix.coprocessor.BaseScannerRegionObserver$2.nextRaw(BaseScannerRegionObserver.java:310)
        at org.apache.phoenix.coprocessor.DelegateRegionScanner.nextRaw(DelegateRegionScanner.java:76)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.scan(RSRpcServices.java:2101)
        at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:31305)
        at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2031)
        at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:107)
        at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
        at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
        at java.lang.Thread.run(Thread.java:724)

        at org.apache.hadoop.hbase.ipc.RpcClientImpl.call(RpcClientImpl.java:1199)
        at org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:216)
        at org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMethod(AbstractRpcClient.java:300)
        at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.scan(ClientProtos.java:31751)
        at org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:199)
        at org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:62)
        at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:126)
        at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas$RetryingRPC.call(ScannerCallableWithReplicas.java:294)
        at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas$RetryingRPC.call(ScannerCallableWithReplicas.java:275)
        at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:334)
        at java.util.concurrent.FutureTask.run(FutureTask.java:166)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:724)

Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 12.573 sec - in org.apache.phoenix.end2end.SpillableGroupByIT
Running org.apache.phoenix.end2end.AlterTableIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 142.06 sec - in org.apache.phoenix.end2end.index.MutableIndexFailureIT
Running org.apache.phoenix.end2end.StatsCollectorWithSplitsAndMultiCFIT
Running org.apache.phoenix.end2end.CountDistinctCompressionIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 11.954 sec - in org.apache.phoenix.end2end.CountDistinctCompressionIT
Running org.apache.phoenix.mapreduce.IndexToolIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 30.392 sec - in org.apache.phoenix.end2end.StatsCollectorWithSplitsAndMultiCFIT
Running org.apache.phoenix.mapreduce.CsvBulkLoadToolIT
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 213.538 sec - in org.apache.phoenix.mapreduce.IndexToolIT
Tests run: 51, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 282.946 sec - in org.apache.phoenix.end2end.AlterTableIT
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 255.929 sec - in org.apache.phoenix.mapreduce.CsvBulkLoadToolIT
Build timed out (after 120 minutes). Marking the build as failed.
Build was aborted
Archiving artifacts
Sending artifact delta relative to Phoenix | Master #628
Archived 1003 artifacts
Archive block size is 32768
Received 6594 blocks and 758839963 bytes
Compression is 22.2%
Took 4 min 39 sec
Updating PHOENIX-1722
Recording test results
