thomassarens created HBASE-28483:
------------------------------------

             Summary: Merge of incremental backups fails on bulk-loaded HFiles
                 Key: HBASE-28483
                 URL: https://issues.apache.org/jira/browse/HBASE-28483
             Project: HBase
          Issue Type: Bug
          Components: backup&restore
    Affects Versions: 2.6.0, 4.0.0-alpha-1
            Reporter: thomassarens
         Attachments: TestIncrementalBackupMergeWithBulkLoad.java
The merge of incremental backups fails when one of the backups contains a bulk-loaded HFile and the others don't. See the attached test [^TestIncrementalBackupMergeWithBulkLoad.java], based on {{org/apache/hadoop/hbase/backup/TestBackupRestoreWithModifications.java}}, which reproduces the exception when {{useBulkLoad}} is set to true. The exception occurs in the call to {{HFileRecordReader#initialize}}, which tries to read a directory path as an HFile. I'll see if I can create a patch on master to fix this.

{code:java}
2024-04-04T14:55:15,462 INFO [LocalJobRunner Map Task Executor #0 {}] mapreduce.HFileInputFormat$HFileRecordReader(95): Initialize HFileRecordReader for hdfs://localhost:34093/user/thomass/backupIT/backup_1712235269368/default/table-true/eaeb223066c24d3e77a2ee6987e30cb3/0
2024-04-04T14:55:15,482 WARN [Thread-1429 {}] mapred.LocalJobRunner$Job(590): job_local1854345815_0018
java.lang.Exception: java.io.FileNotFoundException: Path is not a file: /user/thomass/backupIT/backup_1712235269368/default/table-true/eaeb223066c24d3e77a2ee6987e30cb3/0
    at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:90)
    at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:76)
    at org.apache.hadoop.hdfs.server.namenode.FSDirStatAndListingOp.getBlockLocations(FSDirStatAndListingOp.java:156)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:2124)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:769)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:460)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1213)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1089)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1012)
    at java.base/java.security.AccessController.doPrivileged(Native Method)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:423)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1899)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3026)
    at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:492) ~[hadoop-mapreduce-client-common-3.3.5.jar:?]
    at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:552) ~[hadoop-mapreduce-client-common-3.3.5.jar:?]
Caused by: java.io.FileNotFoundException: Path is not a file: /user/thomass/backupIT/backup_1712235269368/default/table-true/eaeb223066c24d3e77a2ee6987e30cb3/0
    at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:90)
    at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:76)
    at org.apache.hadoop.hdfs.server.namenode.FSDirStatAndListingOp.getBlockLocations(FSDirStatAndListingOp.java:156)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:2124)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:769)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:460)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1213)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1089)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1012)
    at java.base/java.security.AccessController.doPrivileged(Native Method)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:423)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1899)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3026)
    at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) ~[?:?]
    at jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[?:?]
    at java.lang.reflect.Constructor.newInstance(Constructor.java:490) ~[?:?]
    at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121) ~[hadoop-common-3.3.5.jar:?]
    at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88) ~[hadoop-common-3.3.5.jar:?]
    at org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:902) ~[hadoop-hdfs-client-3.3.5.jar:?]
    at org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:889) ~[hadoop-hdfs-client-3.3.5.jar:?]
    at org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:878) ~[hadoop-hdfs-client-3.3.5.jar:?]
    at org.apache.hadoop.hdfs.DFSClient.open(DFSClient.java:1046) ~[hadoop-hdfs-client-3.3.5.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$4.doCall(DistributedFileSystem.java:340) ~[hadoop-hdfs-client-3.3.5.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$4.doCall(DistributedFileSystem.java:336) ~[hadoop-hdfs-client-3.3.5.jar:?]
    at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) ~[hadoop-common-3.3.5.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem.open(DistributedFileSystem.java:353) ~[hadoop-hdfs-client-3.3.5.jar:?]
    at org.apache.hadoop.fs.FilterFileSystem.open(FilterFileSystem.java:168) ~[hadoop-common-3.3.5.jar:?]
    at org.apache.hadoop.fs.FileSystem.open(FileSystem.java:997) ~[hadoop-common-3.3.5.jar:?]
    at org.apache.hadoop.hbase.io.FSDataInputStreamWrapper.<init>(FSDataInputStreamWrapper.java:122) ~[classes/:?]
    at org.apache.hadoop.hbase.io.FSDataInputStreamWrapper.<init>(FSDataInputStreamWrapper.java:99) ~[classes/:?]
    at org.apache.hadoop.hbase.io.FSDataInputStreamWrapper.<init>(FSDataInputStreamWrapper.java:94) ~[classes/:?]
    at org.apache.hadoop.hbase.io.hfile.HFile.createReader(HFile.java:543) ~[classes/:?]
    at org.apache.hadoop.hbase.io.hfile.HFile.createReader(HFile.java:525) ~[classes/:?]
    at org.apache.hadoop.hbase.mapreduce.HFileInputFormat$HFileRecordReader.initialize(HFileInputFormat.java:96) ~[classes/:?]
    at org.apache.hadoop.mapred.MapTask$NewTrackingRecordReader.initialize(MapTask.java:561) ~[hadoop-mapreduce-client-core-3.3.5.jar:?]
    at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:799) ~[hadoop-mapreduce-client-core-3.3.5.jar:?]
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:348) ~[hadoop-mapreduce-client-core-3.3.5.jar:?]
    at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:271) ~[hadoop-mapreduce-client-common-3.3.5.jar:?]
    at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) ~[?:?]
    at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?]
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) ~[?:?]
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) ~[?:?]
    at java.lang.Thread.run(Thread.java:829) ~[?:?]
Caused by: org.apache.hadoop.ipc.RemoteException: Path is not a file: /user/thomass/backupIT/backup_1712235269368/default/table-true/eaeb223066c24d3e77a2ee6987e30cb3/0
    at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:90)
    at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:76)
    at org.apache.hadoop.hdfs.server.namenode.FSDirStatAndListingOp.getBlockLocations(FSDirStatAndListingOp.java:156)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:2124)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:769)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:460)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1213)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1089)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1012)
    at java.base/java.security.AccessController.doPrivileged(Native Method)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:423)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1899)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3026)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.3.5.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1530) ~[hadoop-common-3.3.5.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1427) ~[hadoop-common-3.3.5.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.3.5.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.3.5.jar:?]
    at com.sun.proxy.$Proxy44.getBlockLocations(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getBlockLocations(ClientNamenodeProtocolTranslatorPB.java:334) ~[hadoop-hdfs-client-3.3.5.jar:?]
    at jdk.internal.reflect.GeneratedMethodAccessor18.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:566) ~[?:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:433) ~[hadoop-common-3.3.5.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:166) ~[hadoop-common-3.3.5.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:158) ~[hadoop-common-3.3.5.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:96) ~[hadoop-common-3.3.5.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:362) ~[hadoop-common-3.3.5.jar:?]
    at com.sun.proxy.$Proxy45.getBlockLocations(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor18.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:566) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at com.sun.proxy.$Proxy48.getBlockLocations(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor18.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:566) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at com.sun.proxy.$Proxy48.getBlockLocations(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor18.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:566) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at com.sun.proxy.$Proxy48.getBlockLocations(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor18.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:566) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at com.sun.proxy.$Proxy48.getBlockLocations(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor18.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:566) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at com.sun.proxy.$Proxy48.getBlockLocations(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:900) ~[hadoop-hdfs-client-3.3.5.jar:?]
    at org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:889) ~[hadoop-hdfs-client-3.3.5.jar:?]
    at org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:878) ~[hadoop-hdfs-client-3.3.5.jar:?]
    at org.apache.hadoop.hdfs.DFSClient.open(DFSClient.java:1046) ~[hadoop-hdfs-client-3.3.5.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$4.doCall(DistributedFileSystem.java:340) ~[hadoop-hdfs-client-3.3.5.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$4.doCall(DistributedFileSystem.java:336) ~[hadoop-hdfs-client-3.3.5.jar:?]
    at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) ~[hadoop-common-3.3.5.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem.open(DistributedFileSystem.java:353) ~[hadoop-hdfs-client-3.3.5.jar:?]
    at org.apache.hadoop.fs.FilterFileSystem.open(FilterFileSystem.java:168) ~[hadoop-common-3.3.5.jar:?]
    at org.apache.hadoop.fs.FileSystem.open(FileSystem.java:997) ~[hadoop-common-3.3.5.jar:?]
    at org.apache.hadoop.hbase.io.FSDataInputStreamWrapper.<init>(FSDataInputStreamWrapper.java:122) ~[classes/:?]
    at org.apache.hadoop.hbase.io.FSDataInputStreamWrapper.<init>(FSDataInputStreamWrapper.java:99) ~[classes/:?]
    at org.apache.hadoop.hbase.io.FSDataInputStreamWrapper.<init>(FSDataInputStreamWrapper.java:94) ~[classes/:?]
    at org.apache.hadoop.hbase.io.hfile.HFile.createReader(HFile.java:543) ~[classes/:?]
    at org.apache.hadoop.hbase.io.hfile.HFile.createReader(HFile.java:525) ~[classes/:?]
    at org.apache.hadoop.hbase.mapreduce.HFileInputFormat$HFileRecordReader.initialize(HFileInputFormat.java:96) ~[classes/:?]
    at org.apache.hadoop.mapred.MapTask$NewTrackingRecordReader.initialize(MapTask.java:561) ~[hadoop-mapreduce-client-core-3.3.5.jar:?]
    at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:799) ~[hadoop-mapreduce-client-core-3.3.5.jar:?]
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:348) ~[hadoop-mapreduce-client-core-3.3.5.jar:?]
    at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:271) ~[hadoop-mapreduce-client-common-3.3.5.jar:?]
    at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) ~[?:?]
    at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?]
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) ~[?:?]
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) ~[?:?]
    at java.lang.Thread.run(Thread.java:829) ~[?:?]
2024-04-04T14:55:16,451 ERROR [Listener at localhost/33145 {}] mapreduce.MapReduceBackupMergeJob(188): java.io.IOException: Can not merge backup images for backupIT/backup_1712235269368/default/table-true,backupIT/backup_1712235285491/default/table-true (check Hadoop/MR and HBase logs). Player return code =1
java.io.IOException: Can not merge backup images for backupIT/backup_1712235269368/default/table-true,backupIT/backup_1712235285491/default/table-true (check Hadoop/MR and HBase logs). Player return code =1
    at org.apache.hadoop.hbase.backup.mapreduce.MapReduceBackupMergeJob.run(MapReduceBackupMergeJob.java:141) ~[classes/:?]
    at org.apache.hadoop.hbase.backup.impl.BackupAdminImpl.mergeBackups(BackupAdminImpl.java:622) ~[classes/:?]
    at org.apache.hadoop.hbase.backup.TestIncrementalBackupMergeWithBulkLoad.merge(TestIncrementalBackupMergeWithBulkLoad.java:222) ~[test-classes/:?]
    at org.apache.hadoop.hbase.backup.TestIncrementalBackupMergeWithBulkLoad.test(TestIncrementalBackupMergeWithBulkLoad.java:137) ~[test-classes/:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:566) ~[?:?]
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:413) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.Suite.runChild(Suite.java:128) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.Suite.runChild(Suite.java:27) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293) ~[junit-4.13.2.jar:4.13.2]
    at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?]
    at java.lang.Thread.run(Thread.java:829) ~[?:?]
2024-04-04T14:55:16,456 DEBUG [Listener at localhost/33145 {}] impl.BackupSystemTable(657): Finish backup exclusive operation
2024-04-04T14:55:16,459 INFO [Listener at localhost/33145 {}] client.AsyncConnectionImpl(233): Connection has been closed by Listener at localhost/33145.
2024-04-04T14:55:16,459 DEBUG [Listener at localhost/33145 {}] client.AsyncConnectionImpl(264): Call stack:
    at java.base/java.lang.Thread.getStackTrace(Thread.java:1602)
    at org.apache.hadoop.hbase.client.AsyncConnectionImpl.lambda$close$5(AsyncConnectionImpl.java:235)
    at org.apache.hadoop.hbase.trace.TraceUtil.trace(TraceUtil.java:187)
    at org.apache.hadoop.hbase.trace.TraceUtil.trace(TraceUtil.java:177)
    at org.apache.hadoop.hbase.client.AsyncConnectionImpl.close(AsyncConnectionImpl.java:229)
    at org.apache.hadoop.hbase.client.ConnectionOverAsyncConnection.close(ConnectionOverAsyncConnection.java:130)
    at org.apache.hadoop.hbase.backup.mapreduce.MapReduceBackupMergeJob.run(MapReduceBackupMergeJob.java:203)
    at org.apache.hadoop.hbase.backup.impl.BackupAdminImpl.mergeBackups(BackupAdminImpl.java:622)
    at org.apache.hadoop.hbase.backup.TestIncrementalBackupMergeWithBulkLoad.merge(TestIncrementalBackupMergeWithBulkLoad.java:222)
    at org.apache.hadoop.hbase.backup.TestIncrementalBackupMergeWithBulkLoad.test(TestIncrementalBackupMergeWithBulkLoad.java:137)
    at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.base/java.lang.reflect.Method.invoke(Method.java:566)
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26)
    at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
    at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100)
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366)
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103)
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63)
    at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
    at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
    at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
    at org.junit.runners.Suite.runChild(Suite.java:128)
    at org.junit.runners.Suite.runChild(Suite.java:27)
    at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
    at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26)
    at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
    at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299)
    at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293)
    at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
    at java.base/java.lang.Thread.run(Thread.java:829)
2024-04-04T14:55:16,459 DEBUG [Listener at localhost/33145 {}] ipc.AbstractRpcClient(514): Stopping rpc client
2024-04-04T14:55:16,459 DEBUG [Listener at localhost/33145 {}] ipc.AbstractRpcClient(514): Stopping rpc client
2024-04-04T14:55:16,459 INFO [Registry-endpoints-refresh-end-points {}] client.RegistryEndpointsRefresher(78): Registry end points refresher loop exited.
2024-04-04T14:55:16,460 INFO [Listener at localhost/33145 {}] client.AsyncConnectionImpl(233): Connection has been closed by Listener at localhost/33145.
2024-04-04T14:55:16,460 DEBUG [Listener at localhost/33145 {}] client.AsyncConnectionImpl(264): Call stack:
    at java.base/java.lang.Thread.getStackTrace(Thread.java:1602)
    at org.apache.hadoop.hbase.client.AsyncConnectionImpl.lambda$close$5(AsyncConnectionImpl.java:235)
    at org.apache.hadoop.hbase.trace.TraceUtil.trace(TraceUtil.java:187)
    at org.apache.hadoop.hbase.trace.TraceUtil.trace(TraceUtil.java:177)
    at org.apache.hadoop.hbase.client.AsyncConnectionImpl.close(AsyncConnectionImpl.java:229)
    at org.apache.hadoop.hbase.client.ConnectionOverAsyncConnection.close(ConnectionOverAsyncConnection.java:130)
    at org.apache.hadoop.hbase.backup.TestIncrementalBackupMergeWithBulkLoad.merge(TestIncrementalBackupMergeWithBulkLoad.java:220)
    at org.apache.hadoop.hbase.backup.TestIncrementalBackupMergeWithBulkLoad.test(TestIncrementalBackupMergeWithBulkLoad.java:137)
    at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.base/java.lang.reflect.Method.invoke(Method.java:566)
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26)
    at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
    at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100)
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366)
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103)
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63)
    at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
    at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
    at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
    at org.junit.runners.Suite.runChild(Suite.java:128)
    at org.junit.runners.Suite.runChild(Suite.java:27)
    at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
    at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26)
    at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
    at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299)
    at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293)
    at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
    at java.base/java.lang.Thread.run(Thread.java:829)
2024-04-04T14:55:16,460 DEBUG [Listener at localhost/33145 {}] ipc.AbstractRpcClient(514): Stopping rpc client
2024-04-04T14:55:16,460 DEBUG [Listener at localhost/33145 {}] ipc.AbstractRpcClient(514): Stopping rpc client
2024-04-04T14:55:16,460 INFO [Registry-endpoints-refresh-end-points {}] client.RegistryEndpointsRefresher(78): Registry end points refresher loop exited.
2024-04-04T14:55:16,468 INFO [Listener at localhost/33145 {}] hbase.HBaseTestingUtil(1041): Shutting down minicluster
2024-04-04T14:55:16,468 INFO [Listener at localhost/33145 {}] client.AsyncConnectionImpl(233): Connection has been closed by Listener at localhost/33145.
2024-04-04T14:55:16,468 DEBUG [Listener at localhost/33145 {}] client.AsyncConnectionImpl(264): Call stack:
java.io.IOException: Backup merge operation failed, you should try it again
    at org.apache.hadoop.hbase.backup.mapreduce.MapReduceBackupMergeJob.run(MapReduceBackupMergeJob.java:195)
    at org.apache.hadoop.hbase.backup.impl.BackupAdminImpl.mergeBackups(BackupAdminImpl.java:622)
    at org.apache.hadoop.hbase.backup.TestIncrementalBackupMergeWithBulkLoad.merge(TestIncrementalBackupMergeWithBulkLoad.java:222)
    at org.apache.hadoop.hbase.backup.TestIncrementalBackupMergeWithBulkLoad.test(TestIncrementalBackupMergeWithBulkLoad.java:137)
    at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.base/java.lang.reflect.Method.invoke(Method.java:566)
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26)
    at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
    at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100)
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366)
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103)
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63)
    at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
    at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
    at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
    at org.junit.runners.Suite.runChild(Suite.java:128)
    at org.junit.runners.Suite.runChild(Suite.java:27)
    at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
    at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26)
    at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
    at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299)
    at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293)
    at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
    at java.base/java.lang.Thread.run(Thread.java:829)
Caused by: java.io.IOException: Can not merge backup images for backupIT/backup_1712235269368/default/table-true,backupIT/backup_1712235285491/default/table-true (check Hadoop/MR and HBase logs). Player return code =1
    at org.apache.hadoop.hbase.backup.mapreduce.MapReduceBackupMergeJob.run(MapReduceBackupMergeJob.java:141)
    ... 36 more
{code}
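For reference, a minimal sketch of one possible direction for a fix, assuming the root cause is that the merge job's input listing hands {{HFileRecordReader}} the bulk-load column-family directory (the trailing {{/0}} in the paths above) rather than the HFiles inside it. The class and method names below are hypothetical, not existing HBase API; the idea is simply to expand any directory into the plain files it contains before splits are created:

{code:java}
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Hypothetical helper: recursively expands an input path into the regular
 * files beneath it, so that a bulk-load directory such as
 * .../eaeb223066c24d3e77a2ee6987e30cb3/0 resolves to the HFiles it contains
 * instead of being opened as an HFile itself.
 */
public final class HFilePathExpander {

  private HFilePathExpander() {
  }

  public static List<Path> expand(FileSystem fs, Path input) throws IOException {
    List<Path> files = new ArrayList<>();
    FileStatus status = fs.getFileStatus(input);
    if (!status.isDirectory()) {
      files.add(input); // already a plain file, use as-is
      return files;
    }
    for (FileStatus child : fs.listStatus(input)) {
      String name = child.getPath().getName();
      if (name.startsWith("_") || name.startsWith(".")) {
        continue; // skip hidden/metadata entries
      }
      if (child.isDirectory()) {
        // descend into bulk-load output directories
        files.addAll(expand(fs, child.getPath()));
      } else {
        files.add(child.getPath());
      }
    }
    return files;
  }
}
{code}

Whether such an expansion belongs in {{HFileInputFormat}} itself or in the input listing done by {{MapReduceBackupMergeJob}} is exactly what the patch will need to settle.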