[ 
https://issues.apache.org/jira/browse/HBASE-25206?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Toshihiro Suzuki updated HBASE-25206:
-------------------------------------
    Description: 
Steps to reproduce are as follows:

1. Create a table and put some data into the table:
{code:java}
create 'test1','cf'
put 'test1','r1','cf','v1'
put 'test1','r2','cf','v2'
put 'test1','r3','cf','v3'
put 'test1','r4','cf','v4'
put 'test1','r5','cf','v5'
{code}
2. Take a snapshot of the table:
{code:java}
snapshot 'test1','snap_test'
{code}
3. Clone the snapshot to another table:
{code:java}
clone_snapshot 'snap_test','test2'
{code}
4. Delete the snapshot:
{code:java}
delete_snapshot 'snap_test'
{code}
5. Split the original table:
{code:java}
split 'test1','r3'
{code}
6. Drop the original table:
{code:java}
disable 'test1'
drop 'test1'
{code}
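For reference, the same reproduction can be driven through the Java client API instead of the shell. The following is only a minimal sketch, assuming an HBase 2.x client and default connection settings; the class name is arbitrary, and the table, snapshot, and split-point values simply mirror the shell steps above.
{code:java}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class CloneSnapshotSplitDropRepro {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Admin admin = conn.getAdmin()) {
      TableName test1 = TableName.valueOf("test1");
      TableName test2 = TableName.valueOf("test2");

      // 1. Create the table and put some data (empty qualifier, matching 'cf' in the shell).
      admin.createTable(TableDescriptorBuilder.newBuilder(test1)
          .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build());
      try (Table table = conn.getTable(test1)) {
        for (int i = 1; i <= 5; i++) {
          table.put(new Put(Bytes.toBytes("r" + i))
              .addColumn(Bytes.toBytes("cf"), Bytes.toBytes(""), Bytes.toBytes("v" + i)));
        }
      }

      // 2-4. Snapshot the table, clone it to test2, then delete the snapshot.
      admin.snapshot("snap_test", test1);
      admin.cloneSnapshot("snap_test", test2);
      admin.deleteSnapshot("snap_test");

      // 5. Split the original table at 'r3'.
      admin.split(test1, Bytes.toBytes("r3"));

      // 6. Drop the original table. After this, opening test2's regions fails
      //    because their HFileLinks point at test1's removed store files.
      admin.disableTable(test1);
      admin.deleteTable(test1);
    }
  }
}
{code}
Note that Admin.split() only requests the split asynchronously, so a real test should wait for the daughter regions to come online before disabling and dropping the original table.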
After that, we see an error like the following in the RegionServer log when opening the regions of the cloned table:
{code:java}
2020-10-20 13:32:18,415 WARN org.apache.hadoop.hbase.regionserver.HRegion: 
Failed initialize of region= 
test2,,1603200595702.bebdc4f740626206eeccad96b7643261., starting to roll back 
memstore
java.io.IOException: java.io.IOException: java.io.FileNotFoundException: Unable 
to open link: org.apache.hadoop.hbase.io.HFileLink locations=[hdfs://<NN 
HOST>:8020/hbase/data/default/test1/349b766b1b38e21f627ed4e441ae643c/cf/b6e39865710345c8998dec0bcc94cc89,
 hdfs://<NN 
HOST>:8020/hbase/.tmp/data/default/test1/349b766b1b38e21f627ed4e441ae643c/cf/b6e39865710345c8998dec0bcc94cc89,
 hdfs://<NN 
HOST>:8020/hbase/mobdir/data/default/test1/349b766b1b38e21f627ed4e441ae643c/cf/b6e39865710345c8998dec0bcc94cc89,
 hdfs://<NN 
HOST>:8020/hbase/archive/data/default/test1/349b766b1b38e21f627ed4e441ae643c/cf/b6e39865710345c8998dec0bcc94cc89]
        at 
org.apache.hadoop.hbase.regionserver.HRegion.initializeStores(HRegion.java:1095)
        at 
org.apache.hadoop.hbase.regionserver.HRegion.initializeRegionInternals(HRegion.java:943)
        at 
org.apache.hadoop.hbase.regionserver.HRegion.initialize(HRegion.java:899)
        at 
org.apache.hadoop.hbase.regionserver.HRegion.openHRegion(HRegion.java:7246)
        at 
org.apache.hadoop.hbase.regionserver.HRegion.openHRegion(HRegion.java:7204)
        at 
org.apache.hadoop.hbase.regionserver.HRegion.openHRegion(HRegion.java:7176)
        at 
org.apache.hadoop.hbase.regionserver.HRegion.openHRegion(HRegion.java:7134)
        at 
org.apache.hadoop.hbase.regionserver.HRegion.openHRegion(HRegion.java:7085)
        at 
org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler.openRegion(OpenRegionHandler.java:283)
        at 
org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler.process(OpenRegionHandler.java:108)
        at 
org.apache.hadoop.hbase.executor.EventHandler.run(EventHandler.java:104)
        at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
        at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
Caused by: java.io.IOException: java.io.FileNotFoundException: Unable to open 
link: org.apache.hadoop.hbase.io.HFileLink locations=[hdfs://<NN 
HOST>:8020/hbase/data/default/test1/349b766b1b38e21f627ed4e441ae643c/cf/b6e39865710345c8998dec0bcc94cc89,
 hdfs://<NN 
HOST>:8020/hbase/.tmp/data/default/test1/349b766b1b38e21f627ed4e441ae643c/cf/b6e39865710345c8998dec0bcc94cc89,
 hdfs://<NN 
HOST>:8020/hbase/mobdir/data/default/test1/349b766b1b38e21f627ed4e441ae643c/cf/b6e39865710345c8998dec0bcc94cc89,
 hdfs://<NN 
HOST>:8020/hbase/archive/data/default/test1/349b766b1b38e21f627ed4e441ae643c/cf/b6e39865710345c8998dec0bcc94cc89]
        at 
org.apache.hadoop.hbase.regionserver.HStore.openStoreFiles(HStore.java:590)
        at 
org.apache.hadoop.hbase.regionserver.HStore.loadStoreFiles(HStore.java:557)
        at org.apache.hadoop.hbase.regionserver.HStore.<init>(HStore.java:303)
        at 
org.apache.hadoop.hbase.regionserver.HRegion.instantiateHStore(HRegion.java:5731)
        at 
org.apache.hadoop.hbase.regionserver.HRegion$1.call(HRegion.java:1059)
        at 
org.apache.hadoop.hbase.regionserver.HRegion$1.call(HRegion.java:1056)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at 
java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        ... 3 more
Caused by: java.io.FileNotFoundException: Unable to open link: 
org.apache.hadoop.hbase.io.HFileLink locations=[hdfs://<NN 
HOST>:8020/hbase/data/default/test1/349b766b1b38e21f627ed4e441ae643c/cf/b6e39865710345c8998dec0bcc94cc89,
 hdfs://<NN 
HOST>:8020/hbase/.tmp/data/default/test1/349b766b1b38e21f627ed4e441ae643c/cf/b6e39865710345c8998dec0bcc94cc89,
 hdfs://<NN 
HOST>:8020/hbase/mobdir/data/default/test1/349b766b1b38e21f627ed4e441ae643c/cf/b6e39865710345c8998dec0bcc94cc89,
 hdfs://<NN 
HOST>:8020/hbase/archive/data/default/test1/349b766b1b38e21f627ed4e441ae643c/cf/b6e39865710345c8998dec0bcc94cc89]
        at 
org.apache.hadoop.hbase.io.FileLink$FileLinkInputStream.tryOpen(FileLink.java:322)
        at 
org.apache.hadoop.hbase.io.FileLink$FileLinkInputStream.<init>(FileLink.java:126)
        at 
org.apache.hadoop.hbase.io.FileLink$FileLinkInputStream.<init>(FileLink.java:117)
        at org.apache.hadoop.hbase.io.FileLink.open(FileLink.java:429)
        at 
org.apache.hadoop.hbase.io.FSDataInputStreamWrapper.<init>(FSDataInputStreamWrapper.java:125)
        at 
org.apache.hadoop.hbase.io.FSDataInputStreamWrapper.<init>(FSDataInputStreamWrapper.java:107)
        at 
org.apache.hadoop.hbase.regionserver.StoreFileInfo.open(StoreFileInfo.java:261)
        at 
org.apache.hadoop.hbase.regionserver.HStoreFile.open(HStoreFile.java:368)
        at 
org.apache.hadoop.hbase.regionserver.HStoreFile.initReader(HStoreFile.java:476)
        at 
org.apache.hadoop.hbase.regionserver.HStore.createStoreFileAndReader(HStore.java:703)
        at 
org.apache.hadoop.hbase.regionserver.HStore.lambda$openStoreFiles$1(HStore.java:573)
        ... 6 more
{code}
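The failure mode behind the stack trace: the cloned table's store files are not copies but HFileLinks back to test1's hfiles, and a link is resolved by probing a fixed list of candidate locations (data, .tmp, mobdir, archive). After the original table is split and then dropped, the referenced file no longer exists in any of those locations, so the region cannot be opened. Below is a minimal sketch of how such a link can be inspected, assuming an HBase 2.x client where HFileLink.buildFromHFileLinkPattern and FileLink.getLocations are available; the link path passed in is a placeholder for an actual link file under the cloned table's column-family directory.
{code:java}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.io.HFileLink;

public class InspectHFileLink {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    FileSystem fs = FileSystem.get(conf);

    // Placeholder: path to a link file under the cloned table's family dir,
    // e.g. .../data/default/test2/<region>/cf/test1=<origin region>-<hfile>
    Path linkPath = new Path(args[0]);

    if (HFileLink.isHFileLink(linkPath)) {
      HFileLink link = HFileLink.buildFromHFileLinkPattern(conf, linkPath);
      // The link is usable only if the referenced hfile still exists in one
      // of the candidate locations; after the split + drop above, none do,
      // which is exactly the FileNotFoundException shown in the log.
      for (Path location : link.getLocations()) {
        System.out.println(location + " exists=" + fs.exists(location));
      }
    }
  }
}
{code}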

  was:
Steps to reproduce are as follows:

1. Create a table and put some data into the table:
{code:java}
create 'test1','cf'
put 'test1','r1','cf','v1'
put 'test1','r2','cf','v2'
put 'test1','r3','cf','v3'
put 'test1','r4','cf','v4'
put 'test1','r5','cf','v5'
{code}
2. Take a snapshot of the table:
{code:java}
snapshot 'test1','snap_test'
{code}
3. Clone the snapshot to another table:
{code:java}
clone_snapshot 'snap_test','test2'
{code}
4. Delete the snapshot:
{code:java}
delete_snapshot 'snap_test'
{code}
5. Split the original table:
{code:java}
split 'test1','r3'
{code}
6. Drop the original table:
{code:java}
disable 'test1'
drop 'test1'
{code}
After that, we see an error like the following in the RegionServer log when opening the regions of the cloned table:
{code:java}
2020-10-20 22:15:47,554 WARN  [RS_OPEN_REGION-regionserver/10.0.1.8:0-0] 
regionserver.HRegion(965): Failed initialize of region= 
testCloneSnapshotBeforeSplittingRegionAndDroppingTable_0__regionReplication_1_-1603199739880,,1603199732706.92f431fab12aaded92a23513901daa5a.,
 starting to roll back memstore
java.io.IOException: java.io.IOException: java.io.FileNotFoundException: 
HFileLink 
locations=[hdfs://localhost:62716/user/tsuzuki/test-data/c00e6c6b-1c3b-5e40-4227-831ae42cf2f4/data/default/testCloneSnapshotBeforeSplittingRegionAndDroppingTable_0__regionReplication_1_1603199732705/f4658c2b6fb129d95f62e63d3742177d/cf/719b64120a0f4394ae7af8926bc56402,
 
hdfs://localhost:62716/user/tsuzuki/test-data/c00e6c6b-1c3b-5e40-4227-831ae42cf2f4/.tmp/data/default/testCloneSnapshotBeforeSplittingRegionAndDroppingTable_0__regionReplication_1_1603199732705/f4658c2b6fb129d95f62e63d3742177d/cf/719b64120a0f4394ae7af8926bc56402,
 
hdfs://localhost:62716/user/tsuzuki/test-data/c00e6c6b-1c3b-5e40-4227-831ae42cf2f4/mobdir/data/default/testCloneSnapshotBeforeSplittingRegionAndDroppingTable_0__regionReplication_1_1603199732705/f4658c2b6fb129d95f62e63d3742177d/cf/719b64120a0f4394ae7af8926bc56402,
 
hdfs://localhost:62716/user/tsuzuki/test-data/c00e6c6b-1c3b-5e40-4227-831ae42cf2f4/archive/data/default/testCloneSnapshotBeforeSplittingRegionAndDroppingTable_0__regionReplication_1_1603199732705/f4658c2b6fb129d95f62e63d3742177d/cf/719b64120a0f4394ae7af8926bc56402]
        at 
org.apache.hadoop.hbase.regionserver.HRegion.initializeStores(HRegion.java:1179)
        at 
org.apache.hadoop.hbase.regionserver.HRegion.initializeStores(HRegion.java:1121)
        at 
org.apache.hadoop.hbase.regionserver.HRegion.initializeRegionInternals(HRegion.java:1011)
        at 
org.apache.hadoop.hbase.regionserver.HRegion.initialize(HRegion.java:962)
        at 
org.apache.hadoop.hbase.regionserver.HRegion.openHRegion(HRegion.java:7999)
        at 
org.apache.hadoop.hbase.regionserver.HRegion.openHRegionFromTableDir(HRegion.java:7955)
        at 
org.apache.hadoop.hbase.regionserver.HRegion.openHRegion(HRegion.java:7930)
        at 
org.apache.hadoop.hbase.regionserver.HRegion.openHRegion(HRegion.java:7888)
        at 
org.apache.hadoop.hbase.regionserver.HRegion.openHRegion(HRegion.java:7839)
        at 
org.apache.hadoop.hbase.regionserver.handler.AssignRegionHandler.process(AssignRegionHandler.java:132)
        at 
org.apache.hadoop.hbase.executor.EventHandler.run(EventHandler.java:104)
        at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
        at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
Caused by: java.io.IOException: java.io.FileNotFoundException: HFileLink 
locations=[hdfs://localhost:62716/user/tsuzuki/test-data/c00e6c6b-1c3b-5e40-4227-831ae42cf2f4/data/default/testCloneSnapshotBeforeSplittingRegionAndDroppingTable_0__regionReplication_1_1603199732705/f4658c2b6fb129d95f62e63d3742177d/cf/719b64120a0f4394ae7af8926bc56402,
 
hdfs://localhost:62716/user/tsuzuki/test-data/c00e6c6b-1c3b-5e40-4227-831ae42cf2f4/.tmp/data/default/testCloneSnapshotBeforeSplittingRegionAndDroppingTable_0__regionReplication_1_1603199732705/f4658c2b6fb129d95f62e63d3742177d/cf/719b64120a0f4394ae7af8926bc56402,
 
hdfs://localhost:62716/user/tsuzuki/test-data/c00e6c6b-1c3b-5e40-4227-831ae42cf2f4/mobdir/data/default/testCloneSnapshotBeforeSplittingRegionAndDroppingTable_0__regionReplication_1_1603199732705/f4658c2b6fb129d95f62e63d3742177d/cf/719b64120a0f4394ae7af8926bc56402,
 
hdfs://localhost:62716/user/tsuzuki/test-data/c00e6c6b-1c3b-5e40-4227-831ae42cf2f4/archive/data/default/testCloneSnapshotBeforeSplittingRegionAndDroppingTable_0__regionReplication_1_1603199732705/f4658c2b6fb129d95f62e63d3742177d/cf/719b64120a0f4394ae7af8926bc56402]
        at 
org.apache.hadoop.hbase.regionserver.HStore.openStoreFiles(HStore.java:601)
        at 
org.apache.hadoop.hbase.regionserver.HStore.loadStoreFiles(HStore.java:561)
        at org.apache.hadoop.hbase.regionserver.HStore.<init>(HStore.java:310)
        at 
org.apache.hadoop.hbase.regionserver.HRegion.instantiateHStore(HRegion.java:6452)
        at 
org.apache.hadoop.hbase.regionserver.HRegion$1.call(HRegion.java:1143)
        at 
org.apache.hadoop.hbase.regionserver.HRegion$1.call(HRegion.java:1140)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at 
java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        ... 3 more
Caused by: java.io.FileNotFoundException: HFileLink 
locations=[hdfs://localhost:62716/user/tsuzuki/test-data/c00e6c6b-1c3b-5e40-4227-831ae42cf2f4/data/default/testCloneSnapshotBeforeSplittingRegionAndDroppingTable_0__regionReplication_1_1603199732705/f4658c2b6fb129d95f62e63d3742177d/cf/719b64120a0f4394ae7af8926bc56402,
 
hdfs://localhost:62716/user/tsuzuki/test-data/c00e6c6b-1c3b-5e40-4227-831ae42cf2f4/.tmp/data/default/testCloneSnapshotBeforeSplittingRegionAndDroppingTable_0__regionReplication_1_1603199732705/f4658c2b6fb129d95f62e63d3742177d/cf/719b64120a0f4394ae7af8926bc56402,
 
hdfs://localhost:62716/user/tsuzuki/test-data/c00e6c6b-1c3b-5e40-4227-831ae42cf2f4/mobdir/data/default/testCloneSnapshotBeforeSplittingRegionAndDroppingTable_0__regionReplication_1_1603199732705/f4658c2b6fb129d95f62e63d3742177d/cf/719b64120a0f4394ae7af8926bc56402,
 
hdfs://localhost:62716/user/tsuzuki/test-data/c00e6c6b-1c3b-5e40-4227-831ae42cf2f4/archive/data/default/testCloneSnapshotBeforeSplittingRegionAndDroppingTable_0__regionReplication_1_1603199732705/f4658c2b6fb129d95f62e63d3742177d/cf/719b64120a0f4394ae7af8926bc56402]
        at 
org.apache.hadoop.hbase.io.FileLink.handleAccessLocationException(FileLink.java:438)
        at org.apache.hadoop.hbase.io.FileLink.getFileStatus(FileLink.java:411)
        at 
org.apache.hadoop.hbase.regionserver.StoreFileInfo.getReferencedFileStatus(StoreFileInfo.java:384)
        at 
org.apache.hadoop.hbase.regionserver.StoreFileInfo.computeHDFSBlocksDistributionInternal(StoreFileInfo.java:345)
        at 
org.apache.hadoop.hbase.regionserver.StoreFileInfo.computeHDFSBlocksDistribution(StoreFileInfo.java:331)
        at 
org.apache.hadoop.hbase.regionserver.StoreFileInfo.initHDFSBlocksDistribution(StoreFileInfo.java:675)
        at 
org.apache.hadoop.hbase.regionserver.HStoreFile.open(HStoreFile.java:356)
        at 
org.apache.hadoop.hbase.regionserver.HStoreFile.initReader(HStoreFile.java:482)
        at 
org.apache.hadoop.hbase.regionserver.HStore.createStoreFileAndReader(HStore.java:738)
        at 
org.apache.hadoop.hbase.regionserver.HStore.lambda$openStoreFiles$1(HStore.java:579)
        ... 6 more
{code}


> Data loss can happen if a cloned table loses the original split region (delete table)
> --------------------------------------------------------------------------------
>
>                 Key: HBASE-25206
>                 URL: https://issues.apache.org/jira/browse/HBASE-25206
>             Project: HBase
>          Issue Type: Bug
>            Reporter: Toshihiro Suzuki
>            Assignee: Toshihiro Suzuki
>            Priority: Major
>
> Steps to reproduce are as follows:
> 1. Create a table and put some data into the table:
> {code:java}
> create 'test1','cf'
> put 'test1','r1','cf','v1'
> put 'test1','r2','cf','v2'
> put 'test1','r3','cf','v3'
> put 'test1','r4','cf','v4'
> put 'test1','r5','cf','v5'
> {code}
> 2. Take a snapshot of the table:
> {code:java}
> snapshot 'test1','snap_test'
> {code}
> 3. Clone the snapshot to another table:
> {code:java}
> clone_snapshot 'snap_test','test2'
> {code}
> 4. Delete the snapshot:
> {code:java}
> delete_snapshot 'snap_test'
> {code}
> 5. Split the original table:
> {code:java}
> split 'test1','r3'
> {code}
> 6. Drop the original table:
> {code:java}
> disable 'test1'
> drop 'test1'
> {code}
> After that, we see an error like the following in the RegionServer log when opening the regions of the cloned table:
> {code:java}
> 2020-10-20 13:32:18,415 WARN org.apache.hadoop.hbase.regionserver.HRegion: 
> Failed initialize of region= 
> test2,,1603200595702.bebdc4f740626206eeccad96b7643261., starting to roll back 
> memstore
> java.io.IOException: java.io.IOException: java.io.FileNotFoundException: 
> Unable to open link: org.apache.hadoop.hbase.io.HFileLink 
> locations=[hdfs://<NN 
> HOST>:8020/hbase/data/default/test1/349b766b1b38e21f627ed4e441ae643c/cf/b6e39865710345c8998dec0bcc94cc89,
>  hdfs://<NN 
> HOST>:8020/hbase/.tmp/data/default/test1/349b766b1b38e21f627ed4e441ae643c/cf/b6e39865710345c8998dec0bcc94cc89,
>  hdfs://<NN 
> HOST>:8020/hbase/mobdir/data/default/test1/349b766b1b38e21f627ed4e441ae643c/cf/b6e39865710345c8998dec0bcc94cc89,
>  hdfs://<NN 
> HOST>:8020/hbase/archive/data/default/test1/349b766b1b38e21f627ed4e441ae643c/cf/b6e39865710345c8998dec0bcc94cc89]
>         at 
> org.apache.hadoop.hbase.regionserver.HRegion.initializeStores(HRegion.java:1095)
>         at 
> org.apache.hadoop.hbase.regionserver.HRegion.initializeRegionInternals(HRegion.java:943)
>         at 
> org.apache.hadoop.hbase.regionserver.HRegion.initialize(HRegion.java:899)
>         at 
> org.apache.hadoop.hbase.regionserver.HRegion.openHRegion(HRegion.java:7246)
>         at 
> org.apache.hadoop.hbase.regionserver.HRegion.openHRegion(HRegion.java:7204)
>         at 
> org.apache.hadoop.hbase.regionserver.HRegion.openHRegion(HRegion.java:7176)
>         at 
> org.apache.hadoop.hbase.regionserver.HRegion.openHRegion(HRegion.java:7134)
>         at 
> org.apache.hadoop.hbase.regionserver.HRegion.openHRegion(HRegion.java:7085)
>         at 
> org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler.openRegion(OpenRegionHandler.java:283)
>         at 
> org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler.process(OpenRegionHandler.java:108)
>         at 
> org.apache.hadoop.hbase.executor.EventHandler.run(EventHandler.java:104)
>         at 
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
>         at 
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
>         at java.lang.Thread.run(Thread.java:748)
> Caused by: java.io.IOException: java.io.FileNotFoundException: Unable to open 
> link: org.apache.hadoop.hbase.io.HFileLink locations=[hdfs://<NN 
> HOST>:8020/hbase/data/default/test1/349b766b1b38e21f627ed4e441ae643c/cf/b6e39865710345c8998dec0bcc94cc89,
>  hdfs://<NN 
> HOST>:8020/hbase/.tmp/data/default/test1/349b766b1b38e21f627ed4e441ae643c/cf/b6e39865710345c8998dec0bcc94cc89,
>  hdfs://<NN 
> HOST>:8020/hbase/mobdir/data/default/test1/349b766b1b38e21f627ed4e441ae643c/cf/b6e39865710345c8998dec0bcc94cc89,
>  hdfs://<NN 
> HOST>:8020/hbase/archive/data/default/test1/349b766b1b38e21f627ed4e441ae643c/cf/b6e39865710345c8998dec0bcc94cc89]
>         at 
> org.apache.hadoop.hbase.regionserver.HStore.openStoreFiles(HStore.java:590)
>         at 
> org.apache.hadoop.hbase.regionserver.HStore.loadStoreFiles(HStore.java:557)
>         at org.apache.hadoop.hbase.regionserver.HStore.<init>(HStore.java:303)
>         at 
> org.apache.hadoop.hbase.regionserver.HRegion.instantiateHStore(HRegion.java:5731)
>         at 
> org.apache.hadoop.hbase.regionserver.HRegion$1.call(HRegion.java:1059)
>         at 
> org.apache.hadoop.hbase.regionserver.HRegion$1.call(HRegion.java:1056)
>         at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>         at 
> java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
>         at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>         ... 3 more
> Caused by: java.io.FileNotFoundException: Unable to open link: 
> org.apache.hadoop.hbase.io.HFileLink locations=[hdfs://<NN 
> HOST>:8020/hbase/data/default/test1/349b766b1b38e21f627ed4e441ae643c/cf/b6e39865710345c8998dec0bcc94cc89,
>  hdfs://<NN 
> HOST>:8020/hbase/.tmp/data/default/test1/349b766b1b38e21f627ed4e441ae643c/cf/b6e39865710345c8998dec0bcc94cc89,
>  hdfs://<NN 
> HOST>:8020/hbase/mobdir/data/default/test1/349b766b1b38e21f627ed4e441ae643c/cf/b6e39865710345c8998dec0bcc94cc89,
>  hdfs://<NN 
> HOST>:8020/hbase/archive/data/default/test1/349b766b1b38e21f627ed4e441ae643c/cf/b6e39865710345c8998dec0bcc94cc89]
>         at 
> org.apache.hadoop.hbase.io.FileLink$FileLinkInputStream.tryOpen(FileLink.java:322)
>         at 
> org.apache.hadoop.hbase.io.FileLink$FileLinkInputStream.<init>(FileLink.java:126)
>         at 
> org.apache.hadoop.hbase.io.FileLink$FileLinkInputStream.<init>(FileLink.java:117)
>         at org.apache.hadoop.hbase.io.FileLink.open(FileLink.java:429)
>         at 
> org.apache.hadoop.hbase.io.FSDataInputStreamWrapper.<init>(FSDataInputStreamWrapper.java:125)
>         at 
> org.apache.hadoop.hbase.io.FSDataInputStreamWrapper.<init>(FSDataInputStreamWrapper.java:107)
>         at 
> org.apache.hadoop.hbase.regionserver.StoreFileInfo.open(StoreFileInfo.java:261)
>         at 
> org.apache.hadoop.hbase.regionserver.HStoreFile.open(HStoreFile.java:368)
>         at 
> org.apache.hadoop.hbase.regionserver.HStoreFile.initReader(HStoreFile.java:476)
>         at 
> org.apache.hadoop.hbase.regionserver.HStore.createStoreFileAndReader(HStore.java:703)
>         at 
> org.apache.hadoop.hbase.regionserver.HStore.lambda$openStoreFiles$1(HStore.java:573)
>         ... 6 more
> {code}



--
This message was sent by Atlassian Jira
(v8.3.4#803005)
