diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
index 8ee0b6b98ada..5816b8ff1602 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
@@ -2108,6 +2108,7 @@ public void notifyFileCachingCompleted(Path fileName, int totalBlockCount, int d
         for (ReentrantReadWriteLock lock : locks) {
           lock.readLock().unlock();
         }
+        locks.clear();
         LOG.debug("There are still blocks pending caching for file {}. Will sleep 100ms "
           + "and try the verification again.", fileName);
         Thread.sleep(100);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java
index 262408e91a82..848f33bb9c3a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java
@@ -32,6 +32,7 @@
 import java.util.concurrent.ThreadLocalRandom;
 import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.MultithreadedTestUtil;
 import org.apache.hadoop.hbase.MultithreadedTestUtil.TestThread;
@@ -275,6 +276,10 @@ public BlockType getBlockType() {
   }
 
   public static HFileBlockPair[] generateHFileBlocks(int blockSize, int numBlocks) {
+    return generateBlocksForPath(blockSize, numBlocks, null);
+  }
+
+  public static HFileBlockPair[] generateBlocksForPath(int blockSize, int numBlocks, Path path) {
     HFileBlockPair[] returnedBlocks = new HFileBlockPair[numBlocks];
     Random rand = ThreadLocalRandom.current();
     HashSet usedStrings = new HashSet<>();
@@ -299,16 +304,21 @@ public static HFileBlockPair[] generateHFileBlocks(int blockSize, int numBlocks)
           prevBlockOffset, ByteBuff.wrap(cachedBuffer), HFileBlock.DONT_FILL_HEADER, blockSize,
           onDiskSizeWithoutHeader + HConstants.HFILEBLOCK_HEADER_SIZE, -1, meta,
           ByteBuffAllocator.HEAP);
-
-      String strKey;
-      /* No conflicting keys */
-      strKey = Long.toString(rand.nextLong());
-      while (!usedStrings.add(strKey)) {
-        strKey = Long.toString(rand.nextLong());
+      String key = null;
+      long offset = 0;
+      if (path != null) {
+        key = path.getName();
+        offset = i * blockSize;
+      } else {
+        /* No conflicting keys */
+        key = Long.toString(rand.nextLong());
+        while (!usedStrings.add(key)) {
+          key = Long.toString(rand.nextLong());
+        }
       }
-      returnedBlocks[i] = new HFileBlockPair();
-      returnedBlocks[i].blockName = new BlockCacheKey(strKey, 0);
+      returnedBlocks[i] = new HFileBlockPair();
+      returnedBlocks[i].blockName = new BlockCacheKey(key, offset);
       returnedBlocks[i].block = generated;
     }
     return returnedBlocks;
   }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
index 6a9b5bf382a6..78a781994e83 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
@@ -890,4 +890,57 @@ public void testBlockAdditionWaitWhenCache() throws Exception {
       HBASE_TESTING_UTILITY.cleanupTestDir();
     }
   }
+
+  @Test
+  public void testNotifyFileCachingCompletedSuccess() throws Exception {
+    BucketCache bucketCache = null;
+    try {
+      Path filePath =
+        new Path(HBASE_TESTING_UTILITY.getDataTestDir(), "testNotifyFileCachingCompletedSuccess");
+      bucketCache = testNotifyFileCachingCompleted(filePath, 10);
+      assertTrue(bucketCache.fullyCachedFiles.containsKey(filePath.getName()));
+    } finally {
+      if (bucketCache != null) {
+        bucketCache.shutdown();
+      }
+      HBASE_TESTING_UTILITY.cleanupTestDir();
+    }
+  }
+
+  @Test
+  public void testNotifyFileCachingCompletedNotAllCached() throws Exception {
+    BucketCache bucketCache = null;
+    try {
+      Path filePath = new Path(HBASE_TESTING_UTILITY.getDataTestDir(),
+        "testNotifyFileCachingCompletedNotAllCached");
+      // Deliberately passing more blocks than we have created to test that
+      // notifyFileCachingCompleted will not consider the file fully cached
+      bucketCache = testNotifyFileCachingCompleted(filePath, 12);
+      assertFalse(bucketCache.fullyCachedFiles.containsKey(filePath.getName()));
+    } finally {
+      if (bucketCache != null) {
+        bucketCache.shutdown();
+      }
+      HBASE_TESTING_UTILITY.cleanupTestDir();
+    }
+  }
+
+  private BucketCache testNotifyFileCachingCompleted(Path filePath, int totalBlocks)
+    throws Exception {
+    final Path dataTestDir = createAndGetTestDir();
+    String ioEngineName = "file:" + dataTestDir + "/bucketNoRecycler.cache";
+    BucketCache bucketCache = new BucketCache(ioEngineName, capacitySize, constructedBlockSize,
+      constructedBlockSizes, 1, 1, null);
+    long usedByteSize = bucketCache.getAllocator().getUsedSize();
+    assertEquals(0, usedByteSize);
+    HFileBlockPair[] hfileBlockPairs =
+      CacheTestUtils.generateBlocksForPath(constructedBlockSize, 10, filePath);
+    // Add blocks
+    for (HFileBlockPair hfileBlockPair : hfileBlockPairs) {
+      bucketCache.cacheBlock(hfileBlockPair.getBlockName(), hfileBlockPair.getBlock(), false, true);
+    }
+    bucketCache.notifyFileCachingCompleted(filePath, totalBlocks, totalBlocks,
+      totalBlocks * constructedBlockSize);
+    return bucketCache;
+  }
 }