diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java
index a2fdcc4e6d8c..da843f1f7d6a 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java
@@ -131,6 +131,8 @@ public static byte[] copyToNewByteArray(final Cell cell) {
     //Cell#getSerializedSize returns the serialized size of the Source cell, which may
     //not serialize all fields. We are constructing a KeyValue backing array here,
     //which does include all fields, and must allocate accordingly.
+    //TODO we could probably use Cell#getSerializedSize safely, the errors were
+    //caused by cells corrupted by use-after-free bugs
     int v1Length = length(cell.getRowLength(), cell.getFamilyLength(), cell.getQualifierLength(),
       cell.getValueLength(), cell.getTagsLength(), true);
     byte[] backingBytes = new byte[v1Length];
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 1acbf162b098..10c626d77e04 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -94,6 +94,7 @@
 import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
 import org.apache.hadoop.hbase.HDFSBlocksDistribution;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.MetaCellComparator;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.NotServingRegionException;
@@ -7867,7 +7868,7 @@ private List<Cell> getInternal(Get get, boolean withCoprocessor, long nonceGroup
       // See more details in HBASE-26036.
       for (Cell cell : tmp) {
         results.add(cell instanceof ByteBufferExtendedCell ?
-          ((ByteBufferExtendedCell) cell).deepClone(): cell);
+          KeyValueUtil.copyToNewKeyValue(cell) : cell);
       }
     }
 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCheckAndMutateWithByteBuff.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCheckAndMutateWithByteBuff.java
index de1d02bfe734..1489c1f0400b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCheckAndMutateWithByteBuff.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCheckAndMutateWithByteBuff.java
@@ -32,6 +32,7 @@
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.io.ByteBuffAllocator;
 import org.apache.hadoop.hbase.io.DeallocateRewriteByteBuffAllocator;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.BlockCacheFactory;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
@@ -87,8 +88,20 @@ public static void tearDownAfterClass() throws Exception {
   }
 
   @Test
-  public void testCheckAndMutateWithByteBuff() throws Exception {
-    Table testTable = createTable(TableName.valueOf(name.getMethodName()));
+  public void testCheckAndMutateWithByteBuffNoEncode() throws Exception {
+    testCheckAndMutateWithByteBuff(TableName.valueOf(name.getMethodName()), DataBlockEncoding.NONE);
+  }
+
+  @Test
+  public void testCheckAndMutateWithByteBuffEncode() throws Exception {
+    // Tests for HBASE-26777.
+    // As most HBase.getRegion() calls have been factored out from HBase, you'd need to revert
+    // both HBASE-26777, and the HBase.get() replacements from HBASE-26036 for this test to fail
+    testCheckAndMutateWithByteBuff(TableName.valueOf(name.getMethodName()), DataBlockEncoding.FAST_DIFF);
+  }
+
+  private void testCheckAndMutateWithByteBuff(TableName tableName, DataBlockEncoding dbe) throws Exception {
+    Table testTable = createTable(tableName, dbe);
     byte[] checkRow = Bytes.toBytes("checkRow");
     byte[] checkQualifier = Bytes.toBytes("cq");
     byte[] checkValue = Bytes.toBytes("checkValue");
@@ -104,10 +117,13 @@ public void testCheckAndMutateWithByteBuff() throws Exception {
       Bytes.toBytes("testValue"))));
   }
 
-  private Table createTable(TableName tableName)
+  private Table createTable(TableName tableName, DataBlockEncoding dbe)
     throws IOException {
     TableDescriptor td = TableDescriptorBuilder.newBuilder(tableName)
-      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(CF).setBlocksize(100).build())
+      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(CF)
+        .setBlocksize(100)
+        .setDataBlockEncoding(dbe)
+        .build())
       .build();
     return TEST_UTIL.createTable(td, null);
   }