diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestCoprocessorDescriptor.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestCoprocessorDescriptor.java index b288f98f1f92..0a59f8f88fa7 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestCoprocessorDescriptor.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestCoprocessorDescriptor.java @@ -33,8 +33,6 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; @Category({ MiscTests.class, SmallTests.class }) public class TestCoprocessorDescriptor { @@ -43,8 +41,6 @@ public class TestCoprocessorDescriptor { public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestCoprocessorDescriptor.class); - private static final Logger LOG = LoggerFactory.getLogger(TestCoprocessorDescriptor.class); - @Rule public TestName name = new TestName(); @@ -71,7 +67,7 @@ public void testSetCoprocessor() throws IOException { List cps = new ArrayList<>(); for (String className : Arrays.asList("className0", "className1", "className2")) { String path = "path"; - int priority = Math.abs(className.hashCode()); + int priority = 100; String propertyValue = "propertyValue"; cps.add( CoprocessorDescriptorBuilder.newBuilder(className).setJarPath(path).setPriority(priority) diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestScan.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestScan.java index c653f7b1256f..94ed163a747c 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestScan.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestScan.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.hbase.client; +import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; @@ -107,7 +108,7 @@ public void testGetToScan() throws Exception { assertEquals(get.getColumnFamilyTimeRange().get(Bytes.toBytes("cf")).getMax(), scan.getColumnFamilyTimeRange().get(Bytes.toBytes("cf")).getMax()); assertEquals(get.getReplicaId(), scan.getReplicaId()); - assertEquals(get.getACL(), scan.getACL()); + assertArrayEquals(get.getACL(), scan.getACL()); assertEquals(get.getAuthorizations().getLabels(), scan.getAuthorizations().getLabels()); assertEquals(get.getPriority(), scan.getPriority()); } @@ -259,9 +260,9 @@ public void testScanCopyConstructor() throws Exception { Scan scanCopy = new Scan(scan); // validate fields of copied scan object match with the original scan object - assertEquals(scan.getACL(), scanCopy.getACL()); + assertArrayEquals(scan.getACL(), scanCopy.getACL()); assertEquals(scan.getAllowPartialResults(), scanCopy.getAllowPartialResults()); - assertEquals(scan.getAttribute("test_key"), scanCopy.getAttribute("test_key")); + assertArrayEquals(scan.getAttribute("test_key"), scanCopy.getAttribute("test_key")); assertEquals(scan.getAttributeSize(), scanCopy.getAttributeSize()); assertEquals(scan.getAttributesMap(), scanCopy.getAttributesMap()); assertEquals(scan.getAuthorizations().getLabels(), scanCopy.getAuthorizations().getLabels()); @@ -270,7 +271,7 @@ public void testScanCopyConstructor() throws Exception { assertEquals(scan.getCaching(), scanCopy.getCaching()); assertEquals(scan.getConsistency(), scanCopy.getConsistency()); assertEquals(scan.getFamilies().length, 
scanCopy.getFamilies().length); - assertEquals(scan.getFamilies()[0], scanCopy.getFamilies()[0]); + assertArrayEquals(scan.getFamilies()[0], scanCopy.getFamilies()[0]); assertEquals(scan.getFamilyMap(), scanCopy.getFamilyMap()); assertEquals(scan.getFilter(), scanCopy.getFilter()); assertEquals(scan.getId(), scanCopy.getId()); @@ -286,8 +287,8 @@ public void testScanCopyConstructor() throws Exception { assertEquals(scan.getReadType(), scanCopy.getReadType()); assertEquals(scan.getReplicaId(), scanCopy.getReplicaId()); assertEquals(scan.getRowOffsetPerColumnFamily(), scanCopy.getRowOffsetPerColumnFamily()); - assertEquals(scan.getStartRow(), scanCopy.getStartRow()); - assertEquals(scan.getStopRow(), scanCopy.getStopRow()); + assertArrayEquals(scan.getStartRow(), scanCopy.getStartRow()); + assertArrayEquals(scan.getStopRow(), scanCopy.getStopRow()); assertEquals(scan.getTimeRange(), scanCopy.getTimeRange()); assertTrue("Make sure copy constructor adds all the fields in the copied object", diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/token/TestClientTokenUtil.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/token/TestClientTokenUtil.java index 50db3a99d22a..2fec41c751ee 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/token/TestClientTokenUtil.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/token/TestClientTokenUtil.java @@ -19,6 +19,7 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.fail; +import static org.mockito.Mockito.mock; import java.io.IOException; import java.lang.reflect.Field; @@ -66,7 +67,7 @@ public void testObtainToken() throws Exception { shouldInjectFault.set(null, injected); try { - ClientTokenUtil.obtainToken((Connection)null); + ClientTokenUtil.obtainToken(mock(Connection.class)); fail("Should have injected exception."); } catch (IOException e) { Throwable t = e; diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/util/TestRoundRobinPoolMap.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/util/TestRoundRobinPoolMap.java index ef7cb4e6512a..2fd73caea46a 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/util/TestRoundRobinPoolMap.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/util/TestRoundRobinPoolMap.java @@ -25,11 +25,9 @@ import java.util.Collections; import java.util.Iterator; import java.util.List; -import java.util.Random; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionException; import java.util.concurrent.ExecutionException; -import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.atomic.AtomicInteger; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MiscTests; diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/util/TestThreadLocalPoolMap.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/util/TestThreadLocalPoolMap.java index a1cb610e8544..2f497c6fdfb5 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/util/TestThreadLocalPoolMap.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/util/TestThreadLocalPoolMap.java @@ -20,11 +20,9 @@ import static org.junit.Assert.assertEquals; import java.io.IOException; -import java.util.Random; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionException; import java.util.concurrent.ExecutionException; -import java.util.concurrent.ThreadLocalRandom; import 
java.util.concurrent.atomic.AtomicInteger; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MiscTests; diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/nio/TestMultiByteBuff.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/nio/TestMultiByteBuff.java index b40ac0c22a88..87504044dcdc 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/nio/TestMultiByteBuff.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/nio/TestMultiByteBuff.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.hbase.nio; +import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; @@ -153,7 +154,7 @@ public void testArrayBasedMethods() { mbb1 = new SingleByteBuff(bb1); assertTrue(mbb1.hasArray()); assertEquals(1, mbb1.arrayOffset()); - assertEquals(b, mbb1.array()); + assertArrayEquals(b, mbb1.array()); mbb1 = new SingleByteBuff(ByteBuffer.allocateDirect(10)); assertFalse(mbb1.hasArray()); try { diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestSimpleKdcServerUtil.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestSimpleKdcServerUtil.java index 1ebe82f5722f..d73b3c7cf1cd 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestSimpleKdcServerUtil.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestSimpleKdcServerUtil.java @@ -49,7 +49,9 @@ public void testBindException() throws KrbException, IOException { getRunningSimpleKdcServer(dir, HBaseCommonTestingUtility::randomFreePort, true); kdc.createPrincipal("wah"); } finally { - kdc.stop(); + if (kdc != null) { + kdc.stop(); + } } } } diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java index 7f1223980e3d..0a80c39126d7 100644 --- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java +++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java @@ -260,12 +260,15 @@ protected static String readOutput(URL url) throws IOException { */ protected static void deleteRecursively(File d) { if (d.isDirectory()) { - for (String name : d.list()) { - File child = new File(d, name); - if (child.isFile()) { - child.delete(); - } else { - deleteRecursively(child); + String[] names = d.list(); + if (names != null) { + for (String name: names) { + File child = new File(d, name); + if (child.isFile()) { + child.delete(); + } else { + deleteRecursively(child); + } } } } diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java index dfdb5f050f7b..df146a1820c4 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java @@ -40,10 +40,10 @@ public void testMonkeyPropertiesParsing() { conf.set(MonkeyConstants.BATCH_RESTART_RS_RATIO, "0.85"); conf.set(MonkeyConstants.MOVE_REGIONS_MAX_TIME, "60000"); conf.set("hbase.rootdir", "/foo/bar/baz"); final Properties props = new Properties(); - IntegrationTestBase testBase = new IntegrationTestDDLMasterFailover(); assertEquals(0, props.size()); - testBase.loadMonkeyProperties(props, conf); + new IntegrationTestDDLMasterFailover(); +
IntegrationTestBase.loadMonkeyProperties(props, conf); assertEquals(2, props.size()); assertEquals("0.85", props.getProperty(MonkeyConstants.BATCH_RESTART_RS_RATIO)); assertEquals("60000", props.getProperty(MonkeyConstants.MOVE_REGIONS_MAX_TIME)); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java index 2dd163305b95..915399ac36af 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java @@ -136,15 +136,15 @@ void stopRandomServer() throws Exception { int size = rpcServers.size(); int rand = random.nextInt(size); rpcServer = serverList.remove(rand); - InetSocketAddress address = rpcServer.getListenerAddress(); - if (address == null) { - // Throw exception here. We can't remove this instance from the server map because - // we no longer have access to its map key - throw new IOException("Listener channel is closed"); - } - rpcServers.remove(address); if (rpcServer != null) { + InetSocketAddress address = rpcServer.getListenerAddress(); + if (address == null) { + // Throw exception here. We can't remove this instance from the server map because + // we no longer have access to its map key + throw new IOException("Listener channel is closed"); + } + rpcServers.remove(address); stopServer(rpcServer); } } finally { diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java index 13e0f0ae888c..af7f5f3e2831 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java @@ -132,7 +132,7 @@ public class IntegrationTestMTTR { /** * Util to get at the cluster. */ - private static IntegrationTestingUtility util; + private static final IntegrationTestingUtility util = new IntegrationTestingUtility(); /** * Executor for test threads. */ @@ -156,10 +156,6 @@ public class IntegrationTestMTTR { @BeforeClass public static void setUp() throws Exception { - // Set up the integration test util - if (util == null) { - util = new IntegrationTestingUtility(); - } // Make sure there are three servers. util.initializeCluster(3); @@ -250,7 +246,6 @@ private static void setupTables() throws IOException { public static void after() throws IOException { // Clean everything up. util.restoreCluster(); - util = null; // Stop the threads so that we know everything is complete.
executorService.shutdown(); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java index 02e6383890b6..78b08e5040e4 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java @@ -1316,15 +1316,15 @@ private StringBuilder dumpExtraInfoOnRefs(final BytesWritable key, final Context hrl = rl.getRegionLocation(key.getBytes()); if (hrl != null) keyRegionLocation = hrl.toString(); } + LOG.error("Extras on ref without a def, ref=" + Bytes.toStringBinary(ref) + + ", refPrevEqualsKey=" + + (Bytes.compareTo(key.getBytes(), 0, key.getLength(), b, 0, b.length) == 0) + + ", key=" + Bytes.toStringBinary(key.getBytes(), 0, key.getLength()) + + ", ref row date=" + ts + ", jobStr=" + jobStr + + ", ref row count=" + count + + ", ref row regionLocation=" + refRegionLocation + + ", key row regionLocation=" + keyRegionLocation); } - LOG.error("Extras on ref without a def, ref=" + Bytes.toStringBinary(ref) + - ", refPrevEqualsKey=" + - (Bytes.compareTo(key.getBytes(), 0, key.getLength(), b, 0, b.length) == 0) + - ", key=" + Bytes.toStringBinary(key.getBytes(), 0, key.getLength()) + - ", ref row date=" + ts + ", jobStr=" + jobStr + - ", ref row count=" + count + - ", ref row regionLocation=" + refRegionLocation + - ", key row regionLocation=" + keyRegionLocation); refsSb.append(comma); comma = ","; refsSb.append(Bytes.toStringBinary(ref)); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java index 1796db749489..3e2782fdd6c1 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java @@ -22,6 +22,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.HBaseCommonTestingUtil; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.IntegrationTestingUtility; @@ -155,9 +156,16 @@ public void closeConnection() throws Exception { this.connection = null; } - public boolean equals(ClusterID other) { + public boolean equals(Object other) { + if (other == null) { + return false; + } return this.toString().equalsIgnoreCase(other.toString()); } + + public int hashCode() { + return this.toString().toLowerCase().hashCode(); + } } /** @@ -264,7 +272,7 @@ protected void waitForReplication() throws Exception { */ protected void runGenerator() throws Exception { Path outputPath = new Path(outputDir); - UUID uuid = util.getRandomUUID(); //create a random UUID. + UUID uuid = HBaseCommonTestingUtil.getRandomUUID(); //create a random UUID. Path generatorOutput = new Path(outputPath, uuid.toString()); Generator generator = new Generator(); @@ -288,7 +296,7 @@ protected void runGenerator() throws Exception { */ protected void runVerify(long expectedNumNodes) throws Exception { Path outputPath = new Path(outputDir); - UUID uuid = util.getRandomUUID(); //create a random UUID. + UUID uuid = HBaseCommonTestingUtil.getRandomUUID(); //create a random UUID.
Path iterationOutput = new Path(outputPath, uuid.toString()); Verify verify = new Verify(); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java index caacfc648559..b961bbd77c6d 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java @@ -314,7 +314,7 @@ private void assertTargetDoDeletesFalse(int expectedRows, TableName sourceTableN if (!CellUtil.matchingQualifier(sourceCell, targetCell)) { Assert.fail("Qualifiers don't match"); } - if (targetRowKey < 80 && targetRowKey >= 90){ + if (targetRowKey < 80 || targetRowKey >= 90){ if (!CellUtil.matchingTimestamp(sourceCell, targetCell)) { Assert.fail("Timestamps don't match"); } diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java index 188fc1f70706..ffdb49ce61f9 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java @@ -100,7 +100,7 @@ protected byte[] getEndRow() { @Test public void testGetBestLocations() throws IOException { - TableSnapshotInputFormatImpl tsif = new TableSnapshotInputFormatImpl(); + new TableSnapshotInputFormatImpl(); Configuration conf = UTIL.getConfiguration(); HDFSBlocksDistribution blockDistribution = new HDFSBlocksDistribution(); diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureInMemoryChore.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureInMemoryChore.java index 75c8d16485b6..70c645f12944 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureInMemoryChore.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureInMemoryChore.java @@ -19,6 +19,7 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; import java.io.IOException; import java.util.concurrent.CountDownLatch; @@ -88,8 +89,8 @@ public void testChoreAddAndRemove() throws Exception { procExecutor.removeChore(chore); latch = new CountDownLatch(nCountDown); chore.setLatch(latch); - latch.await(timeoutMSec * nCountDown, TimeUnit.MILLISECONDS); - LOG.info("chore latch count=" + latch.getCount()); + boolean reached = latch.await(timeoutMSec * nCountDown, TimeUnit.MILLISECONDS); + LOG.info("chore latch reached={} count={}", reached, latch.getCount()); assertFalse(chore.isWaiting()); assertTrue("latchCount=" + latch.getCount(), latch.getCount() > 0); } diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java index 27d3f037a4e7..87dd204e4bcb 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java @@ -26,8 +26,6 @@ import java.util.Collection; import javax.ws.rs.core.MediaType; import javax.xml.bind.JAXBContext; -import javax.xml.bind.Marshaller; -import javax.xml.bind.Unmarshaller; import org.apache.hadoop.conf.Configuration; import 
org.apache.hadoop.hbase.HBaseClassTestRule; @@ -80,8 +78,6 @@ public class TestMultiRowResource { private static Client client; private static JAXBContext context; - private static Marshaller marshaller; - private static Unmarshaller unmarshaller; private static Configuration conf; private static Header extraHdr = null; @@ -93,7 +89,11 @@ public static Collection<Object[]> data() { } public TestMultiRowResource(Boolean csrf) { - csrfEnabled = csrf; + setCsrfEnabled(csrf); + } + + private static void setCsrfEnabled(boolean flag) { + csrfEnabled = flag; } @BeforeClass @@ -110,8 +110,6 @@ public static void setUpBeforeClass() throws Exception { CellModel.class, CellSetModel.class, RowModel.class); - marshaller = context.createMarshaller(); - unmarshaller = context.createUnmarshaller(); client = new Client(new Cluster().add("localhost", REST_TEST_UTIL.getServletPort())); Admin admin = TEST_UTIL.getAdmin(); if (admin.tableExists(TABLE)) { diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java index 609ee011e765..8cba0a45707f 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java @@ -80,7 +80,11 @@ public static Collection<Object[]> parameters() { } public TestSchemaResource(Boolean csrf) { - csrfEnabled = csrf; + setCsrfEnabled(csrf); + } + + private static void setCsrfEnabled(boolean flag) { + csrfEnabled = flag; } @BeforeClass diff --git a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsAdmin2.java b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsAdmin2.java index cbbb86cde374..414a36c1be65 100644 --- a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsAdmin2.java +++ b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsAdmin2.java @@ -20,6 +20,7 @@ import static org.apache.hadoop.hbase.util.Threads.sleep; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -122,6 +123,7 @@ public boolean evaluate() throws Exception { break; } } + assertNotNull(tmpTargetServer); final ServerName targetServer = tmpTargetServer; // move target server to group rsGroupAdmin.moveServers(Sets.newHashSet(targetServer.getAddress()), newGroup.getName()); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MetaTableMetrics.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MetaTableMetrics.java index e36877a4111a..2c08f98a6ebe 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MetaTableMetrics.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MetaTableMetrics.java @@ -206,6 +206,9 @@ private String opWithClientMeterName(Object op) { return ""; } MetaTableOps ops = opsNameMap.get(op.getClass()); + if (ops == null) { + return ""; + } String opWithClientMeterName = ""; switch (ops) { case GET: @@ -226,6 +229,9 @@ private String opWithClientMeterName(Object op) { private String opMeterName(Object op) { // Extract meter name containing the access type MetaTableOps ops = opsNameMap.get(op.getClass()); + if (ops == null) { + return ""; + } String opMeterName = ""; switch (ops) { case GET: diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruCachedBlockQueue.java
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruCachedBlockQueue.java index 4c67c9a96191..bf97426976fb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruCachedBlockQueue.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruCachedBlockQueue.java @@ -65,15 +65,15 @@ public LruCachedBlockQueue(long maxSize, long blockSize) { * @param cb block to try to add to the queue */ public void add(LruCachedBlock cb) { - if(heapSize < maxSize) { + if (heapSize < maxSize) { queue.add(cb); heapSize += cb.heapSize(); } else { LruCachedBlock head = queue.peek(); - if(cb.compareTo(head) > 0) { + if (head != null && cb.compareTo(head) > 0) { heapSize += cb.heapSize(); heapSize -= head.heapSize(); - if(heapSize > maxSize) { + if (heapSize > maxSize) { queue.poll(); } else { heapSize += head.heapSize(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentVerificationReport.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentVerificationReport.java index 420eef7dbaff..850382bad213 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentVerificationReport.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentVerificationReport.java @@ -230,7 +230,7 @@ public void fillUp(TableName tableName, SnapshotOfRegionAssignmentFromMeta snaps this.maxDispersionScoreServerSet.clear(); this.maxDispersionScoreServerSet.add(primaryRS); this.maxDispersionScore = dispersionScore; - } else if (dispersionScore == this.maxDispersionScore) { + } else if (dispersionScore >= this.maxDispersionScore) { this.maxDispersionScoreServerSet.add(primaryRS); } @@ -248,7 +248,7 @@ public void fillUp(TableName tableName, SnapshotOfRegionAssignmentFromMeta snaps this.minDispersionScoreServerSet.clear(); this.minDispersionScoreServerSet.add(primaryRS); this.minDispersionScore = dispersionScore; - } else if (dispersionScore == this.minDispersionScore) { + } else if (dispersionScore <= this.minDispersionScore) { this.minDispersionScoreServerSet.add(primaryRS); } @@ -404,7 +404,7 @@ public void fillUpDispersion(TableName tableName, this.minDispersionScoreServerSet.clear(); this.minDispersionScoreServerSet.add(primaryRS); this.minDispersionScore = dispersionScore; - } else if (dispersionScore == this.minDispersionScore) { + } else if (dispersionScore <= this.minDispersionScore) { this.minDispersionScoreServerSet.add(primaryRS); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockManager.java index 4f020ac67919..7ff1091d5af5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockManager.java @@ -160,8 +160,11 @@ public boolean tryAcquire(final long timeoutMs) throws InterruptedException { Long.MAX_VALUE; while (deadline >= EnvironmentEdgeManager.currentTime() && !proc.isLocked()) { try { - lockAcquireLatch.await(deadline - EnvironmentEdgeManager.currentTime(), + boolean released = lockAcquireLatch.await(deadline - EnvironmentEdgeManager.currentTime(), TimeUnit.MILLISECONDS); + if (!released) { + LOG.warn("Timed out waiting for latch acquisition"); + } } catch (InterruptedException e) { LOG.info("InterruptedException when waiting for lock: " + proc.toString()); // kind of weird, releasing a lock which is not locked. 
This is to make the procedure diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index da22c5b69259..3ec0c340a339 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -7961,7 +7961,8 @@ private WriteEntry doWALAppend(WALEdit walEdit, Durability durability, List= backoffTime); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java index fb3b4dd8bfd0..76dd653af6db 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java @@ -356,6 +356,7 @@ public void run() { } catch (IOException e) { e.printStackTrace(); } + assertNotNull(regions); assertEquals(2, regions.size()); if (splitPoint != null) { // make sure the split point matches our explicit configuration diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.java index d81d70432907..c2026ca4a519 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.java @@ -1610,8 +1610,8 @@ private void slowdownCode(final ObserverContext e, countOfNext.incrementAndGet(); } LOG.info("Waiting for the counterCountDownLatch"); - latch.await(2, TimeUnit.MINUTES); // To help the tests to finish. - if (latch.getCount() > 0) { + boolean released = latch.await(2, TimeUnit.MINUTES); // To help the tests to finish. + if (!released) { throw new RuntimeException("Can't wait more"); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java index 722df932c30a..67abbf1eb31f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java @@ -117,8 +117,8 @@ public void preGetOp(final ObserverContext e, Thread.sleep(sleepTime.get()); } else if (latch.getCount() > 0) { LOG.info("Waiting for the counterCountDownLatch"); - latch.await(2, TimeUnit.MINUTES); // To help the tests to finish. - if (latch.getCount() > 0) { + boolean released = latch.await(2, TimeUnit.MINUTES); // To help the tests to finish. + if (!released) { throw new RuntimeException("Can't wait more"); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java index fb3116601619..621bdddc059a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java @@ -166,8 +166,8 @@ private void slowdownCode(final ObserverContext e) Thread.sleep(sleepTime.get()); } else if (latch.getCount() > 0) { LOG.info("Waiting for the counterCountDownLatch"); - latch.await(2, TimeUnit.MINUTES); // To help the tests to finish. 
- if (latch.getCount() > 0) { + boolean released = latch.await(2, TimeUnit.MINUTES); // To help the tests to finish. + if (!released) { throw new RuntimeException("Can't wait more"); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorConfiguration.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorConfiguration.java index b3012ecf61e1..1e0fd59730a1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorConfiguration.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorConfiguration.java @@ -190,13 +190,13 @@ public void testReadOnlyConfiguration() throws Exception { RegionCoprocessorHost rcp = new RegionCoprocessorHost(region, rsServices, conf); boolean found = false; for (String cpStr: rcp.getCoprocessors()) { - CoprocessorEnvironment cpenv = rcp.findCoprocessorEnvironment(cpStr); + CoprocessorEnvironment cpenv = rcp.findCoprocessorEnvironment(cpStr); if (cpenv != null) { found = true; + Configuration c = cpenv.getConfiguration(); + thrown.expect(UnsupportedOperationException.class); + c.set("one.two.three", "four.five.six"); } - Configuration c = cpenv.getConfiguration(); - thrown.expect(UnsupportedOperationException.class); - c.set("one.two.three", "four.five.six"); } assertTrue("Should be at least one CP found", found); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java index d0031ff65dd3..5ac54730e79e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java @@ -117,9 +117,7 @@ public void postCreateTable(ObserverContext env, TableDescriptor desc, RegionInfo[] regions) throws IOException { // cause a NullPointerException and don't catch it: this will cause the // master to abort(). - Integer i; - i = null; - i = i++; + throw new NullPointerException("injected fault"); } public boolean wasCreateTableCalled() { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithRemove.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithRemove.java index f3e0d4c9f17c..8e4cea345734 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithRemove.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithRemove.java @@ -95,9 +95,7 @@ public void postCreateTable(ObserverContext env, // Cause a NullPointerException and don't catch it: this should cause the // master to throw an o.apache.hadoop.hbase.DoNotRetryIOException to the // client. 
- Integer i; - i = null; - i = i++; + throw new NullPointerException("injected fault"); } public boolean wasCreateTableCalled() { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestReadOnlyConfiguration.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestReadOnlyConfiguration.java index a91c505f175a..b3c3847b4a31 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestReadOnlyConfiguration.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestReadOnlyConfiguration.java @@ -39,8 +39,8 @@ public class TestReadOnlyConfiguration { @Test public void testAddDefaultResource() { - Configuration configuration = new Configuration(); - Configuration readOnlyConf = new ReadOnlyConfiguration(configuration); - configuration.addDefaultResource("abc.xml"); + // XXX: Does this test really do anything? + Configuration readOnlyConf = new ReadOnlyConfiguration(new Configuration()); + readOnlyConf.addDefaultResource("abc.xml"); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java index 8309f471570d..388b96f2053b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java @@ -146,8 +146,7 @@ public static class FailedInitializationObserver implements RegionServerCoproces @Override public void start(CoprocessorEnvironment e) throws IOException { // Trigger a NPE to fail the coprocessor - Integer i = null; - i = i + 1; + throw new NullPointerException("injected fault"); } } @@ -161,8 +160,7 @@ public void prePut(final ObserverContext c, c.getEnvironment().getRegion().getRegionInfo().getTable().getNameAsString(); if (tableName.equals("observed_table")) { // Trigger a NPE to fail the coprocessor - Integer i = null; - i = i + 1; + throw new NullPointerException("injected fault"); } } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java index 2d564b671364..b03ba20ac44f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java @@ -64,8 +64,7 @@ public void prePut(final ObserverContext c, c.getEnvironment().getRegion().getRegionInfo().getTable().getNameAsString(); if (tableName.equals("observed_table")) { // Trigger a NPE to fail the coprocessor - Integer i = null; - i = i + 1; + throw new NullPointerException("injected fault"); } } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java index 5df089b597ec..c06233aec0d0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java @@ -248,7 +248,8 @@ public void process() throws IOException { }); //Wait EventHandler to start - waitForEventToStart.await(10, 
TimeUnit.SECONDS); + boolean released = waitForEventToStart.await(10, TimeUnit.SECONDS); + Assert.assertTrue("Latch was not released", released); int activeCount = executorService.getExecutor(ExecutorType.MASTER_SNAPSHOT_OPERATIONS) .getThreadPoolExecutor().getActiveCount(); Assert.assertEquals(1, activeCount); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java index 398817f5c2c1..86402f97a06f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java @@ -557,7 +557,6 @@ public void testMutations(){ assertEquals(expected, actual); } - byte[] row = new byte[] { 0 }; cl = Put.class; actual = Mutation.MUTATION_OVERHEAD + ClassSize.align(ClassSize.ARRAY); expected = ClassSize.estimateBase(cl, false); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/compress/HFileTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/compress/HFileTestBase.java index a99074f1c92e..778732264aa3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/compress/HFileTestBase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/compress/HFileTestBase.java @@ -98,8 +98,12 @@ public void doTest(Configuration conf, Path path, Compression.Algorithm compress i++; } while (scanner.next()); } finally { - reader.close(); - scanner.close(); + if (reader != null) { + reader.close(); + } + if (scanner != null) { + scanner.close(); + } } assertEquals("Did not read back as many KVs as written", i, testKvs.size()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java index 9c078772af39..56e9e626dd0b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java @@ -111,8 +111,6 @@ public void testKVCodecWithTagsForDecodedCellsWithNoTags() throws Exception { kv2.getTimestamp(), kv2.getTypeByte(), kv2.getValueArray(), kv2.getValueOffset(), kv2.getValueLength(), kv2.getSequenceId(), kv2.getTagsArray(), kv2.getTagsOffset(), kv2.getTagsLength()); - KeyValue kv3 = new KeyValue(Bytes.toBytes("r3"), Bytes.toBytes("cf"), Bytes.toBytes("qual"), - HConstants.LATEST_TIMESTAMP, Bytes.toBytes("3")); BufferedDataBlockEncoder.OffheapDecodedExtendedCell c3 = new BufferedDataBlockEncoder.OffheapDecodedExtendedCell(ByteBuffer.wrap(kv2.getKey()), kv2.getRowLength(), kv2.getFamilyOffset() - KeyValue.ROW_OFFSET, kv2.getFamilyLength(), diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java index a5b66872ca4d..ba5e50bc8244 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java @@ -372,7 +372,6 @@ public void testL2CacheWithInvalidBucketSize() { @Test public void testIndexOnlyLruBlockCache() { - CacheConfig cc = new CacheConfig(this.conf); conf.set(BlockCacheFactory.BLOCKCACHE_POLICY_KEY, "IndexOnlyLRU"); BlockCache blockCache = BlockCacheFactory.createBlockCache(this.conf); assertTrue(blockCache instanceof IndexOnlyLruBlockCache); diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java index 6382a0d74701..9d1e40b1f8ef 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java @@ -118,31 +118,38 @@ public void testComparatorIsHBase1Compatible() { @Test public void testCreateComparator() throws IOException { - FixedFileTrailer t = new FixedFileTrailer(version, HFileReaderImpl.PBUF_TRAILER_MINOR_VERSION); try { assertEquals(CellComparatorImpl.class, - t.createComparator(KeyValue.COMPARATOR.getLegacyKeyComparatorName()).getClass()); + FixedFileTrailer.createComparator(KeyValue.COMPARATOR.getLegacyKeyComparatorName()) + .getClass()); assertEquals(CellComparatorImpl.class, - t.createComparator(KeyValue.COMPARATOR.getClass().getName()).getClass()); + FixedFileTrailer.createComparator(KeyValue.COMPARATOR.getClass().getName()).getClass()); assertEquals(CellComparatorImpl.class, - t.createComparator(CellComparator.class.getName()).getClass()); + FixedFileTrailer.createComparator(CellComparator.class.getName()).getClass()); assertEquals(MetaCellComparator.class, - t.createComparator(KeyValue.META_COMPARATOR.getLegacyKeyComparatorName()).getClass()); + FixedFileTrailer.createComparator(KeyValue.META_COMPARATOR.getLegacyKeyComparatorName()) + .getClass()); assertEquals(MetaCellComparator.class, - t.createComparator(KeyValue.META_COMPARATOR.getClass().getName()).getClass()); + FixedFileTrailer.createComparator(KeyValue.META_COMPARATOR.getClass().getName()) + .getClass()); assertEquals(MetaCellComparator.class, - t.createComparator("org.apache.hadoop.hbase.CellComparator$MetaCellComparator").getClass()); + FixedFileTrailer.createComparator( + "org.apache.hadoop.hbase.CellComparator$MetaCellComparator").getClass()); assertEquals(MetaCellComparator.class, - t.createComparator("org.apache.hadoop.hbase.CellComparatorImpl$MetaCellComparator") - .getClass()); - assertEquals(MetaCellComparator.class, t.createComparator( + FixedFileTrailer.createComparator( + "org.apache.hadoop.hbase.CellComparatorImpl$MetaCellComparator").getClass()); + assertEquals(MetaCellComparator.class, FixedFileTrailer.createComparator( MetaCellComparator.META_COMPARATOR.getClass().getName()).getClass()); - assertEquals(MetaCellComparator.META_COMPARATOR.getClass(), t.createComparator( - MetaCellComparator.META_COMPARATOR.getClass().getName()).getClass()); - assertEquals(CellComparatorImpl.COMPARATOR.getClass(), t.createComparator( - MetaCellComparator.COMPARATOR.getClass().getName()).getClass()); - assertNull(t.createComparator(Bytes.BYTES_RAWCOMPARATOR.getClass().getName())); - assertNull(t.createComparator("org.apache.hadoop.hbase.KeyValue$RawBytesComparator")); + assertEquals(MetaCellComparator.META_COMPARATOR.getClass(), + FixedFileTrailer.createComparator(MetaCellComparator.META_COMPARATOR.getClass().getName()) + .getClass()); + assertEquals(CellComparatorImpl.COMPARATOR.getClass(), + FixedFileTrailer.createComparator(MetaCellComparator.COMPARATOR.getClass().getName()) + .getClass()); + assertNull(FixedFileTrailer.createComparator(Bytes.BYTES_RAWCOMPARATOR.getClass() + .getName())); + assertNull(FixedFileTrailer.createComparator( + "org.apache.hadoop.hbase.KeyValue$RawBytesComparator")); } catch (IOException e) { fail("Unexpected exception while testing FixedFileTrailer#createComparator(), " + e.getMessage()); 
@@ -150,8 +157,7 @@ public void testCreateComparator() throws IOException { // Test an invalid comparatorClassName expectedEx.expect(IOException.class); - t.createComparator(""); - + FixedFileTrailer.createComparator(""); } @Test diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java index 526072b73add..8e83c29df167 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java @@ -361,7 +361,7 @@ public void testSecondaryIndexBinarySearch() throws IOException { while (msgPrefix.length() + padding.length() < 70) padding.append(' '); msgPrefix += padding; - if (i % 2 == 1) { + if ((i & 1) == 1) { dos.writeInt(curAllEntriesSize); secondaryIndexEntries[i] = curAllEntriesSize; LOG.info(msgPrefix + "secondary index entry #" + ((i - 1) / 2) + @@ -423,7 +423,7 @@ public void testSecondaryIndexBinarySearch() throws IOException { int expectedResult; int referenceItem; - if (i % 2 == 1) { + if ((i & 1) == 1) { // This key is in the array we search as the element (i - 1) / 2. Make // sure we find it. expectedResult = (i - 1) / 2; @@ -699,7 +699,7 @@ public void testHFileWriterAndReader() throws IOException { // Validate the mid-key. assertEquals( Bytes.toStringBinary(blockKeys.get((blockKeys.size() - 1) / 2)), - reader.midKey()); + reader.midKey().toString()); assertEquals(UNCOMPRESSED_INDEX_SIZES[testI], reader.getTrailer().getUncompressedDataIndexSize()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java index 0bd36614f1ab..b82deb4154cf 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java @@ -261,8 +261,12 @@ public void testHFileEncryption() throws Exception { i++; } while (scanner.next()); } finally { - reader.close(); - scanner.close(); + if (reader != null) { + reader.close(); + } + if (scanner != null) { + scanner.close(); + } } assertEquals("Did not read back as many KVs as written", i, testKvs.size()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterAbortAndRSGotKilled.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterAbortAndRSGotKilled.java index 3df49290f6ca..6dc48ea6470b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterAbortAndRSGotKilled.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterAbortAndRSGotKilled.java @@ -17,6 +17,8 @@ */ package org.apache.hadoop.hbase.master; +import static org.junit.Assert.assertNotNull; + import java.io.IOException; import java.util.Optional; import java.util.concurrent.CountDownLatch; @@ -89,6 +91,7 @@ public void test() throws Exception { } } // find the rs and hri of the table + assertNotNull(rsThread); HRegionServer rs = rsThread.getRegionServer(); RegionInfo hri = rs.getRegions(TABLE_NAME).get(0).getRegionInfo(); TransitRegionStateProcedure moveRegionProcedure = TransitRegionStateProcedure.reopen( diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestServerCrashProcedureCarryingMetaStuck.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestServerCrashProcedureCarryingMetaStuck.java 
index 7301c750f173..1da74bf159c2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestServerCrashProcedureCarryingMetaStuck.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestServerCrashProcedureCarryingMetaStuck.java @@ -17,6 +17,8 @@ */ package org.apache.hadoop.hbase.master; +import static org.junit.Assert.assertNotNull; + import java.util.concurrent.CompletableFuture; import java.util.concurrent.TimeUnit; import org.apache.hadoop.hbase.HBaseClassTestRule; @@ -69,6 +71,7 @@ public void test() throws Exception { break; } } + assertNotNull(rsThread); HRegionServer rs = rsThread.getRegionServer(); RegionInfo hri = rs.getRegions(TableName.META_TABLE_NAME).get(0).getRegionInfo(); HMaster master = UTIL.getMiniHBaseCluster().getMaster(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestServerCrashProcedureStuck.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestServerCrashProcedureStuck.java index 5fb2f73f9feb..910da05d061e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestServerCrashProcedureStuck.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestServerCrashProcedureStuck.java @@ -17,6 +17,8 @@ */ package org.apache.hadoop.hbase.master; +import static org.junit.Assert.assertNotNull; + import java.util.concurrent.CompletableFuture; import java.util.concurrent.TimeUnit; import org.apache.hadoop.hbase.HBaseClassTestRule; @@ -78,6 +80,7 @@ public void test() throws Exception { break; } } + assertNotNull(rsThread); HRegionServer rs = rsThread.getRegionServer(); RegionInfo hri = rs.getRegions(TABLE_NAME).get(0).getRegionInfo(); HMaster master = UTIL.getMiniHBaseCluster().getMaster(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestExceptionInAssignRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestExceptionInAssignRegion.java index 24b6dfab79f0..b7c896561726 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestExceptionInAssignRegion.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestExceptionInAssignRegion.java @@ -17,6 +17,8 @@ */ package org.apache.hadoop.hbase.master.assignment; +import static org.junit.Assert.assertNotNull; + import java.util.Optional; import java.util.concurrent.CountDownLatch; import org.apache.hadoop.hbase.HBaseClassTestRule; @@ -73,6 +75,7 @@ public static void tearDown() throws Exception { } @Test + @SuppressWarnings({ "rawtypes", "unchecked" }) public void testExceptionInAssignRegion() { ProcedureExecutor procedureExecutor = UTIL.getMiniHBaseCluster().getMaster().getMasterProcedureExecutor(); @@ -86,6 +89,7 @@ public void testExceptionInAssignRegion() { } } // find the rs and hri of the table + assertNotNull(rsThread); HRegionServer rs = rsThread.getRegionServer(); RegionInfo hri = rs.getRegions(TABLE_NAME).get(0).getRegionInfo(); TransitRegionStateProcedure assignRegionProcedure = TransitRegionStateProcedure.move( diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestExceptionInUnassignedRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestExceptionInUnassignedRegion.java index bdaff5aef7c5..94e3f4cdfe16 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestExceptionInUnassignedRegion.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestExceptionInUnassignedRegion.java @@ 
-17,6 +17,8 @@ */ package org.apache.hadoop.hbase.master.assignment; +import static org.junit.Assert.assertNotNull; + import java.util.Optional; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -83,6 +85,7 @@ public void testExceptionInUnassignRegion() { } } // find the rs and hri of the table + assertNotNull(rsThread); HRegionServer rs = rsThread.getRegionServer(); RegionInfo hri = rs.getRegions(TABLE_NAME).get(0).getRegionInfo(); TransitRegionStateProcedure moveRegionProcedure = TransitRegionStateProcedure.reopen( diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestDefaultMobStoreFlusher.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestDefaultMobStoreFlusher.java index a2d0b9901dac..81ac4ef9a34a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestDefaultMobStoreFlusher.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestDefaultMobStoreFlusher.java @@ -131,7 +131,9 @@ private void testFlushFile(HTableDescriptor htd) throws Exception { scanner.close(); Assert.assertEquals(1, size); } finally { - table.close(); + if (table != null) { + table.close(); + } } } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleMasterProcedureManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleMasterProcedureManager.java index e1d46ef92eef..97c36a0da845 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleMasterProcedureManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleMasterProcedureManager.java @@ -112,7 +112,7 @@ public byte[] execProcedureWithRet(ProcedureDescription desc) throws IOException monitor.receive(e); } // return the first value for testing - return returnData.values().iterator().next(); + return returnData != null ? returnData.values().iterator().next() : null; } @Override diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java index c2ab2432a261..f4caa2daf917 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java @@ -19,6 +19,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -797,6 +798,7 @@ private void assertRPCQuota(ThrottleType type, long limit, TimeUnit tu, QuotaSco default: } + assertNotNull(t); assertEquals(scope, ProtobufUtil.toQuotaScope(t.getScope())); assertEquals(t.getSoftLimit(), limit); assertEquals(t.getTimeUnit(), ProtobufUtil.toProtoTimeUnit(tu)); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoadReplicationHFileRefs.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoadReplicationHFileRefs.java index 134ea4771263..fbd963ea2cdd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoadReplicationHFileRefs.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoadReplicationHFileRefs.java @@ -242,7 +242,6 @@ public void testWhenExcludeNamespace() throws Exception { // Bulk load data into the table of the namespace that is not replicated. 
byte[] row = Bytes.toBytes("001"); - byte[] value = Bytes.toBytes("v1"); bulkLoadOnCluster(NO_REPLICATE_TABLE, CF_A); Threads.sleep(1000); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingToCellFlatMapMemStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingToCellFlatMapMemStore.java index f621fef4a4a5..462fa3370cfe 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingToCellFlatMapMemStore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingToCellFlatMapMemStore.java @@ -67,7 +67,7 @@ public static Object[] data() { // Helpers ////////////////////////////////////////////////////////////////////////////// public TestCompactingToCellFlatMapMemStore(String type){ - if (type == "CHUNK_MAP") { + if (type.equals("CHUNK_MAP")) { toCellChunkMap = true; } else { toCellChunkMap = false; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java index bc894ae2920d..cb7b240c32d4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java @@ -419,7 +419,8 @@ public void testCompactionFailure() throws Exception { thread.requestCompaction(mockRegion, store, "test custom comapction", PRIORITY_USER, tracker, null); // wait for the latch to complete. - latch.await(120, TimeUnit.SECONDS); + boolean released = latch.await(120, TimeUnit.SECONDS); + assertTrue("Latch was not released", released); // compaction should have completed and been marked as failed due to error in split request long postCompletedCount = metricsWrapper.getNumCompactionsCompleted(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java index a5668cfba42c..bbbe8630054f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java @@ -257,8 +257,11 @@ public FSDataInputStream open(Path p, int bufferSize) throws IOException { * Starts to simulate faults on all streams opened so far */ public void startFaults() { - for (SoftReference is: inStreams) { - is.get().startFaults(); + for (SoftReference ref: inStreams) { + FaultyInputStream is = ref.get(); + if (is != null) { + is.startFaults(); + } } } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java index 839cf34d3e24..44b673010e9a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java @@ -4721,7 +4721,7 @@ public void testWritesWhileScanning() throws IOException, InterruptedException { protected class PutThread extends Thread { private volatile boolean done; - private volatile int numPutsFinished = 0; + private int numPutsFinished = 0; private Throwable error = null; private int numRows; @@ -7170,7 +7170,8 @@ public Void call() throws Exception { return null; } }); - obtainedRowLock.await(30, TimeUnit.SECONDS); + boolean released = obtainedRowLock.await(30, TimeUnit.SECONDS); + 
assertTrue("Latch was not released", released); Future f2 = exec.submit(new Callable() { @Override @@ -7256,7 +7257,8 @@ public Void call() throws Exception { return null; } }); - obtainedRowLock.await(30, TimeUnit.SECONDS); + boolean released = obtainedRowLock.await(30, TimeUnit.SECONDS); + assertTrue("Latch was not released", released); Future f2 = exec.submit(new Callable() { @Override diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerCrashDisableWAL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerCrashDisableWAL.java index 208fa98ef6ae..5da743e5ee30 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerCrashDisableWAL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerCrashDisableWAL.java @@ -92,8 +92,10 @@ public void test() throws InterruptedException, IOException { } } // shutdown rs - thread.getRegionServer().abort("For testing"); - thread.join(); + if (thread != null) { + thread.getRegionServer().abort("For testing"); + thread.join(); + } // restart master UTIL.getMiniHBaseCluster().startMaster(); // make sure that we can schedule a SCP for the crashed server which WAL is disabled and bring diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java index abcaa4fe366f..173c5584c041 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java @@ -474,12 +474,12 @@ private void internalTestSeekAndNextForReversibleKeyValueHeap( ReversedKeyValueHeap kvHeap, int startRowNum) throws IOException { // Test next and seek for (int i = startRowNum; i >= 0; i--) { - if (i % 2 == 1 && i - 2 >= 0) { + if ((i & 1) == 1 && i - 2 >= 0) { i = i - 2; kvHeap.seekToPreviousRow(KeyValueUtil.createFirstOnRow(ROWS[i + 1])); } for (int j = 0; j < QUALSIZE; j++) { - if (j % 2 == 1 && (j + 1) < QUALSIZE) { + if ((j & 1) == 1 && (j + 1) < QUALSIZE) { j = j + 1; kvHeap.backwardSeek(makeKV(i, j)); } @@ -709,7 +709,7 @@ private static void writeMemstore(MemStore memstore) throws IOException { // Add another half of the keyvalues to snapshot for (int i = 0; i < ROWSIZE; i++) { for (int j = 0; j < QUALSIZE; j++) { - if ((i + j) % 2 == 1) { + if (((i + j) & 1) == 1) { memstore.add(makeKV(i, j), null); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerRetriableFailure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerRetriableFailure.java index 588ff78a2fc1..67ffe4297098 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerRetriableFailure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerRetriableFailure.java @@ -21,12 +21,9 @@ import static org.junit.Assert.assertTrue; import java.io.IOException; -import java.util.ArrayList; import java.util.List; import java.util.Optional; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.TableName; @@ -130,16 +127,8 @@ public void testFaultyScanner() throws Exception { // 
   // ==========================================================================
   //  Helpers
   // ==========================================================================
-  private FileSystem getFileSystem() {
-    return UTIL.getHBaseCluster().getMaster().getMasterFileSystem().getFileSystem();
-  }
-
-  private Path getRootDir() {
-    return UTIL.getHBaseCluster().getMaster().getMasterFileSystem().getRootDir();
-  }

   public void loadTable(final Table table, int numRows) throws IOException {
-    List<Put> puts = new ArrayList<>(numRows);
     for (int i = 0; i < numRows; ++i) {
       byte[] row = Bytes.toBytes(String.format("%09d", i));
       Put put = new Put(row);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactedHFilesDischarger.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactedHFilesDischarger.java
index 8547e650c984..54e33bd50128 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactedHFilesDischarger.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactedHFilesDischarger.java
@@ -438,7 +438,9 @@ private void initiateScan(HRegion region) throws IOException {
       }
     } finally {
       scanCompletedCounter.incrementAndGet();
-      resScanner.close();
+      if (resScanner != null) {
+        resScanner.close();
+      }
     }
   }
 }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestStoreHotnessProtector.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestStoreHotnessProtector.java
index a73d36631d40..f04d44fe6ed5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestStoreHotnessProtector.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestStoreHotnessProtector.java
@@ -99,7 +99,8 @@ public void testPreparePutCounter() throws Exception {
       });
     }

-    countDownLatch.await(60, TimeUnit.SECONDS);
+    boolean released = countDownLatch.await(60, TimeUnit.SECONDS);
+    Assert.assertTrue("Latch was not released", released);
     //no exception
     Assert.assertEquals(exception.get(), null);
     Assert.assertEquals(storeHotnessProtector.getPreparePutToStoreMap().size(), 1);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java
index 69b9ea407ff0..861a440364a2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java
@@ -1106,12 +1106,11 @@ private MockWAL createMockWAL() throws IOException {
   // Flusher used in this test.  Keep count of how often we are called and
   // actually run the flush inside here.
   static class TestFlusher implements FlushRequester {
-    private HRegion r;

     @Override
     public boolean requestFlush(HRegion region, FlushLifeCycleTracker tracker) {
       try {
-        r.flush(false);
+        region.flush(false);
         return true;
       } catch (IOException e) {
         throw new RuntimeException("Exception flushing", e);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/SecureTestCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/SecureTestCluster.java
index bff7d9d3f148..ef60147e2039 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/SecureTestCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/SecureTestCluster.java
@@ -97,8 +97,8 @@ public static void tearDown() throws Exception {
     try {
       if (CLUSTER != null) {
         CLUSTER.shutdown();
+        CLUSTER.join();
       }
-      CLUSTER.join();
       if (KDC != null) {
         KDC.stop();
       }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
index a7187cdec8b9..7d3652d23150 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
@@ -479,6 +479,7 @@ public Void run() throws Exception {
         }
       } catch (Throwable e) {
       }
+      assertNotNull(authsResponse);
       List<String> authsList = new ArrayList<>(authsResponse.getAuthList().size());
       for (ByteString authBS : authsResponse.getAuthList()) {
         authsList.add(Bytes.toString(authBS.toByteArray()));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/ProcessBasedLocalHBaseCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/ProcessBasedLocalHBaseCluster.java
index 0de4b830e881..ba5c71d70ec6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/ProcessBasedLocalHBaseCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/ProcessBasedLocalHBaseCluster.java
@@ -484,11 +484,14 @@ private void scanDirs() throws FileNotFoundException {
     dirList.clear();
     dirList.addAll(logTailDirs);
     for (String d : dirList) {
-      for (File f : new File(d).listFiles(LOG_FILES)) {
-        String filePath = f.getAbsolutePath();
-        if (!tailedFiles.contains(filePath)) {
-          tailedFiles.add(filePath);
-          startTailingFile(filePath);
+      final File[] files = new File(d).listFiles(LOG_FILES);
+      if (files != null) {
+        for (File f : files) {
+          String filePath = f.getAbsolutePath();
+          if (!tailedFiles.contains(filePath)) {
+            tailedFiles.add(filePath);
+            startTailingFile(filePath);
+          }
         }
       }
     }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestStealJobQueue.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestStealJobQueue.java
index 4d69f8e99c98..7731f8ae0a3b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestStealJobQueue.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestStealJobQueue.java
@@ -209,8 +209,10 @@ protected void afterExecute(Runnable r, Throwable t) {
       stealPool.execute(task);
     }

-    stealJobCountDown.await(1, TimeUnit.SECONDS);
-    stealFromCountDown.await(1, TimeUnit.SECONDS);
+    boolean released = stealJobCountDown.await(1, TimeUnit.SECONDS);
+    assertTrue("stealJobCountDown was not released", released);
+    released = stealFromCountDown.await(1, TimeUnit.SECONDS);
+    assertTrue("stealFromCountDown was not released", released);
     assertEquals(0, stealFromCountDown.getCount());
     assertEquals(0, stealJobCountDown.getCount());
   }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java
index fb278fd0e4c4..6c964db245bc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java
@@ -395,10 +395,6 @@ public void testOldRecoveredEditsFileSidelined() throws IOException {

   private Path createRecoveredEditsPathForRegion() throws IOException {
     byte[] encoded = RegionInfoBuilder.FIRST_META_REGIONINFO.getEncodedNameAsBytes();
-    long now = EnvironmentEdgeManager.currentTime();
-    Entry entry = new Entry(
-      new WALKeyImpl(encoded, TableName.META_TABLE_NAME, 1, now, HConstants.DEFAULT_CLUSTER_ID),
-      new WALEdit());
     Path p = WALSplitUtil
       .getRegionSplitEditsPath(TableName.META_TABLE_NAME, encoded, 1, FILENAME_BEING_SPLIT,
         TMPDIRNAME, conf);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
index 6b6175115be3..fba08f5375ea 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
@@ -200,53 +200,49 @@ public int run(String[] args) throws Exception {
     // Process command line args
     for (int i = 0; i < args.length; i++) {
       String cmd = args[i];
-      try {
-        if (cmd.equals("-threads")) {
-          numThreads = Integer.parseInt(args[++i]);
-        } else if (cmd.equals("-iterations")) {
-          numIterations = Long.parseLong(args[++i]);
-        } else if (cmd.equals("-path")) {
-          rootRegionDir = new Path(args[++i]);
-        } else if (cmd.equals("-families")) {
-          numFamilies = Integer.parseInt(args[++i]);
-        } else if (cmd.equals("-qualifiers")) {
-          numQualifiers = Integer.parseInt(args[++i]);
-        } else if (cmd.equals("-keySize")) {
-          keySize = Integer.parseInt(args[++i]);
-        } else if (cmd.equals("-valueSize")) {
-          valueSize = Integer.parseInt(args[++i]);
-        } else if (cmd.equals("-syncInterval")) {
-          syncInterval = Integer.parseInt(args[++i]);
-        } else if (cmd.equals("-nosync")) {
-          noSync = true;
-        } else if (cmd.equals("-verify")) {
-          verify = true;
-        } else if (cmd.equals("-verbose")) {
-          verbose = true;
-        } else if (cmd.equals("-nocleanup")) {
-          cleanup = false;
-        } else if (cmd.equals("-noclosefs")) {
-          noclosefs = true;
-        } else if (cmd.equals("-roll")) {
-          roll = Long.parseLong(args[++i]);
-        } else if (cmd.equals("-compress")) {
-          compress = true;
-        } else if (cmd.equals("-encryption")) {
-          cipher = args[++i];
-        } else if (cmd.equals("-regions")) {
-          numRegions = Integer.parseInt(args[++i]);
-        } else if (cmd.equals("-traceFreq")) {
-          // keep it here for compatible
-          System.err.println("-traceFreq is not supported any more");
-        } else if (cmd.equals("-h")) {
-          printUsageAndExit();
-        } else if (cmd.equals("--help")) {
-          printUsageAndExit();
-        } else {
-          System.err.println("UNEXPECTED: " + cmd);
-          printUsageAndExit();
-        }
-      } catch (Exception e) {
+      if (cmd.equals("-threads")) {
+        numThreads = Integer.parseInt(args[++i]);
+      } else if (cmd.equals("-iterations")) {
+        numIterations = Long.parseLong(args[++i]);
+      } else if (cmd.equals("-path")) {
+        rootRegionDir = new Path(args[++i]);
+      } else if (cmd.equals("-families")) {
+        numFamilies = Integer.parseInt(args[++i]);
+      } else if (cmd.equals("-qualifiers")) {
+        numQualifiers = Integer.parseInt(args[++i]);
+      } else if (cmd.equals("-keySize")) {
+        keySize = Integer.parseInt(args[++i]);
+      } else if (cmd.equals("-valueSize")) {
+        valueSize = Integer.parseInt(args[++i]);
+      } else if (cmd.equals("-syncInterval")) {
+        syncInterval = Integer.parseInt(args[++i]);
+      } else if (cmd.equals("-nosync")) {
+        noSync = true;
+      } else if (cmd.equals("-verify")) {
+        verify = true;
+      } else if (cmd.equals("-verbose")) {
+        verbose = true;
+      } else if (cmd.equals("-nocleanup")) {
+        cleanup = false;
+      } else if (cmd.equals("-noclosefs")) {
+        noclosefs = true;
+      } else if (cmd.equals("-roll")) {
+        roll = Long.parseLong(args[++i]);
+      } else if (cmd.equals("-compress")) {
+        compress = true;
+      } else if (cmd.equals("-encryption")) {
+        cipher = args[++i];
+      } else if (cmd.equals("-regions")) {
+        numRegions = Integer.parseInt(args[++i]);
+      } else if (cmd.equals("-traceFreq")) {
+        // keep it here for compatible
+        System.err.println("-traceFreq is not supported any more");
+      } else if (cmd.equals("-h")) {
+        printUsageAndExit();
+      } else if (cmd.equals("--help")) {
+        printUsageAndExit();
+      } else {
+        System.err.println("UNEXPECTED: " + cmd);
         printUsageAndExit();
       }
     }