From e7605b22b6e629977f03f592682de71559c77756 Mon Sep 17 00:00:00 2001 From: sadanand48 Date: Thu, 16 Jul 2020 14:51:52 +0530 Subject: [PATCH 1/7] HDDS-3972.Add option to limit number of items while displaying through ldb tool. --- .../apache/hadoop/ozone/om/TestOmLDBCli.java | 125 ++++++++++++++++++ .../apache/hadoop/ozone/debug/DBScanner.java | 49 ++++++- .../apache/hadoop/ozone/debug/RDBParser.java | 4 + 3 files changed, 171 insertions(+), 7 deletions(-) create mode 100644 hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java new file mode 100644 index 000000000000..68c86d6a3017 --- /dev/null +++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java @@ -0,0 +1,125 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + *
<p>
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
<p>
+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ +package org.apache.hadoop.ozone.om; + +import org.apache.hadoop.hdds.HddsConfigKeys; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.ozone.MiniOzoneCluster; +import org.apache.hadoop.ozone.TestDataUtil; +import org.apache.hadoop.ozone.client.OzoneBucket; +import org.apache.hadoop.ozone.debug.DBScanner; +import org.apache.hadoop.ozone.debug.RDBParser; +import org.apache.hadoop.ozone.om.helpers.OmBucketInfo; +import org.apache.hadoop.ozone.om.helpers.OmKeyInfo; +import org.apache.hadoop.ozone.om.helpers.OmVolumeArgs; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.List; + +import static org.apache.hadoop.ozone.OzoneConsts.OM_DB_NAME; + +/** + * This class tests the Debug LDB CLI that reads from an om.db file. + */ +public class TestOmLDBCli { + private MiniOzoneCluster cluster = null; + private OzoneConfiguration conf; + + private RDBParser rdbParser; + private DBScanner dbScanner; + + @Before + public void setup() throws Exception { + conf = new OzoneConfiguration(); + cluster = MiniOzoneCluster.newBuilder(conf).build(); + cluster.waitForClusterToBeReady(); + String volumeName0 = "volume10"; + String bucketName0 = "bucket10"; + OzoneBucket bucket0 = TestDataUtil.createVolumeAndBucket(cluster, + volumeName0, bucketName0); + String volumeName1 = "volume11"; + String bucketName1 = "bucket11"; + OzoneBucket bucket1 = TestDataUtil.createVolumeAndBucket(cluster, + volumeName1, bucketName1); + String keyName0 = "key0"; + TestDataUtil.createKey(bucket0, keyName0, ""); + String keyName1 = "key1"; + TestDataUtil.createKey(bucket1, keyName1, ""); + cluster.getOzoneManager().stop(); + cluster.getStorageContainerManager().stop(); + rdbParser = new RDBParser(); + dbScanner = new DBScanner(); + } + + @After + public void shutdown() { + if (cluster != null) { + cluster.shutdown(); + } + } + + @Test(expected = IllegalArgumentException.class) + public void testOMDB() throws Exception { + String dbRootPath = conf.get(HddsConfigKeys.OZONE_METADATA_DIRS); + String dbPath = dbRootPath + "/" + OM_DB_NAME; + rdbParser.setDbPath(dbPath); + dbScanner.setParent(rdbParser); + // list will store volumeNames/bucketNames/keyNames + List entityNames = new ArrayList<>(); + getEntityNames(dbScanner, "volumeTable", entityNames); + Assert.assertTrue(entityNames.contains("volume10")); + Assert.assertTrue(entityNames.contains("volume11")); + getEntityNames(dbScanner, "bucketTable", entityNames); + Assert.assertTrue(entityNames.contains("bucket10")); + Assert.assertTrue(entityNames.contains("bucket11")); + getEntityNames(dbScanner, "keyTable", entityNames); + Assert.assertTrue(entityNames.contains("key0")); + Assert.assertTrue(entityNames.contains("key1")); + //test maxLimit + Assert.assertEquals(2, entityNames.size()); + dbScanner.setLimit(1); + getEntityNames(dbScanner, "keyTable", entityNames); + Assert.assertEquals(1, entityNames.size()); + dbScanner.setLimit(-1); + getEntityNames(dbScanner, "keyTable", entityNames); + Assert.assertEquals(0, entityNames.size()); + } + + private static void getEntityNames(DBScanner dbScanner, + String tableName, List entityNames) 
throws Exception { + dbScanner.setTableName(tableName); + dbScanner.call(); + entityNames.clear(); + Assert.assertFalse(dbScanner.getScannedObjects().isEmpty()); + for (Object o : dbScanner.getScannedObjects()){ + if(o instanceof OmVolumeArgs) { + OmVolumeArgs volumeArgs = (OmVolumeArgs) o; + entityNames.add(volumeArgs.getVolume()); + } else if (o instanceof OmBucketInfo){ + OmBucketInfo bucketInfo = (OmBucketInfo)o; + entityNames.add(bucketInfo.getBucketName()); + } else { + OmKeyInfo keyInfo = (OmKeyInfo)o; + entityNames.add(keyInfo.getKeyName()); + } + } + } +} diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java index 47fc8bc9cfb2..1abcbb54b0f5 100644 --- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java +++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java @@ -47,11 +47,18 @@ public class DBScanner implements Callable { @CommandLine.ParentCommand private RDBParser parent; + @CommandLine.Option(names = {"--length", "-l"}, + description = "Maximum number of items to list") + private int limit = 100; + private HashMap columnFamilyMap; - private static void displayTable(RocksDB rocksDB, - DBColumnFamilyDefinition dbColumnFamilyDefinition, - List list) throws IOException { + private List scannedObjects; + + private static List displayTable(RocksDB rocksDB, + DBColumnFamilyDefinition dbColumnFamilyDefinition, + List list, int maxValueLimit) throws IOException { + List outputs = new ArrayList<>(); ColumnFamilyHandle columnFamilyHandle = getColumnFamilyHandle( dbColumnFamilyDefinition.getTableName() .getBytes(StandardCharsets.UTF_8), list); @@ -60,14 +67,33 @@ private static void displayTable(RocksDB rocksDB, } RocksIterator iterator = rocksDB.newIterator(columnFamilyHandle); iterator.seekToFirst(); - while (iterator.isValid()){ + while (iterator.isValid() && maxValueLimit > 0){ Object o = dbColumnFamilyDefinition.getValueCodec() .fromPersistedFormat(iterator.value()); + outputs.add(o); Gson gson = new GsonBuilder().setPrettyPrinting().create(); String result = gson.toJson(o); System.out.println(result); + maxValueLimit--; iterator.next(); } + return outputs; + } + + public void setTableName(String tableName) { + this.tableName = tableName; + } + + public RDBParser getParent() { + return parent; + } + + public void setParent(RDBParser parent) { + this.parent = parent; + } + + public void setLimit(int limit) { + this.limit = limit; } private static ColumnFamilyHandle getColumnFamilyHandle( @@ -99,6 +125,10 @@ private void constructColumnFamilyMap(DBDefinition dbDefinition) { } } + public List getScannedObjects() { + return scannedObjects; + } + @Override public Void call() throws Exception { List cfs = new ArrayList<>(); @@ -116,13 +146,17 @@ public Void call() throws Exception { rocksDB = RocksDB.openReadOnly(parent.getDbPath(), cfs, columnFamilyHandleList); this.printAppropriateTable(columnFamilyHandleList, - rocksDB, parent.getDbPath()); + rocksDB, parent.getDbPath(), limit); return null; } private void printAppropriateTable( List columnFamilyHandleList, - RocksDB rocksDB, String dbPath) throws IOException { + RocksDB rocksDB, String dbPath, int maxValues) throws IOException { + if (maxValues < 1) { + throw new IllegalArgumentException( + "List length should be a positive number"); + } dbPath = removeTrailingSlashIfNeeded(dbPath); this.constructColumnFamilyMap(DBDefinitionFactory. 
getDefinition(new File(dbPath).getName())); @@ -132,7 +166,8 @@ private void printAppropriateTable( } else { DBColumnFamilyDefinition columnFamilyDefinition = this.columnFamilyMap.get(tableName); - displayTable(rocksDB, columnFamilyDefinition, columnFamilyHandleList); + scannedObjects = displayTable(rocksDB, + columnFamilyDefinition, columnFamilyHandleList, maxValues); } } else { System.out.println("Incorrect db Path"); diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/RDBParser.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/RDBParser.java index ae82ba110353..017a23a228fc 100644 --- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/RDBParser.java +++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/RDBParser.java @@ -41,6 +41,10 @@ public String getDbPath() { return dbPath; } + public void setDbPath(String dbPath) { + this.dbPath = dbPath; + } + @Override public void execute(String[] argv) { new RDBParser().run(argv); From 118fc81a217fd79923f812644ecbe0943b0d0176 Mon Sep 17 00:00:00 2001 From: sadanand48 Date: Wed, 29 Jul 2020 17:15:51 +0530 Subject: [PATCH 2/7] Addressed Review Comments & Deleted TestOmSQLCI --- .../apache/hadoop/ozone/om/TestOmLDBCli.java | 108 ++++---- .../apache/hadoop/ozone/om/TestOmSQLCli.java | 235 ------------------ 2 files changed, 51 insertions(+), 292 deletions(-) delete mode 100644 hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmSQLCli.java diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java index 68c86d6a3017..ba1bdf7a5be7 100644 --- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java +++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java @@ -16,25 +16,28 @@ */ package org.apache.hadoop.ozone.om; -import org.apache.hadoop.hdds.HddsConfigKeys; + import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; +import org.apache.hadoop.hdds.utils.db.DBStore; +import org.apache.hadoop.hdds.utils.db.DBStoreBuilder; +import org.apache.hadoop.hdds.utils.db.Table; import org.apache.hadoop.ozone.MiniOzoneCluster; -import org.apache.hadoop.ozone.TestDataUtil; -import org.apache.hadoop.ozone.client.OzoneBucket; import org.apache.hadoop.ozone.debug.DBScanner; import org.apache.hadoop.ozone.debug.RDBParser; -import org.apache.hadoop.ozone.om.helpers.OmBucketInfo; import org.apache.hadoop.ozone.om.helpers.OmKeyInfo; -import org.apache.hadoop.ozone.om.helpers.OmVolumeArgs; +import org.apache.hadoop.ozone.om.request.TestOMRequestUtils; import org.junit.After; -import org.junit.Assert; import org.junit.Before; +import org.junit.Rule; import org.junit.Test; +import org.junit.Assert; +import org.junit.rules.TemporaryFolder; -import java.util.ArrayList; +import java.io.File; import java.util.List; +import java.util.ArrayList; -import static org.apache.hadoop.ozone.OzoneConsts.OM_DB_NAME; /** * This class tests the Debug LDB CLI that reads from an om.db file. 
@@ -45,81 +48,72 @@ public class TestOmLDBCli { private RDBParser rdbParser; private DBScanner dbScanner; + private DBStore dbStore = null; + + @Rule + public TemporaryFolder folder = new TemporaryFolder(); @Before public void setup() throws Exception { conf = new OzoneConfiguration(); - cluster = MiniOzoneCluster.newBuilder(conf).build(); - cluster.waitForClusterToBeReady(); - String volumeName0 = "volume10"; - String bucketName0 = "bucket10"; - OzoneBucket bucket0 = TestDataUtil.createVolumeAndBucket(cluster, - volumeName0, bucketName0); - String volumeName1 = "volume11"; - String bucketName1 = "bucket11"; - OzoneBucket bucket1 = TestDataUtil.createVolumeAndBucket(cluster, - volumeName1, bucketName1); - String keyName0 = "key0"; - TestDataUtil.createKey(bucket0, keyName0, ""); - String keyName1 = "key1"; - TestDataUtil.createKey(bucket1, keyName1, ""); - cluster.getOzoneManager().stop(); - cluster.getStorageContainerManager().stop(); rdbParser = new RDBParser(); dbScanner = new DBScanner(); } @After - public void shutdown() { - if (cluster != null) { - cluster.shutdown(); + public void shutdown() throws Exception { + if (dbStore!=null){ + dbStore.close(); } } @Test(expected = IllegalArgumentException.class) public void testOMDB() throws Exception { - String dbRootPath = conf.get(HddsConfigKeys.OZONE_METADATA_DIRS); - String dbPath = dbRootPath + "/" + OM_DB_NAME; - rdbParser.setDbPath(dbPath); + File newFolder = folder.newFolder(); + if(!newFolder.exists()) { + Assert.assertTrue(newFolder.mkdirs()); + } + // Dummy om.db with only keyTable + dbStore = DBStoreBuilder.newBuilder(conf) + .setName("om.db") + .setPath(newFolder.toPath()) + .addTable("keyTable") + .build(); + // insert 5 keys + for (int i = 0; i<5; i++) { + OmKeyInfo value = TestOMRequestUtils.createOmKeyInfo("sampleVol", + "sampleBuck", "key" + (i+1), HddsProtos.ReplicationType.STAND_ALONE, + HddsProtos.ReplicationFactor.ONE); + String key = "key"+ (i); + Table keyTable = dbStore.getTable("keyTable"); + keyTable.put(key.getBytes(), value.getProtobuf().toByteArray()); + } + rdbParser.setDbPath(dbStore.getDbLocation().getAbsolutePath()); dbScanner.setParent(rdbParser); // list will store volumeNames/bucketNames/keyNames - List entityNames = new ArrayList<>(); - getEntityNames(dbScanner, "volumeTable", entityNames); - Assert.assertTrue(entityNames.contains("volume10")); - Assert.assertTrue(entityNames.contains("volume11")); - getEntityNames(dbScanner, "bucketTable", entityNames); - Assert.assertTrue(entityNames.contains("bucket10")); - Assert.assertTrue(entityNames.contains("bucket11")); - getEntityNames(dbScanner, "keyTable", entityNames); - Assert.assertTrue(entityNames.contains("key0")); - Assert.assertTrue(entityNames.contains("key1")); - //test maxLimit - Assert.assertEquals(2, entityNames.size()); + List keyNames = new ArrayList<>(); + getKeyNames(dbScanner, "keyTable", keyNames); + Assert.assertEquals(5, keyNames.size()); + Assert.assertTrue(keyNames.contains("key1")); + Assert.assertTrue(keyNames.contains("key5")); + Assert.assertFalse(keyNames.contains("key6")); dbScanner.setLimit(1); - getEntityNames(dbScanner, "keyTable", entityNames); - Assert.assertEquals(1, entityNames.size()); + getKeyNames(dbScanner, "keyTable", keyNames); + Assert.assertEquals(1, keyNames.size()); dbScanner.setLimit(-1); - getEntityNames(dbScanner, "keyTable", entityNames); - Assert.assertEquals(0, entityNames.size()); + getKeyNames(dbScanner, "keyTable", keyNames); + Assert.assertEquals(0, keyNames.size()); } - private static void 
getEntityNames(DBScanner dbScanner, + private static void getKeyNames(DBScanner dbScanner, String tableName, List entityNames) throws Exception { dbScanner.setTableName(tableName); dbScanner.call(); entityNames.clear(); Assert.assertFalse(dbScanner.getScannedObjects().isEmpty()); for (Object o : dbScanner.getScannedObjects()){ - if(o instanceof OmVolumeArgs) { - OmVolumeArgs volumeArgs = (OmVolumeArgs) o; - entityNames.add(volumeArgs.getVolume()); - } else if (o instanceof OmBucketInfo){ - OmBucketInfo bucketInfo = (OmBucketInfo)o; - entityNames.add(bucketInfo.getBucketName()); - } else { - OmKeyInfo keyInfo = (OmKeyInfo)o; - entityNames.add(keyInfo.getKeyName()); - } + OmKeyInfo keyInfo = (OmKeyInfo)o; + entityNames.add(keyInfo.getKeyName()); } } } diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmSQLCli.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmSQLCli.java deleted file mode 100644 index b1ce4ba81cda..000000000000 --- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmSQLCli.java +++ /dev/null @@ -1,235 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with this - * work for additional information regarding copyright ownership. The ASF - * licenses this file to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - *
<p>
- * http://www.apache.org/licenses/LICENSE-2.0 - *
<p>
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations under - * the License. - */ -package org.apache.hadoop.ozone.om; - -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.UUID; - -import org.apache.hadoop.hdds.HddsConfigKeys; -import org.apache.hadoop.hdds.conf.OzoneConfiguration; -import org.apache.hadoop.ozone.MiniOzoneCluster; -import org.apache.hadoop.ozone.TestDataUtil; -import org.apache.hadoop.ozone.client.OzoneBucket; -import org.apache.hadoop.ozone.scm.cli.SQLCLI; -import org.apache.hadoop.test.GenericTestUtils; - -import static org.apache.hadoop.ozone.OzoneConsts.OM_DB_NAME; -import org.junit.After; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.Timeout; - -/** - * This class tests the CLI that transforms om.db into SQLite DB files. - */ -public class TestOmSQLCli { - - /** - * Set a timeout for each test. - */ - @Rule - public Timeout timeout = new Timeout(300000); - private MiniOzoneCluster cluster = null; - - private OzoneConfiguration conf; - private SQLCLI cli; - - private String userName = "userTest"; - private String adminName = "adminTest"; - private String volumeName0 = "volumeTest0"; - private String volumeName1 = "volumeTest1"; - private String bucketName0 = "bucketTest0"; - private String bucketName1 = "bucketTest1"; - private String bucketName2 = "bucketTest2"; - private String keyName0 = "key0"; - private String keyName1 = "key1"; - private String keyName2 = "key2"; - private String keyName3 = "key3"; - - /** - * Create a MiniDFSCluster for testing. - *
<p>
- * Ozone is made active by setting OZONE_ENABLED = true - * - * @throws IOException - */ - @Before - public void setup() throws Exception { - conf = new OzoneConfiguration(); - cluster = MiniOzoneCluster.newBuilder(conf).build(); - cluster.waitForClusterToBeReady(); - OzoneBucket bucket0 = - TestDataUtil.createVolumeAndBucket(cluster, volumeName0, bucketName0); - OzoneBucket bucket1 = - TestDataUtil.createVolumeAndBucket(cluster, volumeName1, bucketName1); - OzoneBucket bucket2 = - TestDataUtil.createVolumeAndBucket(cluster, volumeName0, bucketName2); - - TestDataUtil.createKey(bucket0, keyName0, ""); - TestDataUtil.createKey(bucket1, keyName1, ""); - TestDataUtil.createKey(bucket2, keyName2, ""); - TestDataUtil.createKey(bucket2, keyName3, ""); - - cluster.getOzoneManager().stop(); - cluster.getStorageContainerManager().stop(); - cli = new SQLCLI(conf); - } - - @After - public void shutdown() { - if (cluster != null) { - cluster.shutdown(); - } - } - - // After HDDS-357, we have to fix SQLCli. - // TODO: fix SQLCli - @Ignore - @Test - public void testOmDB() throws Exception { - String dbOutPath = GenericTestUtils.getTempPath( - UUID.randomUUID() + "/out_sql.db"); - - String dbRootPath = conf.get(HddsConfigKeys.OZONE_METADATA_DIRS); - String dbPath = dbRootPath + "/" + OM_DB_NAME; - String[] args = {"-p", dbPath, "-o", dbOutPath}; - - cli.run(args); - - Connection conn = connectDB(dbOutPath); - String sql = "SELECT * FROM volumeList"; - ResultSet rs = executeQuery(conn, sql); - List expectedValues = - new ArrayList<>(Arrays.asList(volumeName0, volumeName1)); - while (rs.next()) { - String userNameRs = rs.getString("userName"); - String volumeNameRs = rs.getString("volumeName"); - assertEquals(userName, userNameRs.substring(1)); - assertTrue(expectedValues.remove(volumeNameRs)); - } - assertEquals(0, expectedValues.size()); - - sql = "SELECT * FROM volumeInfo"; - rs = executeQuery(conn, sql); - expectedValues = - new ArrayList<>(Arrays.asList(volumeName0, volumeName1)); - while (rs.next()) { - String adName = rs.getString("adminName"); - String ownerName = rs.getString("ownerName"); - String volumeName = rs.getString("volumeName"); - assertEquals(adminName, adName); - assertEquals(userName, ownerName); - assertTrue(expectedValues.remove(volumeName)); - } - assertEquals(0, expectedValues.size()); - - sql = "SELECT * FROM aclInfo"; - rs = executeQuery(conn, sql); - expectedValues = - new ArrayList<>(Arrays.asList(volumeName0, volumeName1)); - while (rs.next()) { - String adName = rs.getString("adminName"); - String ownerName = rs.getString("ownerName"); - String volumeName = rs.getString("volumeName"); - String type = rs.getString("type"); - String uName = rs.getString("userName"); - String rights = rs.getString("rights"); - assertEquals(adminName, adName); - assertEquals(userName, ownerName); - assertEquals("USER", type); - assertEquals(userName, uName); - assertEquals("READ_WRITE", rights); - assertTrue(expectedValues.remove(volumeName)); - } - assertEquals(0, expectedValues.size()); - - sql = "SELECT * FROM bucketInfo"; - rs = executeQuery(conn, sql); - HashMap expectedMap = new HashMap<>(); - expectedMap.put(bucketName0, volumeName0); - expectedMap.put(bucketName2, volumeName0); - expectedMap.put(bucketName1, volumeName1); - while (rs.next()) { - String volumeName = rs.getString("volumeName"); - String bucketName = rs.getString("bucketName"); - boolean versionEnabled = rs.getBoolean("versionEnabled"); - String storegeType = rs.getString("storageType"); - assertEquals(volumeName, 
expectedMap.remove(bucketName)); - assertFalse(versionEnabled); - assertEquals("DISK", storegeType); - } - assertEquals(0, expectedMap.size()); - - sql = "SELECT * FROM keyInfo"; - rs = executeQuery(conn, sql); - HashMap> expectedMap2 = new HashMap<>(); - // no data written, data size will be 0 - expectedMap2.put(keyName0, - Arrays.asList(volumeName0, bucketName0, "0")); - expectedMap2.put(keyName1, - Arrays.asList(volumeName1, bucketName1, "0")); - expectedMap2.put(keyName2, - Arrays.asList(volumeName0, bucketName2, "0")); - expectedMap2.put(keyName3, - Arrays.asList(volumeName0, bucketName2, "0")); - while (rs.next()) { - String volumeName = rs.getString("volumeName"); - String bucketName = rs.getString("bucketName"); - String keyName = rs.getString("keyName"); - int dataSize = rs.getInt("dataSize"); - List vals = expectedMap2.remove(keyName); - assertNotNull(vals); - assertEquals(vals.get(0), volumeName); - assertEquals(vals.get(1), bucketName); - assertEquals(vals.get(2), Integer.toString(dataSize)); - } - assertEquals(0, expectedMap2.size()); - - conn.close(); - Files.delete(Paths.get(dbOutPath)); - } - - private ResultSet executeQuery(Connection conn, String sql) - throws SQLException { - Statement stmt = conn.createStatement(); - return stmt.executeQuery(sql); - } - - private Connection connectDB(String dbPath) throws Exception { - Class.forName("org.sqlite.JDBC"); - String connectPath = - String.format("jdbc:sqlite:%s", dbPath); - return DriverManager.getConnection(connectPath); - } -} From 22c26a81368fac033bb5e19c51521e7accbd45b6 Mon Sep 17 00:00:00 2001 From: sadanand48 Date: Wed, 29 Jul 2020 17:18:55 +0530 Subject: [PATCH 3/7] deleted unwanted object --- .../src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java | 1 - 1 file changed, 1 deletion(-) diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java index ba1bdf7a5be7..83a0b7a41bbb 100644 --- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java +++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java @@ -43,7 +43,6 @@ * This class tests the Debug LDB CLI that reads from an om.db file. 
*/ public class TestOmLDBCli { - private MiniOzoneCluster cluster = null; private OzoneConfiguration conf; private RDBParser rdbParser; From 0f58ba2923d39ce952cc41f4ff6770875867b1fc Mon Sep 17 00:00:00 2001 From: sadanand48 Date: Wed, 29 Jul 2020 17:38:40 +0530 Subject: [PATCH 4/7] fix checkstyle --- .../src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java | 1 - 1 file changed, 1 deletion(-) diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java index 83a0b7a41bbb..a99d751b612c 100644 --- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java +++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java @@ -22,7 +22,6 @@ import org.apache.hadoop.hdds.utils.db.DBStore; import org.apache.hadoop.hdds.utils.db.DBStoreBuilder; import org.apache.hadoop.hdds.utils.db.Table; -import org.apache.hadoop.ozone.MiniOzoneCluster; import org.apache.hadoop.ozone.debug.DBScanner; import org.apache.hadoop.ozone.debug.RDBParser; import org.apache.hadoop.ozone.om.helpers.OmKeyInfo; From e838b5af3058ca4683693dc85bd391172f7fa8c0 Mon Sep 17 00:00:00 2001 From: sadanand48 Date: Thu, 30 Jul 2020 23:54:53 +0530 Subject: [PATCH 5/7] Addressed review comments --- .../apache/hadoop/ozone/om/TestOmLDBCli.java | 37 ++++++++++--------- 1 file changed, 20 insertions(+), 17 deletions(-) diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java index a99d751b612c..e14ce80ddad7 100644 --- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java +++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java @@ -47,6 +47,7 @@ public class TestOmLDBCli { private RDBParser rdbParser; private DBScanner dbScanner; private DBStore dbStore = null; + private static List keyNames; @Rule public TemporaryFolder folder = new TemporaryFolder(); @@ -56,6 +57,7 @@ public void setup() throws Exception { conf = new OzoneConfiguration(); rdbParser = new RDBParser(); dbScanner = new DBScanner(); + keyNames = new ArrayList<>(); } @After @@ -65,7 +67,7 @@ public void shutdown() throws Exception { } } - @Test(expected = IllegalArgumentException.class) + @Test public void testOMDB() throws Exception { File newFolder = folder.newFolder(); if(!newFolder.exists()) { @@ -88,30 +90,31 @@ public void testOMDB() throws Exception { } rdbParser.setDbPath(dbStore.getDbLocation().getAbsolutePath()); dbScanner.setParent(rdbParser); - // list will store volumeNames/bucketNames/keyNames - List keyNames = new ArrayList<>(); - getKeyNames(dbScanner, "keyTable", keyNames); - Assert.assertEquals(5, keyNames.size()); - Assert.assertTrue(keyNames.contains("key1")); - Assert.assertTrue(keyNames.contains("key5")); - Assert.assertFalse(keyNames.contains("key6")); + Assert.assertEquals(5, getKeyNames(dbScanner).size()); + Assert.assertTrue(getKeyNames(dbScanner).contains("key1")); + Assert.assertTrue(getKeyNames(dbScanner).contains("key5")); + Assert.assertFalse(getKeyNames(dbScanner).contains("key6")); dbScanner.setLimit(1); - getKeyNames(dbScanner, "keyTable", keyNames); - Assert.assertEquals(1, keyNames.size()); + Assert.assertEquals(1, getKeyNames(dbScanner).size()); dbScanner.setLimit(-1); - getKeyNames(dbScanner, "keyTable", keyNames); - Assert.assertEquals(0, 
keyNames.size()); + try { + getKeyNames(dbScanner); + Assert.fail("IllegalArgumentException is expected"); + }catch (IllegalArgumentException e){ + //ignore + } } - private static void getKeyNames(DBScanner dbScanner, - String tableName, List entityNames) throws Exception { - dbScanner.setTableName(tableName); + private static List getKeyNames(DBScanner dbScanner) + throws Exception { + keyNames.clear(); + dbScanner.setTableName("keyTable"); dbScanner.call(); - entityNames.clear(); Assert.assertFalse(dbScanner.getScannedObjects().isEmpty()); for (Object o : dbScanner.getScannedObjects()){ OmKeyInfo keyInfo = (OmKeyInfo)o; - entityNames.add(keyInfo.getKeyName()); + keyNames.add(keyInfo.getKeyName()); } + return keyNames; } } From 4d51176806717e1e5a7485d6406e01ebb75c28b7 Mon Sep 17 00:00:00 2001 From: sadanand48 Date: Wed, 12 Aug 2020 11:36:38 +0530 Subject: [PATCH 6/7] fixed failing checks --- .../org/apache/hadoop/ozone/om/TestOmLDBCli.java | 12 ++++++------ .../org/apache/hadoop/ozone/debug/DBScanner.java | 16 +++++++++++++--- 2 files changed, 19 insertions(+), 9 deletions(-) diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java index e14ce80ddad7..db0889525b82 100644 --- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java +++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java @@ -75,15 +75,15 @@ public void testOMDB() throws Exception { } // Dummy om.db with only keyTable dbStore = DBStoreBuilder.newBuilder(conf) - .setName("om.db") - .setPath(newFolder.toPath()) - .addTable("keyTable") - .build(); + .setName("om.db") + .setPath(newFolder.toPath()) + .addTable("keyTable") + .build(); // insert 5 keys for (int i = 0; i<5; i++) { OmKeyInfo value = TestOMRequestUtils.createOmKeyInfo("sampleVol", "sampleBuck", "key" + (i+1), HddsProtos.ReplicationType.STAND_ALONE, - HddsProtos.ReplicationFactor.ONE); + HddsProtos.ReplicationFactor.ONE); String key = "key"+ (i); Table keyTable = dbStore.getTable("keyTable"); keyTable.put(key.getBytes(), value.getProtobuf().toByteArray()); @@ -106,7 +106,7 @@ public void testOMDB() throws Exception { } private static List getKeyNames(DBScanner dbScanner) - throws Exception { + throws Exception { keyNames.clear(); dbScanner.setTableName("keyTable"); dbScanner.call(); diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java index 21d5fd171985..daf4ae76ca73 100644 --- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java +++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java @@ -74,7 +74,7 @@ public class DBScanner implements Callable, SubcommandWithParent { private List scannedObjects; private static List displayTable(RocksIterator iterator, - DBColumnFamilyDefinition dbColumnFamilyDefinition) throws IOException { + DBColumnFamilyDefinition dbColumnFamilyDefinition) throws IOException { List outputs = new ArrayList<>(); iterator.seekToFirst(); while (iterator.isValid() && limit > 0){ @@ -92,6 +92,7 @@ private static List displayTable(RocksIterator iterator, Gson gson = new GsonBuilder().setPrettyPrinting().create(); result.append(gson.toJson(o)); System.out.println(result.toString()); + limit--; iterator.next(); } return outputs; @@ -110,7 +111,11 @@ public void setParent(RDBParser 
parent) { } public void setLimit(int limit) { - this.limit = limit; + DBScanner.limit = limit; + } + + public List getScannedObjects() { + return scannedObjects; } private static ColumnFamilyHandle getColumnFamilyHandle( @@ -166,6 +171,10 @@ public Void call() throws Exception { private void printAppropriateTable( List columnFamilyHandleList, RocksDB rocksDB, String dbPath) throws IOException { + if (limit < 1) { + throw new IllegalArgumentException( + "List length should be a positive number"); + } dbPath = removeTrailingSlashIfNeeded(dbPath); this.constructColumnFamilyMap(DBDefinitionFactory. getDefinition(new File(dbPath).getName())); @@ -177,7 +186,8 @@ private void printAppropriateTable( this.columnFamilyMap.get(tableName); ColumnFamilyHandle columnFamilyHandle = getColumnFamilyHandle( columnFamilyDefinition.getTableName() - .getBytes(StandardCharsets.UTF_8), columnFamilyHandleList); + .getBytes(StandardCharsets.UTF_8), + columnFamilyHandleList); if (columnFamilyHandle == null) { throw new IllegalArgumentException("columnFamilyHandle is null"); } From 10386cec9c94882ca927a8ad76d5231ab7f95995 Mon Sep 17 00:00:00 2001 From: sadanand48 Date: Wed, 12 Aug 2020 11:59:01 +0530 Subject: [PATCH 7/7] refactor code --- .../test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java | 4 ++-- .../main/java/org/apache/hadoop/ozone/debug/DBScanner.java | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java index db0889525b82..450eebb3a449 100644 --- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java +++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java @@ -94,9 +94,9 @@ public void testOMDB() throws Exception { Assert.assertTrue(getKeyNames(dbScanner).contains("key1")); Assert.assertTrue(getKeyNames(dbScanner).contains("key5")); Assert.assertFalse(getKeyNames(dbScanner).contains("key6")); - dbScanner.setLimit(1); + DBScanner.setLimit(1); Assert.assertEquals(1, getKeyNames(dbScanner).size()); - dbScanner.setLimit(-1); + DBScanner.setLimit(-1); try { getKeyNames(dbScanner); Assert.fail("IllegalArgumentException is expected"); diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java index daf4ae76ca73..638fa0cb5423 100644 --- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java +++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java @@ -110,7 +110,7 @@ public void setParent(RDBParser parent) { this.parent = parent; } - public void setLimit(int limit) { + public static void setLimit(int limit) { DBScanner.limit = limit; }
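
Usage sketch (not part of the patch series; the om.db path is a placeholder and the wrapper class name is illustrative): the snippet below drives DBScanner programmatically the same way TestOmLDBCli does, using only the methods these patches introduce. After the final patch the limit is a static field, so it is set through DBScanner.setLimit(), and a non-positive value is rejected with IllegalArgumentException before any records are read.

    import java.util.List;
    import org.apache.hadoop.ozone.debug.DBScanner;
    import org.apache.hadoop.ozone.debug.RDBParser;

    public class LdbLimitSketch {
      public static void main(String[] args) throws Exception {
        RDBParser parser = new RDBParser();
        parser.setDbPath("/path/to/om.db");   // placeholder: point at a real om.db

        DBScanner scanner = new DBScanner();
        scanner.setParent(parser);
        scanner.setTableName("keyTable");

        DBScanner.setLimit(2);                // scan at most 2 records
        scanner.call();                       // reads keyTable, pretty-prints each value as JSON
        List<?> scanned = scanner.getScannedObjects();
        System.out.println("records scanned: " + scanned.size());

        DBScanner.setLimit(-1);               // non-positive limits are invalid
        try {
          scanner.call();
        } catch (IllegalArgumentException e) {
          // "List length should be a positive number"
          System.out.println(e.getMessage());
        }
      }
    }

This is the same behaviour the updated TestOmLDBCli asserts: five keys with the default limit, one key after setLimit(1), and an IllegalArgumentException after setLimit(-1).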