diff --git a/hadoop-ozone/dist/src/main/smoketest/debug/ozone-debug-ldb.robot b/hadoop-ozone/dist/src/main/smoketest/debug/ozone-debug-ldb.robot
new file mode 100644
index 000000000000..e006e154af1b
--- /dev/null
+++ b/hadoop-ozone/dist/src/main/smoketest/debug/ozone-debug-ldb.robot
@@ -0,0 +1,93 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+*** Settings ***
+Documentation       Test ozone debug ldb CLI
+Library             OperatingSystem
+Resource            ../lib/os.robot
+Test Timeout        5 minutes
+Suite Setup         Write keys
+
+*** Variables ***
+${PREFIX}      ${EMPTY}
+${VOLUME}      cli-debug-volume${PREFIX}
+${BUCKET}      cli-debug-bucket
+${DEBUGKEY}    debugKey
+${TESTFILE}    testfile
+
+*** Keywords ***
+Write keys
+    Run Keyword if    '${SECURITY_ENABLED}' == 'true'    Kinit test user    testuser    testuser.keytab
+    Execute           ozone sh volume create ${VOLUME}
+    Execute           ozone sh bucket create ${VOLUME}/${BUCKET} -l OBJECT_STORE
+    Execute           dd if=/dev/urandom of=${TEMP_DIR}/${TESTFILE} bs=100000 count=15
+    Execute           ozone sh key put ${VOLUME}/${BUCKET}/${TESTFILE}1 ${TEMP_DIR}/${TESTFILE}
+    Execute           ozone sh key put ${VOLUME}/${BUCKET}/${TESTFILE}2 ${TEMP_DIR}/${TESTFILE}
+    Execute           ozone sh key put ${VOLUME}/${BUCKET}/${TESTFILE}3 ${TEMP_DIR}/${TESTFILE}
+    Execute           ozone sh key addacl -a user:systest:a ${VOLUME}/${BUCKET}/${TESTFILE}3
+
+*** Test Cases ***
+Test ozone debug ldb ls
+    ${output} =    Execute    ozone debug ldb --db=/data/metadata/om.db ls
+    Should contain    ${output}    keyTable
+
+Test ozone debug ldb scan
+    # test count option
+    ${output} =    Execute    ozone debug ldb --db=/data/metadata/om.db scan --cf=keyTable --count
+    Should Not Be Equal    ${output}    0
+    # test valid json for scan command
+    ${output} =    Execute    ozone debug ldb --db=/data/metadata/om.db scan --cf=keyTable | jq -r '.'
+    Should contain    ${output}    keyName
+    Should contain    ${output}    testfile1
+    Should contain    ${output}    testfile2
+    Should contain    ${output}    testfile3
+    # test startkey option
+    ${output} =    Execute    ozone debug ldb --db=/data/metadata/om.db scan --cf=keyTable --startkey="/cli-debug-volume/cli-debug-bucket/testfile2"
+    Should not contain    ${output}    testfile1
+    Should contain    ${output}    testfile2
+    Should contain    ${output}    testfile3
+    # test endkey option
+    ${output} =    Execute    ozone debug ldb --db=/data/metadata/om.db scan --cf=keyTable --endkey="/cli-debug-volume/cli-debug-bucket/testfile2"
+    Should contain    ${output}    testfile1
+    Should contain    ${output}    testfile2
+    Should not contain    ${output}    testfile3
+    # test fields option
+    ${output} =    Execute    ozone debug ldb --db=/data/metadata/om.db scan --cf=keyTable --fields="volumeName,bucketName,keyName"
+    Should contain    ${output}    volumeName
+    Should contain    ${output}    bucketName
+    Should contain    ${output}    keyName
+    Should not contain    ${output}    objectID
+    Should not contain    ${output}    dataSize
+    Should not contain    ${output}    keyLocationVersions
+    # test filter option with one filter
+    ${output} =    Execute    ozone debug ldb --db=/data/metadata/om.db scan --cf=keyTable --filter="keyName:equals:testfile2"
+    Should not contain    ${output}    testfile1
+    Should contain    ${output}    testfile2
+    Should not contain    ${output}    testfile3
+    # test filter option with one multi-level filter
+    ${output} =    Execute    ozone debug ldb --db=/data/metadata/om.db scan --cf=keyTable --filter="acls.name:equals:systest"
+    Should not contain    ${output}    testfile1
+    Should not contain    ${output}    testfile2
+    Should contain    ${output}    testfile3
+    # test filter option with multiple filters
+    ${output} =    Execute    ozone debug ldb --db=/data/metadata/om.db scan --cf=keyTable --filter="keyName:equals:testfile3,acls.name:equals:systest"
+    Should not contain    ${output}    testfile1
+    Should not contain    ${output}    testfile2
+    Should contain    ${output}    testfile3
+    # test filter option where no record matches both filters
+    ${output} =    Execute    ozone debug ldb --db=/data/metadata/om.db scan --cf=keyTable --filter="acls.name:equals:systest,keyName:equals:testfile2"
+    Should not contain    ${output}    testfile1
+    Should not contain    ${output}    testfile2
+    Should not contain    ${output}    testfile3
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/debug/TestLDBCli.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/debug/TestLDBCli.java
index 7af0b5f9aa19..a4327a49bfab 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/debug/TestLDBCli.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/debug/TestLDBCli.java
@@ -170,6 +170,18 @@ private static Stream<Arguments> scanTestCases() {
             Named.of("Invalid EndKey key9", Arrays.asList("--endkey", "key9")),
             Named.of("Expect key1-key5", Pair.of("key1", "key6"))
         ),
+        Arguments.of(
+            Named.of(KEY_TABLE, Pair.of(KEY_TABLE, false)),
+            Named.of("Default", Pair.of(0, "")),
+            Named.of("Filter key3", Arrays.asList("--filter", "keyName:equals:key3")),
+            Named.of("Expect key3", Pair.of("key3", "key4"))
+        ),
+        Arguments.of(
+            Named.of(KEY_TABLE, Pair.of(KEY_TABLE, false)),
+            Named.of("Default", Pair.of(0, "")),
+            Named.of("Filter invalid key", Arrays.asList("--filter", "keyName:equals:key9")),
+            Named.of("Expect no keys", null)
+        ),
         Arguments.of(
             Named.of(BLOCK_DATA + " V3", Pair.of(BLOCK_DATA, true)),
             Named.of("Default", Pair.of(0, "")),
diff --git
 a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java
index 4653aa3eeb31..5e1207519aba 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java
@@ -44,6 +44,7 @@
 import org.apache.hadoop.ozone.OzoneConsts;
 import org.apache.hadoop.ozone.container.common.statemachine.DatanodeConfiguration;
 import org.apache.hadoop.ozone.container.metadata.DatanodeSchemaThreeDBDefinition;
+import org.apache.hadoop.ozone.utils.Filter;
 import org.kohsuke.MetaInfServices;
 import org.rocksdb.ColumnFamilyDescriptor;
 import org.rocksdb.ColumnFamilyHandle;
@@ -128,6 +129,14 @@ public class DBScanner implements Callable<Void>, SubcommandWithParent {
           "eg.) \"name,acls.type\" for showing name and type under acls.")
   private String fieldsFilter;
 
+  @CommandLine.Option(names = {"--filter"},
+      description = "Comma-separated list of \"<field>:<operator>:<value>\" where " +
+          "<field> is any valid field of the record, " +
+          "<operator> is (EQUALS, MAX or MIN) and " +
+          "<value> is the value of the field. " +
+          "eg.) \"dataSize:equals:1000\" for showing records having the value 1000 for dataSize")
+  private String filter;
+
   @CommandLine.Option(names = {"--dnSchema", "--dn-schema", "-d"},
       description = "Datanode DB Schema Version: V1/V2/V3",
       defaultValue = "V3")
@@ -298,7 +307,7 @@ private void processRecords(ManagedRocksIterator iterator,
         }
         Future<Void> future = threadPool.submit(
             new Task(dbColumnFamilyDef, batch, logWriter, sequenceId,
-                withKey, schemaV3, fieldsFilter));
+                withKey, schemaV3, fieldsFilter, filter));
         futures.add(future);
         batch = new ArrayList<>(batchSize);
         sequenceId++;
@@ -306,7 +315,7 @@ private void processRecords(ManagedRocksIterator iterator,
     }
     if (!batch.isEmpty()) {
       Future<Void> future = threadPool.submit(new Task(dbColumnFamilyDef,
-          batch, logWriter, sequenceId, withKey, schemaV3, fieldsFilter));
+          batch, logWriter, sequenceId, withKey, schemaV3, fieldsFilter, filter));
       futures.add(future);
     }
 
@@ -473,10 +482,12 @@ private static class Task implements Callable<Void> {
     private final boolean withKey;
     private final boolean schemaV3;
     private String valueFields;
+    private String valueFilter;
 
+    @SuppressWarnings("checkstyle:parameternumber")
     Task(DBColumnFamilyDefinition dbColumnFamilyDefinition,
         ArrayList<ByteArrayKeyValue> batch, LogWriter logWriter,
-        long sequenceId, boolean withKey, boolean schemaV3, String valueFields) {
+        long sequenceId, boolean withKey, boolean schemaV3, String valueFields, String filter) {
       this.dbColumnFamilyDefinition = dbColumnFamilyDefinition;
       this.batch = batch;
       this.logWriter = logWriter;
@@ -484,6 +495,7 @@ private static class Task implements Callable<Void> {
       this.withKey = withKey;
       this.schemaV3 = schemaV3;
       this.valueFields = valueFields;
+      this.valueFilter = filter;
     }
 
     Map<String, Object> getFieldSplit(List<String> fields, Map<String, Object> fieldMap) {
@@ -504,6 +516,31 @@ Map<String, Object> getFieldSplit(List<String> fields, Map<String, Object> fieldMap) {
       return fieldMap;
     }
 
+    void getFilterSplit(List<String> fields, Map<String, Filter> fieldMap, Filter leafValue) throws IOException {
+      int len = fields.size();
+      if (len == 1) {
+        Filter currentValue = fieldMap.get(fields.get(0));
+        if (currentValue != null) {
+          err().println("Cannot pass multiple values for the same field and " +
+              "cannot have a filter for both parent and child");
+          throw new IOException("Invalid filter passed");
+        }
+        fieldMap.put(fields.get(0), leafValue);
+      } else {
+        Filter fieldMapGet = fieldMap.computeIfAbsent(fields.get(0), k -> new Filter());
+        if (fieldMapGet.getValue() != null) {
+          err().println("Cannot pass multiple values for the same field and " +
+              "cannot have a filter for both parent and child");
+          throw new IOException("Invalid filter passed");
+        }
+        Map<String, Filter> nextLevel = fieldMapGet.getNextLevel();
+        if (nextLevel == null) {
+          fieldMapGet.setNextLevel(new HashMap<>());
+        }
+        getFilterSplit(fields.subList(1, len), fieldMapGet.getNextLevel(), leafValue);
+      }
+    }
+
     @Override
     public Void call() {
       try {
@@ -517,6 +554,26 @@ public Void call() {
         }
       }
 
+      Map<String, Filter> fieldsFilterSplitMap = new HashMap<>();
+      if (valueFilter != null) {
+        for (String field : valueFilter.split(",")) {
+          String[] fieldValue = field.split(":");
+          if (fieldValue.length != 3) {
+            err().println("Error: Invalid format for filter \"" + field +
+                "\". Usage: <field>:<operator>:<value>. Ignoring this filter");
+          } else {
+            Filter filter = new Filter(fieldValue[1], fieldValue[2]);
+            if (filter.getOperator() == null) {
+              err().println("Error: Invalid format for filter \"" + field +
+                  "\". <operator> can be one of [EQUALS, MIN, MAX]. Ignoring this filter");
+            } else {
+              String[] subfields = fieldValue[0].split("\\.");
+              getFilterSplit(Arrays.asList(subfields), fieldsFilterSplitMap, filter);
+            }
+          }
+        }
+      }
+
       for (ByteArrayKeyValue byteArrayKeyValue : batch) {
         StringBuilder sb = new StringBuilder();
         if (!(sequenceId == FIRST_SEQUENCE_ID && results.isEmpty())) {
@@ -552,9 +609,14 @@ public Void call() {
           Object o = dbColumnFamilyDefinition.getValueCodec()
               .fromPersistedFormat(byteArrayKeyValue.getValue());
 
+          if (valueFilter != null &&
+              !checkFilteredObject(o, dbColumnFamilyDefinition.getValueType(), fieldsFilterSplitMap)) {
+            // the record doesn't pass the filter
+            continue;
+          }
           if (valueFields != null) {
             Map<String, Object> filteredValue = new HashMap<>();
-            filteredValue.putAll(getFilteredObject(o, dbColumnFamilyDefinition.getValueType(), fieldsSplitMap));
+            filteredValue.putAll(getFieldsFilteredObject(o, dbColumnFamilyDefinition.getValueType(), fieldsSplitMap));
             sb.append(WRITER.writeValueAsString(filteredValue));
           } else {
             sb.append(WRITER.writeValueAsString(o));
@@ -570,7 +632,92 @@ public Void call() {
       return null;
     }
 
-    Map<String, Object> getFilteredObject(Object obj, Class<?> clazz, Map<String, Object> fieldsSplitMap) {
+    boolean checkFilteredObject(Object obj, Class<?> clazz, Map<String, Filter> fieldsSplitMap)
+        throws IOException {
+      for (Map.Entry<String, Filter> field : fieldsSplitMap.entrySet()) {
+        try {
+          Field valueClassField = getRequiredFieldFromAllFields(clazz, field.getKey());
+          Object valueObject = valueClassField.get(obj);
+          Filter fieldValue = field.getValue();
+
+          if (valueObject == null) {
+            // there is no such field in the record; this filter is ignored for the current record
+            continue;
+          }
+          if (fieldValue == null) {
+            err().println("Malformed filter. Check input");
Check input"); + throw new IOException("Invalid filter passed"); + } else if (fieldValue.getNextLevel() == null) { + // reached the end of fields hierarchy, check if they match the filter + // Currently, only equals operation is supported + if (Filter.FilterOperator.EQUALS.equals(fieldValue.getOperator()) && + !String.valueOf(valueObject).equals(fieldValue.getValue())) { + return false; + } else if (!Filter.FilterOperator.EQUALS.equals(fieldValue.getOperator())) { + err().println("Only EQUALS operator is supported currently."); + throw new IOException("Invalid filter passed"); + } + } else { + Map subfields = fieldValue.getNextLevel(); + if (Collection.class.isAssignableFrom(valueObject.getClass())) { + if (!checkFilteredObjectCollection((Collection) valueObject, subfields)) { + return false; + } + } else if (Map.class.isAssignableFrom(valueObject.getClass())) { + Map valueObjectMap = (Map) valueObject; + boolean flag = false; + for (Map.Entry ob : valueObjectMap.entrySet()) { + boolean subflag; + if (Collection.class.isAssignableFrom(ob.getValue().getClass())) { + subflag = checkFilteredObjectCollection((Collection)ob.getValue(), subfields); + } else { + subflag = checkFilteredObject(ob.getValue(), ob.getValue().getClass(), subfields); + } + if (subflag) { + // atleast one item in the map/list of the record has matched the filter, + // so record passes the filter. + flag = true; + break; + } + } + if (!flag) { + // none of the items in the map/list passed the filter => record doesn't pass the filter + return false; + } + } else { + if (!checkFilteredObject(valueObject, valueClassField.getType(), subfields)) { + return false; + } + } + } + } catch (NoSuchFieldException ex) { + err().println("ERROR: no such field: " + field); + exception = true; + return false; + } catch (IllegalAccessException e) { + err().println("ERROR: Cannot get field from object: " + field); + exception = true; + return false; + } catch (Exception ex) { + err().println("ERROR: field: " + field + ", ex: " + ex); + exception = true; + return false; + } + } + return true; + } + + boolean checkFilteredObjectCollection(Collection valueObject, Map fields) + throws NoSuchFieldException, IllegalAccessException, IOException { + for (Object ob : valueObject) { + if (checkFilteredObject(ob, ob.getClass(), fields)) { + return true; + } + } + return false; + } + + Map getFieldsFilteredObject(Object obj, Class clazz, Map fieldsSplitMap) { Map valueMap = new HashMap<>(); for (Map.Entry field : fieldsSplitMap.entrySet()) { try { @@ -583,7 +730,7 @@ Map getFilteredObject(Object obj, Class clazz, Map subfieldObjectsList = - getFilteredObjectCollection((Collection) valueObject, subfields); + getFieldsFilteredObjectCollection((Collection) valueObject, subfields); valueMap.put(field.getKey(), subfieldObjectsList); } else if (Map.class.isAssignableFrom(valueObject.getClass())) { Map subfieldObjectsMap = new HashMap<>(); @@ -591,16 +738,16 @@ Map getFilteredObject(Object obj, Class clazz, Map ob : valueObjectMap.entrySet()) { Object subfieldValue; if (Collection.class.isAssignableFrom(ob.getValue().getClass())) { - subfieldValue = getFilteredObjectCollection((Collection)ob.getValue(), subfields); + subfieldValue = getFieldsFilteredObjectCollection((Collection)ob.getValue(), subfields); } else { - subfieldValue = getFilteredObject(ob.getValue(), ob.getValue().getClass(), subfields); + subfieldValue = getFieldsFilteredObject(ob.getValue(), ob.getValue().getClass(), subfields); } subfieldObjectsMap.put(ob.getKey(), subfieldValue); } 
               valueMap.put(field.getKey(), subfieldObjectsMap);
             } else {
               valueMap.put(field.getKey(),
-                  getFilteredObject(valueObject, valueClassField.getType(), subfields));
+                  getFieldsFilteredObject(valueObject, valueClassField.getType(), subfields));
             }
           }
         } catch (NoSuchFieldException ex) {
@@ -612,11 +759,11 @@ Map<String, Object> getFilteredObject(Object obj, Class<?> clazz, Map<String, Object> fieldsSplitMap) {
       return valueMap;
     }
 
-    List<Object> getFilteredObjectCollection(Collection<?> valueObject, Map<String, Object> fields)
+    List<Object> getFieldsFilteredObjectCollection(Collection<?> valueObject, Map<String, Object> fields)
         throws NoSuchFieldException, IllegalAccessException {
      List<Object> subfieldObjectsList = new ArrayList<>();
      for (Object ob : valueObject) {
-        Object subfieldValue = getFilteredObject(ob, ob.getClass(), fields);
+        Object subfieldValue = getFieldsFilteredObject(ob, ob.getClass(), fields);
        subfieldObjectsList.add(subfieldValue);
      }
      return subfieldObjectsList;
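
Reviewer note: `getFilterSplit` above expands each dotted `<field>` path into a tree of `Filter` nodes, so `acls.name:equals:systest` becomes an `acls` node whose `nextLevel` map holds a `name` leaf, and all comma-separated filters land in the same top-level map, which is why they combine with AND semantics in `checkFilteredObject`. Below is a minimal, standalone sketch of that parsing step; it is not part of the patch, it assumes the `Filter` class introduced below is on the classpath, and `FilterSplitDemo`/`split` are illustrative names only.

```java
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.ozone.utils.Filter;

// Illustrative sketch (not part of the patch) of how a --filter argument is
// expanded into the nested Map<String, Filter> that the scan task walks.
public class FilterSplitDemo {

  // Mirrors Task#getFilterSplit: the last path segment holds the leaf filter,
  // intermediate segments become Filter nodes with a nextLevel map.
  static void split(List<String> path, Map<String, Filter> tree, Filter leaf) {
    if (path.size() == 1) {
      tree.put(path.get(0), leaf);
    } else {
      Filter node = tree.computeIfAbsent(path.get(0), k -> new Filter());
      if (node.getNextLevel() == null) {
        node.setNextLevel(new HashMap<>());
      }
      split(path.subList(1, path.size()), node.getNextLevel(), leaf);
    }
  }

  public static void main(String[] args) {
    Map<String, Filter> tree = new HashMap<>();
    for (String part : "keyName:equals:testfile3,acls.name:equals:systest".split(",")) {
      String[] fov = part.split(":");  // <field>:<operator>:<value>
      split(Arrays.asList(fov[0].split("\\.")), tree, new Filter(fov[1], fov[2]));
    }
    // Prints (map order may vary):
    // {keyName=(EQUALS,testfile3,null), acls=(null,null,{name=(EQUALS,systest,null)})}
    System.out.println(tree);
  }
}
```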
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/utils/Filter.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/utils/Filter.java
new file mode 100644
index 000000000000..129e1a6158d0
--- /dev/null
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/utils/Filter.java
@@ -0,0 +1,107 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.utils;
+
+import java.util.Map;
+
+/**
+ * Represents the operator and value that a set of records should be filtered with,
+ * plus optional nested filters for child fields.
+ */
+public class Filter {
+  private FilterOperator operator;
+  private Object value;
+  private Map<String, Filter> nextLevel = null;
+
+  public Filter() {
+    this.operator = null;
+    this.value = null;
+  }
+
+  public Filter(FilterOperator operator, Object value) {
+    this.operator = operator;
+    this.value = value;
+  }
+
+  public Filter(String op, Object value) {
+    this.operator = getFilterOperator(op);
+    this.value = value;
+  }
+
+  public Filter(FilterOperator operator, Object value, Map<String, Filter> next) {
+    this.operator = operator;
+    this.value = value;
+    this.nextLevel = next;
+  }
+
+  public Filter(String op, Object value, Map<String, Filter> next) {
+    this.operator = getFilterOperator(op);
+    this.value = value;
+    this.nextLevel = next;
+  }
+
+  public FilterOperator getOperator() {
+    return operator;
+  }
+
+  public void setOperator(FilterOperator operator) {
+    this.operator = operator;
+  }
+
+  public Object getValue() {
+    return value;
+  }
+
+  public void setValue(Object value) {
+    this.value = value;
+  }
+
+  public Map<String, Filter> getNextLevel() {
+    return nextLevel;
+  }
+
+  public void setNextLevel(Map<String, Filter> nextLevel) {
+    this.nextLevel = nextLevel;
+  }
+
+  public FilterOperator getFilterOperator(String op) {
+    if (op.equalsIgnoreCase("equals")) {
+      return FilterOperator.EQUALS;
+    } else if (op.equalsIgnoreCase("max")) {
+      return FilterOperator.MAX;
+    } else if (op.equalsIgnoreCase("min")) {
+      return FilterOperator.MIN;
+    } else {
+      return null;
+    }
+  }
+
+  @Override
+  public String toString() {
+    return "(" + operator + "," + value + "," + nextLevel + ")";
+  }
+
+  /**
+   * Operation of the filter.
+   */
+  public enum FilterOperator {
+    EQUALS,
+    MAX,
+    MIN;
+  }
+}
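
For completeness, record matching is a reflective walk of that filter tree. The sketch below is again illustrative and not part of the patch: `KeyInfo` is a hypothetical stand-in for an OM table value type, and only the flat-field case of `Task#checkFilteredObject` is reproduced. It captures the matching rules the patch implements: all filters must match (AND), only EQUALS is evaluated (the patch itself rejects MIN/MAX at match time with an error), and field values are compared as strings via `String.valueOf`.

```java
import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.ozone.utils.Filter;

public class FilterMatchDemo {

  // Hypothetical record type standing in for an OM table value (illustration only).
  static class KeyInfo {
    private final String keyName = "testfile3";
    private final long dataSize = 1500000L;
  }

  // Flat-field analogue of Task#checkFilteredObject: AND semantics across
  // filters, EQUALS only, string comparison of the reflected field value.
  static boolean matches(Object obj, Map<String, Filter> filters) throws Exception {
    for (Map.Entry<String, Filter> e : filters.entrySet()) {
      Field f = obj.getClass().getDeclaredField(e.getKey());
      f.setAccessible(true);
      if (!Filter.FilterOperator.EQUALS.equals(e.getValue().getOperator()) ||
          !String.valueOf(f.get(obj)).equals(e.getValue().getValue())) {
        return false;  // non-EQUALS operators are treated as non-matches here
      }
    }
    return true;
  }

  public static void main(String[] args) throws Exception {
    Map<String, Filter> filters = new HashMap<>();
    filters.put("keyName", new Filter("equals", "testfile3"));
    filters.put("dataSize", new Filter("equals", "1500000"));
    System.out.println(matches(new KeyInfo(), filters));  // true
  }
}
```

The `dataSize` of 1500000 mirrors the 15 x 100000-byte test file written by the smoketest above.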