From 70b3b47bb92d319755fe6f78250fdbbbf4effcac Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Szabolcs=20G=C3=A1l?= Date: Mon, 14 Nov 2022 18:04:38 +0100 Subject: [PATCH 1/8] HDDS-7398 Create CLI tool to remove expired certificates from SCM metadata store --- .../hdds/scm/metadata/SCMMetadataStore.java | 16 +-- .../hadoop/hdds/utils/db/TypedTable.java | 16 +-- .../scm/metadata/SCMMetadataStoreImpl.java | 33 +++--- hadoop-hdds/tools/pom.xml | 4 + .../hdds/scm/cli/cert/CertCommands.java | 6 +- .../hdds/scm/cli/cert/CleanExpired.java | 102 ++++++++++++++++++ .../hdds/scm/cli/cert/TestCleanExpired.java | 90 ++++++++++++++++ 7 files changed, 231 insertions(+), 36 deletions(-) create mode 100644 hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpired.java create mode 100644 hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/cert/TestCleanExpired.java diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMMetadataStore.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMMetadataStore.java index 46b19aa09040..d01e4c7af69a 100644 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMMetadataStore.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMMetadataStore.java @@ -17,28 +17,27 @@ */ package org.apache.hadoop.hdds.scm.metadata; -import java.io.IOException; -import java.math.BigInteger; -import java.security.cert.X509Certificate; - +import com.google.common.annotations.VisibleForTesting; import com.google.protobuf.ByteString; import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction; import org.apache.hadoop.hdds.scm.container.ContainerID; import org.apache.hadoop.hdds.scm.container.ContainerInfo; import org.apache.hadoop.hdds.scm.container.common.helpers.MoveDataNodePair; -import org.apache.hadoop.hdds.security.x509.certificate.CertInfo; -import org.apache.hadoop.hdds.utils.DBStoreHAManager; import org.apache.hadoop.hdds.scm.pipeline.Pipeline; import org.apache.hadoop.hdds.scm.pipeline.PipelineID; +import org.apache.hadoop.hdds.security.x509.certificate.CertInfo; import org.apache.hadoop.hdds.security.x509.certificate.authority.CertificateStore; import org.apache.hadoop.hdds.security.x509.crl.CRLInfo; +import org.apache.hadoop.hdds.utils.DBStoreHAManager; import org.apache.hadoop.hdds.utils.db.BatchOperationHandler; import org.apache.hadoop.hdds.utils.db.DBStore; import org.apache.hadoop.hdds.utils.db.Table; import org.apache.hadoop.hdds.utils.db.TableIterator; -import com.google.common.annotations.VisibleForTesting; +import java.io.IOException; +import java.math.BigInteger; +import java.security.cert.X509Certificate; /** * Generic interface for data stores for SCM. @@ -129,7 +128,8 @@ public interface SCMMetadataStore extends DBStoreHAManager { * @return Iterator * @throws IOException on failure. 
*/ - TableIterator getAllCerts(CertificateStore.CertType certType) + TableIterator<BigInteger, ? extends Table.KeyValue<BigInteger, X509Certificate>> getAllCerts( + CertificateStore.CertType certType) throws IOException; /** diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java index ee15fa896de1..5fa7b9667ff5 100644 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java @@ -18,13 +18,6 @@ */ package org.apache.hadoop.hdds.utils.db; -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Optional; import org.apache.hadoop.hdds.utils.MetadataKeyFilters; @@ -34,8 +27,15 @@ import org.apache.hadoop.hdds.utils.db.cache.CacheValue; import org.apache.hadoop.hdds.utils.db.cache.FullTableCache; import org.apache.hadoop.hdds.utils.db.cache.PartialTableCache; -import org.apache.hadoop.hdds.utils.db.cache.TableCache.CacheType; import org.apache.hadoop.hdds.utils.db.cache.TableCache; +import org.apache.hadoop.hdds.utils.db.cache.TableCache.CacheType; + +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Map; import static org.apache.hadoop.hdds.utils.db.cache.CacheResult.CacheStatus.EXISTS; import static org.apache.hadoop.hdds.utils.db.cache.CacheResult.CacheStatus.NOT_EXIST; diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMMetadataStoreImpl.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMMetadataStoreImpl.java index 3d57b0b145d6..a3ea29eb2a50 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMMetadataStoreImpl.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMMetadataStoreImpl.java @@ -17,52 +17,51 @@ */ package org.apache.hadoop.hdds.scm.metadata; -import java.io.File; -import java.io.IOException; -import java.math.BigInteger; -import java.security.cert.X509Certificate; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; - import com.google.protobuf.ByteString; import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction; import org.apache.hadoop.hdds.scm.container.ContainerID; import org.apache.hadoop.hdds.scm.container.ContainerInfo; import org.apache.hadoop.hdds.scm.container.common.helpers.MoveDataNodePair; -import org.apache.hadoop.hdds.security.x509.certificate.CertInfo; -import org.apache.hadoop.hdds.utils.HAUtils; -import org.apache.hadoop.hdds.utils.TransactionInfo; import org.apache.hadoop.hdds.scm.pipeline.Pipeline; import org.apache.hadoop.hdds.scm.pipeline.PipelineID; +import org.apache.hadoop.hdds.security.x509.certificate.CertInfo; import org.apache.hadoop.hdds.security.x509.certificate.authority.CertificateStore; import org.apache.hadoop.hdds.security.x509.crl.CRLInfo; +import org.apache.hadoop.hdds.utils.HAUtils; +import org.apache.hadoop.hdds.utils.TransactionInfo; import org.apache.hadoop.hdds.utils.db.BatchOperationHandler; import org.apache.hadoop.hdds.utils.db.DBStore; import org.apache.hadoop.hdds.utils.db.DBStoreBuilder; import
org.apache.hadoop.hdds.utils.db.Table; import org.apache.hadoop.hdds.utils.db.TableIterator; +import org.apache.ratis.util.ExitUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.IOException; +import java.math.BigInteger; +import java.security.cert.X509Certificate; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.CONTAINERS; import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.CRLS; import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.CRL_SEQUENCE_ID; import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.DELETED_BLOCKS; +import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.META; import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.MOVE; import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.PIPELINES; import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.REVOKED_CERTS; import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.REVOKED_CERTS_V2; +import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.SEQUENCE_ID; import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.STATEFUL_SERVICE_CONFIG; import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.TRANSACTIONINFO; import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.VALID_CERTS; import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.VALID_SCM_CERTS; -import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.SEQUENCE_ID; -import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.META; import static org.apache.hadoop.ozone.OzoneConsts.DB_TRANSIENT_MARKER; -import org.apache.ratis.util.ExitUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - /** * A RocksDB based implementation of SCM Metadata Store. * @@ -267,7 +266,7 @@ public Table getCRLSequenceIdTable() { } @Override - public TableIterator getAllCerts(CertificateStore.CertType certType) + public TableIterator<BigInteger, ? extends Table.KeyValue<BigInteger, X509Certificate>> getAllCerts(CertificateStore.CertType certType) throws IOException { if (certType == CertificateStore.CertType.VALID_CERTS) { return validCertsTable.iterator(); diff --git a/hadoop-hdds/tools/pom.xml b/hadoop-hdds/tools/pom.xml index 29cbeaf0e1c9..43e8ef297ece 100644 --- a/hadoop-hdds/tools/pom.xml +++ b/hadoop-hdds/tools/pom.xml @@ -99,5 +99,9 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd"> <artifactId>slf4j-reload4j</artifactId> <version>${slf4j.version}</version> </dependency> + <dependency> + <groupId>org.apache.ozone</groupId> + <artifactId>hdds-server-scm</artifactId> + </dependency> </dependencies> diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CertCommands.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CertCommands.java index 21ba03599e76..69a27674c3e8 100644 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CertCommands.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CertCommands.java @@ -17,18 +17,17 @@ */ package org.apache.hadoop.hdds.scm.cli.cert; -import java.util.concurrent.Callable; - import org.apache.hadoop.hdds.cli.GenericCli; import org.apache.hadoop.hdds.cli.HddsVersionProvider; import org.apache.hadoop.hdds.cli.OzoneAdmin; import org.apache.hadoop.hdds.cli.SubcommandWithParent; - import org.kohsuke.MetaInfServices; import picocli.CommandLine.Command; import picocli.CommandLine.Model.CommandSpec; import picocli.CommandLine.Spec; +import java.util.concurrent.Callable; + /** * Sub command for certificate related operations.
*/ @@ -40,6 +39,7 @@ subcommands = { InfoSubcommand.class, ListSubcommand.class, + CleanExpired.class, }) @MetaInfServices(SubcommandWithParent.class) diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpired.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpired.java new file mode 100644 index 000000000000..d8eb82a0aa37 --- /dev/null +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpired.java @@ -0,0 +1,102 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hdds.scm.cli.cert; + +import org.apache.hadoop.hdds.cli.GenericParentCommand; +import org.apache.hadoop.hdds.cli.HddsVersionProvider; +import org.apache.hadoop.hdds.cli.SubcommandWithParent; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition; +import org.apache.hadoop.hdds.utils.HAUtils; +import org.apache.hadoop.hdds.utils.db.DBStore; +import org.apache.hadoop.hdds.utils.db.Table; +import org.apache.hadoop.hdds.utils.db.TableIterator; +import picocli.CommandLine; + +import java.io.File; +import java.io.IOException; +import java.math.BigInteger; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.security.cert.X509Certificate; +import java.time.Instant; +import java.util.concurrent.Callable; + +/** + * This is the handler to clean SCM database from expired certificates + */ +@CommandLine.Command( + name = "clean", + description = "Clean expired certificates from the SCM metadata", + mixinStandardHelpOptions = true, + versionProvider = HddsVersionProvider.class) +public class CleanExpired implements Callable<Void>, SubcommandWithParent { + + @CommandLine.Option(names = {"--db"}, + required = true, + description = "Database metadata directory") + private String dbDirectory; + + @CommandLine.Option(names = {"--name"}, + required = true, + description = "Database file name") + private String dbName; + + @CommandLine.Spec + private CommandLine.Model.CommandSpec spec; + + @Override + public Void call() throws Exception { + GenericParentCommand parent = + (GenericParentCommand) spec.root().userObject(); + + OzoneConfiguration configuration = parent.createOzoneConfiguration(); + + if (!Files.exists(Paths.get(dbDirectory))) { + System.out.println("DB path not exist:" + dbDirectory); + return null; + } + + DBStore dbStore = HAUtils.loadDB( + configuration, new File(dbDirectory), dbName, new SCMDBDefinition()); + + Table<BigInteger, X509Certificate> certsTable = SCMDBDefinition.VALID_CERTS.getTable(dbStore); + removeExpiredCertificates(certsTable.iterator()); + dbStore.close(); + return null; + } + + private void removeExpiredCertificates( + TableIterator<BigInteger, ? extends Table.KeyValue<BigInteger, X509Certificate>> certs) + throws IOException { + while (certs.hasNext()) { + Table.KeyValue<BigInteger, X509Certificate> certPair = certs.next(); + X509Certificate certificate = (X509Certificate) certPair.getValue(); + if (Instant.now().isAfter(certificate.getNotAfter().toInstant())) { + System.out.println("Certificate with id " + certPair.getKey() + + " and value: " + certificate + " will be deleted"); + certs.removeFromDB(); + } + } + } + + @Override + public Class<?> getParentType() { + return CertCommands.class; + } +} diff --git a/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/cert/TestCleanExpired.java b/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/cert/TestCleanExpired.java new file mode 100644 index 000000000000..3fb1b43e09ed --- /dev/null +++ b/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/cert/TestCleanExpired.java @@ -0,0 +1,90 @@ +package org.apache.hadoop.hdds.scm.cli.cert; + +import org.apache.hadoop.hdds.HddsConfigKeys; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import
org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition; +import org.apache.hadoop.hdds.utils.db.DBDefinition; +import org.apache.hadoop.hdds.utils.db.DBStore; +import org.apache.hadoop.hdds.utils.db.DBStoreBuilder; +import org.apache.hadoop.hdds.utils.db.Table; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; +import picocli.CommandLine; +import sun.security.x509.CertificateValidity; +import sun.security.x509.X509CertImpl; +import sun.security.x509.X509CertInfo; + +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.IOException; +import java.io.PrintStream; +import java.math.BigInteger; +import java.nio.charset.StandardCharsets; +import java.nio.file.Path; +import java.security.cert.X509Certificate; +import java.sql.Date; +import java.time.Duration; +import java.time.Instant; + +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +public class TestCleanExpired { + + private CleanExpired cmd; + private final ByteArrayOutputStream outContent = new ByteArrayOutputStream(); + private final ByteArrayOutputStream errContent = new ByteArrayOutputStream(); + private final PrintStream originalOut = System.out; + private final PrintStream originalErr = System.err; + private static final String DEFAULT_ENCODING = StandardCharsets.UTF_8.name(); + + @BeforeEach + public void setup() throws IOException { + cmd = new CleanExpired(); + System.setOut(new PrintStream(outContent, false, DEFAULT_ENCODING)); + System.setErr(new PrintStream(errContent, false, DEFAULT_ENCODING)); + } + + @AfterEach + public void tearDown() { + System.setOut(originalOut); + System.setErr(originalErr); + } + + @Test + public void testOnlyExpiredCertsRemoved(@TempDir Path tempDir) + throws Exception { + OzoneConfiguration conf = new OzoneConfiguration(); + conf.set(HddsConfigKeys.OZONE_METADATA_DIRS, tempDir.toString()); + File newFolder = new File(tempDir + "/newFolder"); + + if (!newFolder.exists()) { + assertTrue(newFolder.mkdirs()); + } + DBDefinition sampleDBDef = new SCMDBDefinition(); + X509CertInfo certInfo = new X509CertInfo(); + certInfo.set(X509CertInfo.IDENT, new CertificateValidity(Date.from(Instant.now().minus(Duration.ofDays(365))), Date.from(Instant.now().plus(Duration.ofDays(365))))); + X509Certificate nonExpired = new X509CertImpl(certInfo); + certInfo.set(X509CertInfo.IDENT, new CertificateValidity(Date.from(Instant.now().minus(Duration.ofDays(365))), Date.from(Instant.now().minus(Duration.ofDays(15))))); + X509Certificate expired = new X509CertImpl(certInfo); + String dbName = "SampleStore"; + try (DBStore dbStore = DBStoreBuilder.newBuilder(conf, sampleDBDef) + .setName("SampleStore").setPath(newFolder.toPath()).build()) { + Table table = SCMDBDefinition.VALID_CERTS + .getTable(dbStore); + + table.put(BigInteger.ONE, nonExpired); + table.put(BigInteger.valueOf(2L), expired); + + CommandLine c = new CommandLine(cmd); + c.parseArgs("-db", newFolder.getPath(), "-name", dbName); + cmd.call(); + assertTrue(table.isExist(BigInteger.ONE)); + assertFalse(table.isExist(BigInteger.valueOf(2L))); + } + Assertions.assertFalse(false); + } +} \ No newline at end of file From 9f160fc8483c66ee6452a45c697ee63588e6b8e4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Szabolcs=20G=C3=A1l?= Date: Tue, 15 Nov 2022 16:51:13 +0100 Subject: [PATCH 2/8] HDDS-7398 Fix test, refine CleanExpired --- 
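Note (illustration only, not part of the original diff): the reworked test below replaces the sun.security.* certificate fixtures with Mockito mocks, and the pattern that makes it work is consecutive stubbing: stubbing hasNext() with (true, true, false) makes a mocked iterator yield exactly two entries before the loop ends. A minimal, self-contained sketch of that pattern, with a plain java.util.Iterator standing in for Ozone's TableIterator:

import java.util.Iterator;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

public class IteratorStubDemo {
  public static void main(String[] args) {
    @SuppressWarnings("unchecked")
    Iterator<String> it = (Iterator<String>) mock(Iterator.class);
    // Consecutive stubbing: two passes through the loop, then termination.
    when(it.hasNext()).thenReturn(true, true, false);
    when(it.next()).thenReturn("expired-cert", "valid-cert");
    while (it.hasNext()) {
      System.out.println(it.next()); // prints expired-cert, then valid-cert
    }
    verify(it, times(2)).next(); // same style of verification as the test below
  }
}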
.../hdds/scm/cli/cert/CleanExpired.java | 44 +++++++---- .../hdds/scm/cli/cert/TestCleanExpired.java | 79 +++++++++---------- 2 files changed, 64 insertions(+), 59 deletions(-) diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpired.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpired.java index d8eb82a0aa37..53368c097a06 100644 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpired.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpired.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.hdds.scm.cli.cert; +import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hdds.cli.GenericParentCommand; import org.apache.hadoop.hdds.cli.HddsVersionProvider; import org.apache.hadoop.hdds.cli.SubcommandWithParent; @@ -61,7 +62,7 @@ public class CleanExpired implements Callable<Void>, SubcommandWithParent { private CommandLine.Model.CommandSpec spec; @Override - public Void call() throws Exception { + public Void call() { GenericParentCommand parent = (GenericParentCommand) spec.root().userObject(); @@ -72,26 +73,35 @@ public Void call() { return null; } - DBStore dbStore = HAUtils.loadDB( - configuration, new File(dbDirectory), dbName, new SCMDBDefinition()); - - Table<BigInteger, X509Certificate> certsTable = SCMDBDefinition.VALID_CERTS.getTable(dbStore); - removeExpiredCertificates(certsTable.iterator()); - dbStore.close(); + //DBStore mocking, rather pass this to removeExpCert ++ error handling (no certstable) + try (DBStore dbStore = HAUtils.loadDB( + configuration, new File(dbDirectory), dbName, new SCMDBDefinition())) { + removeExpiredCertificates(dbStore); + } catch (IOException e) { + System.out.println("Error trying to open" + dbName + + " at: " + dbDirectory); + } return null; } - private void removeExpiredCertificates( - TableIterator<BigInteger, ? extends Table.KeyValue<BigInteger, X509Certificate>> certs) - throws IOException { - while (certs.hasNext()) { - Table.KeyValue<BigInteger, X509Certificate> certPair = certs.next(); - X509Certificate certificate = (X509Certificate) certPair.getValue(); - if (Instant.now().isAfter(certificate.getNotAfter().toInstant())) { - System.out.println("Certificate with id " + certPair.getKey() + - " and value: " + certificate + " will be deleted"); - certs.removeFromDB(); + @VisibleForTesting + void removeExpiredCertificates(DBStore dbStore) { + try { + Table<BigInteger, X509Certificate> certsTable = + SCMDBDefinition.VALID_CERTS.getTable(dbStore); + TableIterator<BigInteger, ? extends Table.KeyValue<BigInteger, X509Certificate>> tableIterator = certsTable.iterator(); + while (tableIterator.hasNext()) { + Table.KeyValue<BigInteger, X509Certificate> certPair = tableIterator.next(); + X509Certificate certificate = (X509Certificate) certPair.getValue(); + if (Instant.now().isAfter(certificate.getNotAfter().toInstant())) { + System.out.println("Certificate with id " + certPair.getKey() + + " and value: " + certificate + " will be deleted"); + tableIterator.removeFromDB(); + } } + } catch (IOException e) { + System.out.println("Error when trying to open certificate table from db"); } } diff --git a/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/cert/TestCleanExpired.java b/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/cert/TestCleanExpired.java index 3fb1b43e09ed..9c62ef3ebbfa 100644 --- a/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/cert/TestCleanExpired.java +++ b/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/cert/TestCleanExpired.java @@ -1,37 +1,27 @@ package org.apache.hadoop.hdds.scm.cli.cert; -import org.apache.hadoop.hdds.HddsConfigKeys; import
org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition; -import org.apache.hadoop.hdds.utils.db.DBDefinition; import org.apache.hadoop.hdds.utils.db.DBStore; -import org.apache.hadoop.hdds.utils.db.DBStoreBuilder; import org.apache.hadoop.hdds.utils.db.Table; +import org.apache.hadoop.hdds.utils.db.TableIterator; import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.io.TempDir; -import picocli.CommandLine; -import sun.security.x509.CertificateValidity; -import sun.security.x509.X509CertImpl; -import sun.security.x509.X509CertInfo; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.MockitoAnnotations; import java.io.ByteArrayOutputStream; -import java.io.File; import java.io.IOException; import java.io.PrintStream; import java.math.BigInteger; import java.nio.charset.StandardCharsets; -import java.nio.file.Path; import java.security.cert.X509Certificate; import java.sql.Date; import java.time.Duration; import java.time.Instant; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; - public class TestCleanExpired { private CleanExpired cmd; @@ -41,8 +31,26 @@ public class TestCleanExpired { private final PrintStream originalErr = System.err; private static final String DEFAULT_ENCODING = StandardCharsets.UTF_8.name(); + + @Mock + DBStore dbStore; + @Mock + Table mockTable; + @Mock + TableIterator iterator; + @Mock + Table.KeyValue kv; + @Mock + Table.KeyValue kv2; + @Mock + X509Certificate nonExpiredCert; + @Mock + X509Certificate expiredCert; + + @BeforeEach public void setup() throws IOException { + MockitoAnnotations.initMocks(this); cmd = new CleanExpired(); System.setOut(new PrintStream(outContent, false, DEFAULT_ENCODING)); System.setErr(new PrintStream(errContent, false, DEFAULT_ENCODING)); @@ -55,36 +63,23 @@ public void tearDown() { } @Test - public void testOnlyExpiredCertsRemoved(@TempDir Path tempDir) + public void testOnlyExpiredCertsRemoved() throws Exception { OzoneConfiguration conf = new OzoneConfiguration(); - conf.set(HddsConfigKeys.OZONE_METADATA_DIRS, tempDir.toString()); - File newFolder = new File(tempDir + "/newFolder"); - - if (!newFolder.exists()) { - assertTrue(newFolder.mkdirs()); - } - DBDefinition sampleDBDef = new SCMDBDefinition(); - X509CertInfo certInfo = new X509CertInfo(); - certInfo.set(X509CertInfo.IDENT, new CertificateValidity(Date.from(Instant.now().minus(Duration.ofDays(365))), Date.from(Instant.now().plus(Duration.ofDays(365))))); - X509Certificate nonExpired = new X509CertImpl(certInfo); - certInfo.set(X509CertInfo.IDENT, new CertificateValidity(Date.from(Instant.now().minus(Duration.ofDays(365))), Date.from(Instant.now().minus(Duration.ofDays(15))))); - X509Certificate expired = new X509CertImpl(certInfo); - String dbName = "SampleStore"; - try (DBStore dbStore = DBStoreBuilder.newBuilder(conf, sampleDBDef) - .setName("SampleStore").setPath(newFolder.toPath()).build()) { - Table table = SCMDBDefinition.VALID_CERTS - .getTable(dbStore); - - table.put(BigInteger.ONE, nonExpired); - table.put(BigInteger.valueOf(2L), expired); + // Mockito.when(dbStore.getTable("validCerts", BigInteger.class, X509Certificate.class)).thenReturn(mockTable); + Mockito.when(SCMDBDefinition.VALID_CERTS.getTable(dbStore)).thenReturn(mockTable); + TableIterator> iterator1 = mockTable.iterator(); + 
Mockito.when(mockTable.iterator()).thenReturn(iterator); + Mockito.when(nonExpiredCert.getNotAfter()) + .thenReturn(Date.from(Instant.now().plus(Duration.ofDays(365)))); + Mockito.when(expiredCert.getNotAfter()) + .thenReturn(Date.from(Instant.now().minus(Duration.ofDays(365)))); + Mockito.when(iterator.hasNext()).thenReturn(true, true, false); + Mockito.when(iterator.next()).thenReturn(kv, kv2); + Mockito.when(kv.getValue()).thenReturn(expiredCert); + Mockito.when(kv2.getValue()).thenReturn(nonExpiredCert); - CommandLine c = new CommandLine(cmd); - c.parseArgs("-db", newFolder.getPath(), "-name", dbName); - cmd.call(); - assertTrue(table.isExist(BigInteger.ONE)); - assertFalse(table.isExist(BigInteger.valueOf(2L))); - } - Assertions.assertFalse(false); + cmd.removeExpiredCertificates(dbStore); + Mockito.verify(iterator, Mockito.times(1)).removeFromDB(); } } \ No newline at end of file From e48e5f64ac9b4d3a484e7d1f0db9f47e3b2b796f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Szabolcs=20G=C3=A1l?= Date: Wed, 16 Nov 2022 14:46:46 +0100 Subject: [PATCH 3/8] HDDS-7398 Fix checkstyle issues, minor refinements. --- .../scm/metadata/SCMMetadataStoreImpl.java | 3 +- .../hdds/scm/cli/cert/CleanExpired.java | 3 +- .../hdds/scm/cli/cert/TestCleanExpired.java | 41 +++++++++++++------ 3 files changed, 32 insertions(+), 15 deletions(-) diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMMetadataStoreImpl.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMMetadataStoreImpl.java index a3ea29eb2a50..dcbf608d6473 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMMetadataStoreImpl.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMMetadataStoreImpl.java @@ -266,7 +266,8 @@ public Table getCRLSequenceIdTable() { } @Override - public TableIterator<BigInteger, ? extends Table.KeyValue<BigInteger, X509Certificate>> getAllCerts(CertificateStore.CertType certType) + public TableIterator<BigInteger, ? extends Table.KeyValue<BigInteger, X509Certificate>> getAllCerts( + CertificateStore.CertType certType) throws IOException { if (certType == CertificateStore.CertType.VALID_CERTS) { return validCertsTable.iterator(); diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpired.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpired.java index 53368c097a06..50a23e18fae1 100644 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpired.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpired.java @@ -39,7 +39,7 @@ import java.util.concurrent.Callable; /** - * This is the handler to clean SCM database from expired certificates + * This is the handler to clean SCM database from expired certificates.
*/ @CommandLine.Command( name = "clean", description = "Clean expired certificates from the SCM metadata", @@ -73,7 +73,6 @@ public Void call() { return null; } - //DBStore mocking, rather pass this to removeExpCert ++ error handling (no certstable) try (DBStore dbStore = HAUtils.loadDB( configuration, new File(dbDirectory), dbName, new SCMDBDefinition())) { removeExpiredCertificates(dbStore); diff --git a/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/cert/TestCleanExpired.java b/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/cert/TestCleanExpired.java index 9c62ef3ebbfa..930ef20a614a 100644 --- a/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/cert/TestCleanExpired.java +++ b/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/cert/TestCleanExpired.java @@ -1,6 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package org.apache.hadoop.hdds.scm.cli.cert; -import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition; import org.apache.hadoop.hdds.utils.db.DBStore; import org.apache.hadoop.hdds.utils.db.Table; @@ -22,6 +38,9 @@ import java.time.Duration; import java.time.Instant; +/** + * Test the cleaning tool for expired certificates. + */ public class TestCleanExpired { private CleanExpired cmd; @@ -33,19 +52,19 @@ public class TestCleanExpired { @Mock - DBStore dbStore; + private DBStore dbStore; @Mock - Table mockTable; + private Table mockTable; @Mock - TableIterator iterator; + private TableIterator iterator; @Mock - Table.KeyValue kv; + private Table.KeyValue kv; @Mock - Table.KeyValue kv2; + private Table.KeyValue kv2; @Mock - X509Certificate nonExpiredCert; + private X509Certificate nonExpiredCert; @Mock - X509Certificate expiredCert; + private X509Certificate expiredCert; @BeforeEach @@ -65,10 +84,8 @@ public void tearDown() { @Test public void testOnlyExpiredCertsRemoved() throws Exception { - OzoneConfiguration conf = new OzoneConfiguration(); - // Mockito.when(dbStore.getTable("validCerts", BigInteger.class, X509Certificate.class)).thenReturn(mockTable); - Mockito.when(SCMDBDefinition.VALID_CERTS.getTable(dbStore)).thenReturn(mockTable); - TableIterator> iterator1 = mockTable.iterator(); + Mockito.when(SCMDBDefinition.VALID_CERTS.getTable(dbStore)) + .thenReturn(mockTable); Mockito.when(mockTable.iterator()).thenReturn(iterator); Mockito.when(nonExpiredCert.getNotAfter()) .thenReturn(Date.from(Instant.now().plus(Duration.ofDays(365)))); From 76834db0319ac3b300d1fcf87b1adbeeb439c91e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Szabolcs=20G=C3=A1l?= Date: Mon, 5 Dec 2022 10:54:23 +0100 Subject: [PATCH 4/8] HDDS-7398 Fix review comments. Restore import order, add detailed exception message and use only one parameter for the db. 
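Note (editorial sketch, not taken from this patch): merging --db and --name into a single path argument relies on java.nio.file.Path supplying both values that HAUtils.loadDB() needs, namely the containing directory and the store name. With a made-up location:

import java.nio.file.Path;
import java.nio.file.Paths;

public class DbPathSplitDemo {
  public static void main(String[] args) {
    // Hypothetical path to an SCM RocksDB instance; only for illustration.
    Path dbFile = Paths.get("/var/lib/ozone/scm/metadata/scm.db");
    System.out.println(dbFile.getParent());   // /var/lib/ozone/scm/metadata
    System.out.println(dbFile.getFileName()); // scm.db
  }
}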
--- .../hadoop/hdds/utils/db/TypedTable.java | 16 ++++---- .../scm/metadata/SCMMetadataStoreImpl.java | 37 +++++++++---------- .../hdds/scm/cli/cert/CertCommands.java | 5 ++- .../hdds/scm/cli/cert/CleanExpired.java | 30 +++++++-------- .../hdds/scm/cli/cert/TestCleanExpired.java | 2 - 5 files changed, 43 insertions(+), 47 deletions(-) diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java index 5fa7b9667ff5..ee15fa896de1 100644 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java @@ -18,6 +18,13 @@ */ package org.apache.hadoop.hdds.utils.db; +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Optional; import org.apache.hadoop.hdds.utils.MetadataKeyFilters; @@ -27,15 +34,8 @@ import org.apache.hadoop.hdds.utils.db.cache.CacheValue; import org.apache.hadoop.hdds.utils.db.cache.FullTableCache; import org.apache.hadoop.hdds.utils.db.cache.PartialTableCache; -import org.apache.hadoop.hdds.utils.db.cache.TableCache; import org.apache.hadoop.hdds.utils.db.cache.TableCache.CacheType; - -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.Map; +import org.apache.hadoop.hdds.utils.db.cache.TableCache; import static org.apache.hadoop.hdds.utils.db.cache.CacheResult.CacheStatus.EXISTS; import static org.apache.hadoop.hdds.utils.db.cache.CacheResult.CacheStatus.NOT_EXIST; diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMMetadataStoreImpl.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMMetadataStoreImpl.java index dcbf608d6473..16ba0503d02f 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMMetadataStoreImpl.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMMetadataStoreImpl.java @@ -17,54 +17,54 @@ */ package org.apache.hadoop.hdds.scm.metadata; +import java.io.File; +import java.io.IOException; +import java.math.BigInteger; +import java.security.cert.X509Certificate; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + import com.google.protobuf.ByteString; import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction; import org.apache.hadoop.hdds.scm.container.ContainerID; import org.apache.hadoop.hdds.scm.container.ContainerInfo; import org.apache.hadoop.hdds.scm.container.common.helpers.MoveDataNodePair; +import org.apache.hadoop.hdds.security.x509.certificate.CertInfo; +import org.apache.hadoop.hdds.utils.HAUtils; +import org.apache.hadoop.hdds.utils.TransactionInfo; import org.apache.hadoop.hdds.scm.pipeline.Pipeline; import org.apache.hadoop.hdds.scm.pipeline.PipelineID; -import org.apache.hadoop.hdds.security.x509.certificate.CertInfo; import org.apache.hadoop.hdds.security.x509.certificate.authority.CertificateStore; import org.apache.hadoop.hdds.security.x509.crl.CRLInfo; -import org.apache.hadoop.hdds.utils.HAUtils; -import org.apache.hadoop.hdds.utils.TransactionInfo; import 
org.apache.hadoop.hdds.utils.db.BatchOperationHandler; import org.apache.hadoop.hdds.utils.db.DBStore; import org.apache.hadoop.hdds.utils.db.DBStoreBuilder; import org.apache.hadoop.hdds.utils.db.Table; import org.apache.hadoop.hdds.utils.db.TableIterator; -import org.apache.ratis.util.ExitUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.IOException; -import java.math.BigInteger; -import java.security.cert.X509Certificate; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.CONTAINERS; import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.CRLS; import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.CRL_SEQUENCE_ID; import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.DELETED_BLOCKS; -import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.META; import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.MOVE; import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.PIPELINES; import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.REVOKED_CERTS; import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.REVOKED_CERTS_V2; -import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.SEQUENCE_ID; import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.STATEFUL_SERVICE_CONFIG; import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.TRANSACTIONINFO; import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.VALID_CERTS; import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.VALID_SCM_CERTS; +import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.SEQUENCE_ID; +import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.META; import static org.apache.hadoop.ozone.OzoneConsts.DB_TRANSIENT_MARKER; +import org.apache.ratis.util.ExitUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + /** * A RocksDB based implementation of SCM Metadata Store. 
- * */ public class SCMMetadataStoreImpl implements SCMMetadataStore { @@ -191,7 +191,7 @@ public void start(OzoneConfiguration config) metaTable = META.getTable(store); - checkAndPopulateTable(moveTable, META.getName()); + checkAndPopulateTable(metaTable, META.getName()); statefulServiceConfigTable = STATEFUL_SERVICE_CONFIG.getTable(store); @@ -266,8 +266,7 @@ public Table getCRLSequenceIdTable() { } @Override - public TableIterator<BigInteger, ? extends Table.KeyValue<BigInteger, X509Certificate>> getAllCerts( - CertificateStore.CertType certType) + public TableIterator getAllCerts(CertificateStore.CertType certType) throws IOException { if (certType == CertificateStore.CertType.VALID_CERTS) { return validCertsTable.iterator(); diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CertCommands.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CertCommands.java index 69a27674c3e8..6b50cb451b1a 100644 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CertCommands.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CertCommands.java @@ -17,17 +17,18 @@ */ package org.apache.hadoop.hdds.scm.cli.cert; +import java.util.concurrent.Callable; + import org.apache.hadoop.hdds.cli.GenericCli; import org.apache.hadoop.hdds.cli.HddsVersionProvider; import org.apache.hadoop.hdds.cli.OzoneAdmin; import org.apache.hadoop.hdds.cli.SubcommandWithParent; + import org.kohsuke.MetaInfServices; import picocli.CommandLine.Command; import picocli.CommandLine.Model.CommandSpec; import picocli.CommandLine.Spec; -import java.util.concurrent.Callable; - /** * Sub command for certificate related operations. */ diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpired.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpired.java index 50a23e18fae1..09af5fc063bf 100644 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpired.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpired.java @@ -29,7 +29,6 @@ import org.apache.hadoop.hdds.utils.db.TableIterator; import picocli.CommandLine; -import java.io.File; import java.io.IOException; import java.math.BigInteger; import java.nio.file.Files; @@ -43,20 +42,16 @@ */ @CommandLine.Command( name = "clean", - description = "Clean expired certificates from the SCM metadata", + description = "Clean expired certificates from the SCM metadata. 
" + + "This command is only supported when the SCM is shutdown.", mixinStandardHelpOptions = true, versionProvider = HddsVersionProvider.class) public class CleanExpired implements Callable, SubcommandWithParent { @CommandLine.Option(names = {"--db"}, required = true, - description = "Database metadata directory") - private String dbDirectory; - - @CommandLine.Option(names = {"--name"}, - required = true, - description = "Database file name") - private String dbName; + description = "Database file path") + private String dbFile; @CommandLine.Spec private CommandLine.Model.CommandSpec spec; @@ -68,17 +63,19 @@ public Void call() { OzoneConfiguration configuration = parent.createOzoneConfiguration(); - if (!Files.exists(Paths.get(dbDirectory))) { - System.out.println("DB path not exist:" + dbDirectory); + if (!Files.exists(Paths.get(dbFile))) { + System.out.println("DB path does not exist: " + dbFile); return null; } - try (DBStore dbStore = HAUtils.loadDB( - configuration, new File(dbDirectory), dbName, new SCMDBDefinition())) { + try { + DBStore dbStore = HAUtils.loadDB( + configuration, Paths.get(dbFile).getParent().toFile(), + Paths.get(dbFile).getFileName().toString(), new SCMDBDefinition()); removeExpiredCertificates(dbStore); } catch (IOException e) { - System.out.println("Error trying to open" + dbName + - " at: " + dbDirectory); + System.out.println("Error trying to open file: " + dbFile + + " failed with exception: " + e); } return null; } @@ -100,7 +97,8 @@ void removeExpiredCertificates(DBStore dbStore) { } } } catch (IOException e) { - System.out.println("Error when trying to open certificate table from db"); + System.out.println("Error when trying to open " + + "certificate table from db: " + e); } } diff --git a/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/cert/TestCleanExpired.java b/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/cert/TestCleanExpired.java index 930ef20a614a..b169e6359d59 100644 --- a/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/cert/TestCleanExpired.java +++ b/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/cert/TestCleanExpired.java @@ -50,7 +50,6 @@ public class TestCleanExpired { private final PrintStream originalErr = System.err; private static final String DEFAULT_ENCODING = StandardCharsets.UTF_8.name(); - @Mock private DBStore dbStore; @Mock @@ -66,7 +65,6 @@ public class TestCleanExpired { @Mock private X509Certificate expiredCert; - @BeforeEach public void setup() throws IOException { MockitoAnnotations.initMocks(this); From a59e22b2a8ac82e1390c8146bce606cf5e754873 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Szabolcs=20G=C3=A1l?= Date: Tue, 6 Dec 2022 09:09:03 +0100 Subject: [PATCH 5/8] HDDS-7398 Fix review comments. Restore import order, add detailed exception message and use only one parameter for the db. 
--- .../hdds/scm/cli/cert/CleanExpired.java | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpired.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpired.java index 09af5fc063bf..3feab95c96aa 100644 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpired.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpired.java @@ -29,10 +29,9 @@ import org.apache.hadoop.hdds.utils.db.TableIterator; import picocli.CommandLine; +import java.io.File; import java.io.IOException; import java.math.BigInteger; -import java.nio.file.Files; -import java.nio.file.Paths; import java.security.cert.X509Certificate; import java.time.Instant; import java.util.concurrent.Callable; @@ -51,7 +50,7 @@ public class CleanExpired implements Callable<Void>, SubcommandWithParent { @CommandLine.Option(names = {"--db"}, required = true, description = "Database file path") - private String dbFile; + private String dbFilePath; @CommandLine.Spec private CommandLine.Model.CommandSpec spec; @@ -63,18 +62,20 @@ public Void call() { OzoneConfiguration configuration = parent.createOzoneConfiguration(); - if (!Files.exists(Paths.get(dbFile))) { - System.out.println("DB path does not exist: " + dbFile); + File db = new File(dbFilePath); + if (!db.exists()) { + System.out.println("DB path does not exist: " + dbFilePath); return null; } try { DBStore dbStore = HAUtils.loadDB( - configuration, Paths.get(dbFile).getParent().toFile(), - Paths.get(dbFile).getFileName().toString(), new SCMDBDefinition()); + configuration, db.getParentFile(), + db.getName(), new SCMDBDefinition()); removeExpiredCertificates(dbStore); - } catch (IOException e) { - System.out.println("Error trying to open file: " + dbFile + + } catch (Exception e) { + + System.out.println("Error trying to open file: " + dbFilePath + " failed with exception: " + e); } return null; From 541742e74243078be504ab97e1f319ed93205824 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Szabolcs=20G=C3=A1l?= Date: Fri, 9 Dec 2022 13:34:48 +0100 Subject: [PATCH 6/8] HDDS-7398.
Prevent invalid paths and directories --- .../hadoop/hdds/scm/cli/cert/CleanExpired.java | 17 ++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpired.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpired.java index 3feab95c96aa..b5a2ec523f15 100644 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpired.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpired.java @@ -27,6 +27,8 @@ import org.apache.hadoop.hdds.utils.db.DBStore; import org.apache.hadoop.hdds.utils.db.Table; import org.apache.hadoop.hdds.utils.db.TableIterator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import picocli.CommandLine; import java.io.File; @@ -47,6 +49,8 @@ versionProvider = HddsVersionProvider.class) public class CleanExpired implements Callable<Void>, SubcommandWithParent { + private static final Logger LOG = LoggerFactory.getLogger(CleanExpired.class); + @CommandLine.Option(names = {"--db"}, required = true, description = "Database file path") @@ -64,7 +68,11 @@ public Void call() { File db = new File(dbFilePath); if (!db.exists()) { - System.out.println("DB path does not exist: " + dbFilePath); + LOG.error("DB path does not exist: " + dbFilePath); + return null; + } + if (!db.isDirectory()) { + LOG.error("DB path does not point to a directory: " + dbFilePath); return null; } @@ -74,8 +82,7 @@ db.getName(), new SCMDBDefinition()); removeExpiredCertificates(dbStore); } catch (Exception e) { - - System.out.println("Error trying to open file: " + dbFilePath + + LOG.error("Error trying to open file: " + dbFilePath + " failed with exception: " + e); } return null; @@ -92,13 +99,13 @@ void removeExpiredCertificates(DBStore dbStore) { Table.KeyValue<BigInteger, X509Certificate> certPair = tableIterator.next(); X509Certificate certificate = (X509Certificate) certPair.getValue(); if (Instant.now().isAfter(certificate.getNotAfter().toInstant())) { - System.out.println("Certificate with id " + certPair.getKey() + + LOG.info("Certificate with id " + certPair.getKey() + " and value: " + certificate + " will be deleted"); tableIterator.removeFromDB(); } } } catch (IOException e) { - System.out.println("Error when trying to open " + + LOG.error("Error when trying to open " + "certificate table from db: " + e); } } From f42ba32e57cc4b85679dfbac487d018b68ebe957 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Szabolcs=20G=C3=A1l?= Date: Fri, 9 Dec 2022 13:42:56 +0100 Subject: [PATCH 7/8] HDDS-7398.
Remove unnecessary changes --- .../hdds/scm/metadata/SCMMetadataStore.java | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMMetadataStore.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMMetadataStore.java index d01e4c7af69a..46b19aa09040 100644 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMMetadataStore.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMMetadataStore.java @@ -17,27 +17,28 @@ */ package org.apache.hadoop.hdds.scm.metadata; -import com.google.common.annotations.VisibleForTesting; +import java.io.IOException; +import java.math.BigInteger; +import java.security.cert.X509Certificate; + import com.google.protobuf.ByteString; import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction; import org.apache.hadoop.hdds.scm.container.ContainerID; import org.apache.hadoop.hdds.scm.container.ContainerInfo; import org.apache.hadoop.hdds.scm.container.common.helpers.MoveDataNodePair; +import org.apache.hadoop.hdds.security.x509.certificate.CertInfo; +import org.apache.hadoop.hdds.utils.DBStoreHAManager; import org.apache.hadoop.hdds.scm.pipeline.Pipeline; import org.apache.hadoop.hdds.scm.pipeline.PipelineID; -import org.apache.hadoop.hdds.security.x509.certificate.CertInfo; import org.apache.hadoop.hdds.security.x509.certificate.authority.CertificateStore; import org.apache.hadoop.hdds.security.x509.crl.CRLInfo; -import org.apache.hadoop.hdds.utils.DBStoreHAManager; import org.apache.hadoop.hdds.utils.db.BatchOperationHandler; import org.apache.hadoop.hdds.utils.db.DBStore; import org.apache.hadoop.hdds.utils.db.Table; import org.apache.hadoop.hdds.utils.db.TableIterator; -import java.io.IOException; -import java.math.BigInteger; -import java.security.cert.X509Certificate; +import com.google.common.annotations.VisibleForTesting; /** * Generic interface for data stores for SCM. @@ -128,8 +129,7 @@ public interface SCMMetadataStore extends DBStoreHAManager { * @return Iterator * @throws IOException on failure. */ - TableIterator<BigInteger, ? extends Table.KeyValue<BigInteger, X509Certificate>> getAllCerts( - CertificateStore.CertType certType) + TableIterator getAllCerts(CertificateStore.CertType certType) throws IOException; /** From d60ec4c77d0c81ed1a6e4eda76b0f9ba2fe0982a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Szabolcs=20G=C3=A1l?= Date: Thu, 15 Dec 2022 16:45:33 +0100 Subject: [PATCH 8/8] HDDS-7398. Remove whitespace change --- .../apache/hadoop/hdds/scm/metadata/SCMMetadataStoreImpl.java | 1 + 1 file changed, 1 insertion(+) diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMMetadataStoreImpl.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMMetadataStoreImpl.java index 16ba0503d02f..d342d4872085 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMMetadataStoreImpl.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMMetadataStoreImpl.java @@ -65,6 +65,7 @@ /** * A RocksDB based implementation of SCM Metadata Store. + * */ public class SCMMetadataStoreImpl implements SCMMetadataStore {
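Appendix (editorial illustration, not part of the series): after patch 8 the tool is a picocli Callable hooked into the cert subcommands via SubcommandWithParent, so it would presumably be reached through the admin CLI along the lines of "ozone admin cert clean --db <path-to-scm-db>"; that exact invocation is an assumption, not something the patches state. A minimal, self-contained sketch of the picocli pattern CleanExpired follows; every name here is invented for the demo:

import java.util.concurrent.Callable;

import picocli.CommandLine;

@CommandLine.Command(name = "demo-clean", mixinStandardHelpOptions = true,
    description = "Minimal Callable-style picocli command, for illustration.")
public class DemoCleanCommand implements Callable<Integer> {

  // Mirrors the single --db option the final CleanExpired exposes.
  @CommandLine.Option(names = {"--db"}, required = true,
      description = "Database directory path")
  private String dbPath;

  @Override
  public Integer call() {
    System.out.println("Would scan for expired certificates under: " + dbPath);
    return 0;
  }

  public static void main(String[] args) {
    // picocli parses args, injects the option, then invokes call().
    System.exit(new CommandLine(new DemoCleanCommand()).execute(args));
  }
}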