From 98ff63d44913ef9230f8a71d4a39a4a772b81884 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Fri, 17 Jan 2025 12:23:18 +0530 Subject: [PATCH 01/70] PBE: POC for initializing STK and a default implementation for key provider --- .gitignore | 4 + .../org/apache/hadoop/hbase/HConstants.java | 12 ++ .../hadoop/hbase/io/crypto/Encryption.java | 3 + .../hbase/io/crypto/KeyStoreKeyProvider.java | 10 +- .../hadoop/hbase/io/crypto/PBEKeyData.java | 70 +++++++ .../hbase/io/crypto/PBEKeyProvider.java | 29 +++ .../hadoop/hbase/io/crypto/PBEKeyStatus.java | 28 +++ .../io/crypto/PBEKeyStoreKeyProvider.java | 80 +++++++ .../apache/hadoop/hbase/util/GsonUtil.java | 11 + .../io/crypto/TestKeyStoreKeyProvider.java | 65 ++++-- .../io/crypto/TestPBEKeyStoreKeyProvider.java | 147 +++++++++++++ .../hbase/master/ClusterKeyManager.java | 196 ++++++++++++++++++ .../apache/hadoop/hbase/master/HMaster.java | 4 + .../hadoop/hbase/master/KeyMetaSchema.java | 27 +++ .../hadoop/hbase/master/MasterFileSystem.java | 8 + .../hadoop/hbase/master/SplitWALManager.java | 1 - 16 files changed, 672 insertions(+), 23 deletions(-) create mode 100644 hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java create mode 100644 hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java create mode 100644 hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStatus.java create mode 100644 hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java create mode 100644 hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java create mode 100644 hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterKeyManager.java create mode 100644 hbase-server/src/main/java/org/apache/hadoop/hbase/master/KeyMetaSchema.java diff --git a/.gitignore b/.gitignore index 52d169dd5ad7..89e60343cc28 100644 --- a/.gitignore +++ b/.gitignore @@ -25,3 +25,7 @@ linklint/ **/*.log tmp **/.flattened-pom.xml +.*.sw* +ID +filenametags +tags diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java index eba3eb657ea5..670bda523919 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java @@ -1192,6 +1192,11 @@ public enum OperationStatusCode { /** Temporary directory used for table creation and deletion */ public static final String HBASE_TEMP_DIRECTORY = ".tmp"; + /** + * Directory used for storing master keys for the cluster + */ + public static final String CLUSTER_KEYS_DIRECTORY = ".cluster_keys"; + public static final String CLUSTER_KEY_FILE_PREFIX = "cluster_key."; /** * The period (in milliseconds) between computing region server point in time metrics */ @@ -1284,6 +1289,13 @@ public enum OperationStatusCode { /** Configuration key for the name of the master key for the cluster, a string */ public static final String CRYPTO_MASTERKEY_NAME_CONF_KEY = "hbase.crypto.master.key.name"; + public static final String CRYPTO_PBE_MASTERKEY_NAME_CONF_KEY = + "hbase.crypto.pbe.master.key.name"; + + public static final String CRYPTO_PBE_ENABLED_CONF_KEY = "hbase.crypto.pbe.enabled"; + + public static final String CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX = "hbase.crypto.pbe.prefix."; + /** Configuration key for the name of the alternate master key for the cluster, a string */ public static final String CRYPTO_MASTERKEY_ALTERNATE_NAME_CONF_KEY = "hbase.crypto.master.alternate.key.name"; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java index 13e335b82ee3..5dac1af2c27a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java @@ -561,6 +561,9 @@ public static KeyProvider getKeyProvider(Configuration conf) { provider = (KeyProvider) ReflectionUtils .newInstance(getClassLoaderForClass(KeyProvider.class).loadClass(providerClassName), conf); provider.init(providerParameters); + if (provider instanceof PBEKeyProvider) { + ((PBEKeyProvider) provider).initConfig(conf); + } if (LOG.isDebugEnabled()) { LOG.debug("Installed " + providerClassName + " into key provider cache"); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.java index 604bede13b20..c401d3b3f6b9 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.java @@ -76,6 +76,8 @@ @InterfaceAudience.Public public class KeyStoreKeyProvider implements KeyProvider { + private static final char[] NO_PASSWORD = new char[0]; + protected KeyStore store; protected char[] password; // can be null if no password protected Properties passwordFile; // can be null if no file provided @@ -172,9 +174,15 @@ protected char[] getAliasPassword(String alias) { @Override public Key getKey(String alias) { + // First try with no password, as it is more common to have a password only for the store. try { - return store.getKey(alias, getAliasPassword(alias)); + return store.getKey(alias, NO_PASSWORD); } catch (UnrecoverableKeyException e) { + try { + return store.getKey(alias, getAliasPassword(alias)); + } catch (UnrecoverableKeyException|NoSuchAlgorithmException|KeyStoreException e2) { + // Ignore. + } throw new RuntimeException(e); } catch (KeyStoreException e) { throw new RuntimeException(e); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java new file mode 100644 index 000000000000..f69201c10a7e --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.io.crypto; + +import org.apache.commons.lang3.builder.EqualsBuilder; +import org.apache.commons.lang3.builder.HashCodeBuilder; +import org.apache.yetus.audience.InterfaceAudience; +import java.security.Key; + +@InterfaceAudience.Public +public class PBEKeyData { + private byte[] pbe_prefix; + private Key theKey; + private PBEKeyStatus keyStatus; + private String keyMetadata; + + public PBEKeyData(byte[] pbe_prefix, Key theKey, PBEKeyStatus keyStatus, String keyMetadata) { + this.pbe_prefix = pbe_prefix; + this.theKey = theKey; + this.keyStatus = keyStatus; + this.keyMetadata = keyMetadata; + } + + public byte[] getPbe_prefix() { + return pbe_prefix; + } + + public Key getTheKey() { + return theKey; + } + + public PBEKeyStatus getKeyStatus() { + return keyStatus; + } + + public String getKeyMetadata() { + return keyMetadata; + } + + @Override public boolean equals(Object o) { + if (this == o) return true; + + if (o == null || getClass() != o.getClass()) return false; + + PBEKeyData that = (PBEKeyData) o; + + return new EqualsBuilder().append(pbe_prefix, that.pbe_prefix).append(theKey, that.theKey) + .append(keyStatus, that.keyStatus).append(keyMetadata, that.keyMetadata).isEquals(); + } + + @Override public int hashCode() { + return new HashCodeBuilder(17, 37).append(pbe_prefix).append( + theKey).append(keyStatus).append(keyMetadata).toHashCode(); + } +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java new file mode 100644 index 000000000000..44e6945a2ff3 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.io.crypto; + +import org.apache.hadoop.conf.Configuration; +import org.apache.yetus.audience.InterfaceAudience; + +@InterfaceAudience.Public +public interface PBEKeyProvider extends KeyProvider { + void initConfig(Configuration conf); + PBEKeyData getClusterKey(byte[] clusterId); + PBEKeyData getPBEKey(byte[] pbe_prefix); + PBEKeyData unwrapKey(String keyMetaData); +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStatus.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStatus.java new file mode 100644 index 000000000000..853ab59e2c2b --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStatus.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.io.crypto; + +import org.apache.yetus.audience.InterfaceAudience; + +@InterfaceAudience.Public +public enum PBEKeyStatus { + ACTIVE, + INACTIVE, + FAILED, + DISABLED, +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java new file mode 100644 index 000000000000..a44afa2e6475 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java @@ -0,0 +1,80 @@ +package org.apache.hadoop.hbase.io.crypto; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.util.GsonUtil; +import org.apache.yetus.audience.InterfaceAudience; +import java.security.Key; +import java.util.Base64; +import java.util.HashMap; +import java.util.Map; + +@InterfaceAudience.Public +public class PBEKeyStoreKeyProvider extends KeyStoreKeyProvider implements PBEKeyProvider { + public static final String KEY_METADATA_ALIAS = "KeyAlias"; + public static final String KEY_METADATA_PREFIX = "PBE_PREFIX"; + + private Configuration conf; + + @Override public void initConfig(Configuration conf) { + this.conf = conf; + } + + @Override public PBEKeyData getClusterKey(byte[] clusterId) { + checkConfig(); + String masterKeyAlias = conf.get(HConstants.CRYPTO_PBE_MASTERKEY_NAME_CONF_KEY, null); + if (masterKeyAlias == null) { + throw new RuntimeException("No alias configured for master key"); + } + Key key = getKey(masterKeyAlias); + if (key == null) { + throw new RuntimeException("Unable to find cluster key with alias: " + masterKeyAlias); + } + // Encode clusterId too for consistency with that of PBE prefixes. + String keyMetadata = generateKeyMetadata(masterKeyAlias, + Base64.getEncoder().encodeToString(clusterId)); + return new PBEKeyData(clusterId, key, PBEKeyStatus.ACTIVE, keyMetadata); + } + + @Override public PBEKeyData getPBEKey(byte[] pbe_prefix) { + checkConfig(); + String encodedPrefix = Base64.getEncoder().encodeToString(pbe_prefix); + String aliasConfKey = HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encodedPrefix + "." + + "alias"; + String keyAlias = conf.get(aliasConfKey, null); + if (keyAlias != null) { + String keyMetadata = generateKeyMetadata(keyAlias, encodedPrefix); + return unwrapKey(keyMetadata); + } + return null; + } + + @Override public PBEKeyData unwrapKey(String keyMetadataStr) { + Map keyMetadata = GsonUtil.getDefaultInstance().fromJson(keyMetadataStr, + HashMap.class); + String alias = keyMetadata.get(KEY_METADATA_ALIAS); + Key key = getKey(alias); + if (key != null) { + String encodedPrefix = keyMetadata.get(KEY_METADATA_PREFIX); + String activeStatusConfKey = HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encodedPrefix + + ".active"; + boolean isActive = conf.getBoolean(activeStatusConfKey, true); + return new PBEKeyData(Base64.getDecoder().decode(encodedPrefix), key, + isActive ? PBEKeyStatus.ACTIVE : PBEKeyStatus.INACTIVE, keyMetadataStr); + } + return null; + } + + private String generateKeyMetadata(String aliasName, String encodedPrefix) { + return GsonUtil.getDefaultInstance().toJson(new HashMap() {{ + put(KEY_METADATA_ALIAS, aliasName); + put(KEY_METADATA_PREFIX, encodedPrefix); + }}, HashMap.class); + } + + private void checkConfig() { + if (conf == null) { + throw new IllegalStateException("initConfig is not called or config is null"); + } + } +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/GsonUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/GsonUtil.java index e592b1f935a1..adb6536efbb1 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/GsonUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/GsonUtil.java @@ -19,6 +19,7 @@ import java.io.IOException; import java.util.concurrent.atomic.LongAdder; +import org.apache.hbase.thirdparty.com.google.gson.Gson; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hbase.thirdparty.com.google.gson.GsonBuilder; @@ -33,6 +34,8 @@ @InterfaceAudience.Private public final class GsonUtil { + private static Gson INSTANCE; + private GsonUtil() { } @@ -62,4 +65,12 @@ public LongAdder read(JsonReader in) throws IOException { public static GsonBuilder createGsonWithDisableHtmlEscaping() { return createGson().disableHtmlEscaping(); } + + public static Gson getDefaultInstance() { + if (INSTANCE == null) { + INSTANCE = createGson().create(); + + } + return INSTANCE; + } } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java index 581681988c28..de91aa904581 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java @@ -26,6 +26,8 @@ import java.security.Key; import java.security.KeyStore; import java.security.MessageDigest; +import java.util.Arrays; +import java.util.Collection; import java.util.Properties; import javax.crypto.spec.SecretKeySpec; import org.apache.hadoop.hbase.HBaseClassTestRule; @@ -33,12 +35,15 @@ import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.BeforeClass; +import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; @Category({ MiscTests.class, SmallTests.class }) +@RunWith(Parameterized.class) public class TestKeyStoreKeyProvider { @ClassRule @@ -53,14 +58,33 @@ public class TestKeyStoreKeyProvider { static File storeFile; static File passwordFile; - @BeforeClass - public static void setUp() throws Exception { + protected KeyProvider provider; + + @Parameterized.Parameter(0) + public boolean withPasswordOnAlias; + @Parameterized.Parameter(1) + public boolean withPasswordFile; + + @Parameterized.Parameters(name = "withPasswordOnAlias={0} withPasswordFile={1}") + public static Collection parameters() { + return Arrays.asList(new Object[][] { + { Boolean.TRUE, Boolean.TRUE }, + { Boolean.TRUE, Boolean.FALSE }, + { Boolean.FALSE, Boolean.TRUE }, + { Boolean.FALSE, Boolean.FALSE }, + }); + } + + @Before + public void setUp() throws Exception { KEY = MessageDigest.getInstance("SHA-256").digest(Bytes.toBytes(ALIAS)); // Create a JKECS store containing a test secret key KeyStore store = KeyStore.getInstance("JCEKS"); store.load(null, PASSWORD.toCharArray()); store.setEntry(ALIAS, new KeyStore.SecretKeyEntry(new SecretKeySpec(KEY, "AES")), - new KeyStore.PasswordProtection(PASSWORD.toCharArray())); + new KeyStore.PasswordProtection(withPasswordOnAlias ? PASSWORD.toCharArray() : new char[0])); + Properties p = new Properties(); + addCustomEntries(store, p); // Create the test directory String dataDir = TEST_UTIL.getDataTestDir().toString(); new File(dataDir).mkdirs(); @@ -73,8 +97,6 @@ public static void setUp() throws Exception { os.close(); } // Write the password file - Properties p = new Properties(); - p.setProperty(ALIAS, PASSWORD); passwordFile = new File(dataDir, "keystore.pw"); os = new FileOutputStream(passwordFile); try { @@ -82,26 +104,27 @@ public static void setUp() throws Exception { } finally { os.close(); } - } - @Test - public void testKeyStoreKeyProviderWithPassword() throws Exception { - KeyProvider provider = new KeyStoreKeyProvider(); - provider.init("jceks://" + storeFile.toURI().getPath() + "?password=" + PASSWORD); - Key key = provider.getKey(ALIAS); - assertNotNull(key); - byte[] keyBytes = key.getEncoded(); - assertEquals(keyBytes.length, KEY.length); - for (int i = 0; i < KEY.length; i++) { - assertEquals(keyBytes[i], KEY[i]); + provider = createProvider(); + if (withPasswordFile) { + provider.init("jceks://" + storeFile.toURI().getPath() + "?passwordFile=" + + URLEncoder.encode(passwordFile.getAbsolutePath(), "UTF-8")); + } + else { + provider.init("jceks://" + storeFile.toURI().getPath() + "?password=" + PASSWORD); } } + protected KeyProvider createProvider() { + return new KeyStoreKeyProvider(); + } + + protected void addCustomEntries(KeyStore store, Properties passwdProps) throws Exception { + passwdProps.setProperty(ALIAS, PASSWORD); + } + @Test - public void testKeyStoreKeyProviderWithPasswordFile() throws Exception { - KeyProvider provider = new KeyStoreKeyProvider(); - provider.init("jceks://" + storeFile.toURI().getPath() + "?passwordFile=" - + URLEncoder.encode(passwordFile.getAbsolutePath(), "UTF-8")); + public void testKeyStoreKeyProvider() throws Exception { Key key = provider.getKey(ALIAS); assertNotNull(key); byte[] keyBytes = key.getEncoded(); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java new file mode 100644 index 000000000000..44dc378e62dd --- /dev/null +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java @@ -0,0 +1,147 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.io.crypto; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.testclassification.MiscTests; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.GsonUtil; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import javax.crypto.spec.SecretKeySpec; +import java.security.KeyStore; +import java.security.MessageDigest; +import java.util.Base64; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; +import java.util.UUID; +import static org.apache.hadoop.hbase.io.crypto.PBEKeyStoreKeyProvider.KEY_METADATA_ALIAS; +import static org.apache.hadoop.hbase.io.crypto.PBEKeyStoreKeyProvider.KEY_METADATA_PREFIX; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +@Category({ MiscTests.class, SmallTests.class }) +@RunWith(Parameterized.class) +public class TestPBEKeyStoreKeyProvider extends TestKeyStoreKeyProvider { + + private static final String MASTER_KEY_ALIAS = "master-alias"; + + private Configuration conf = HBaseConfiguration.create(); + + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestPBEKeyStoreKeyProvider.class); + private int nPrefixes = 2; + + private PBEKeyProvider pbeProvider; + + private Map prefix2key = new HashMap<>(); + private Map prefix2alias = new HashMap<>(); + private String clusterId; + private byte[] masterKey; + + @Before + public void setUp() throws Exception { + super.setUp();; + pbeProvider = (PBEKeyProvider) provider; + pbeProvider.initConfig(conf); + } + + protected KeyProvider createProvider() { + return new PBEKeyStoreKeyProvider(); + } + + protected void addCustomEntries(KeyStore store, Properties passwdProps) throws Exception { + super.addCustomEntries(store, passwdProps); + for (int i = 0; i < nPrefixes; ++i) { + String prefix = "prefix+ " + i; + String alias = prefix + "-alias"; + byte[] key = MessageDigest.getInstance("SHA-256").digest(Bytes.toBytes(alias)); + prefix2alias.put(new Bytes(prefix.getBytes()), alias); + prefix2key.put(new Bytes(prefix.getBytes()), new Bytes(key)); + store.setEntry(alias, new KeyStore.SecretKeyEntry(new SecretKeySpec(key, "AES")), + new KeyStore.PasswordProtection(withPasswordOnAlias ? PASSWORD.toCharArray() : new char[0])); + + String encPrefix = Base64.getEncoder().encodeToString(prefix.getBytes()); + String confKey = HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encPrefix + "." + "alias"; + conf.set(confKey, alias); + + passwdProps.setProperty(alias, PASSWORD); + + clusterId = UUID.randomUUID().toString(); + masterKey = MessageDigest.getInstance("SHA-256").digest( + Bytes.toBytes(MASTER_KEY_ALIAS)); + store.setEntry(MASTER_KEY_ALIAS, new KeyStore.SecretKeyEntry( + new SecretKeySpec(masterKey, "AES")), + new KeyStore.PasswordProtection(withPasswordOnAlias ? PASSWORD.toCharArray() : + new char[0])); + + conf.set(HConstants.CRYPTO_PBE_MASTERKEY_NAME_CONF_KEY, MASTER_KEY_ALIAS); + + passwdProps.setProperty(MASTER_KEY_ALIAS, PASSWORD); + } + } + + private void addEntry(String alias, String prefix) { + String encPrefix = Base64.getEncoder().encodeToString(prefix.getBytes()); + String confKey = HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encPrefix + "." + "alias"; + conf.set(confKey, alias); + } + + @Test + public void testGetPBEKey() throws Exception { + for (Bytes prefix: prefix2key.keySet()) { + PBEKeyData keyData = pbeProvider.getPBEKey(prefix.get()); + assertPBEKeyData(keyData, PBEKeyStatus.ACTIVE, prefix2key.get(prefix).get(), prefix.get(), + prefix2alias.get(prefix)); + } + } + + @Test + public void testGetClusterKey() throws Exception { + PBEKeyData clusterKeyData = pbeProvider.getClusterKey(clusterId.getBytes()); + assertPBEKeyData(clusterKeyData, PBEKeyStatus.ACTIVE, masterKey, clusterId.getBytes(), + MASTER_KEY_ALIAS); + } + + private void assertPBEKeyData(PBEKeyData keyData, PBEKeyStatus expKeyStatus, byte[] key, + byte[] prefixBytes, String alias) { + assertNotNull(keyData); + assertEquals(expKeyStatus, keyData.getKeyStatus()); + byte[] keyBytes = keyData.getTheKey().getEncoded(); + assertEquals(key.length, keyBytes.length); + assertEquals(new Bytes(key), keyBytes); + Map keyMetadata = GsonUtil.getDefaultInstance().fromJson(keyData.getKeyMetadata(), + HashMap.class); + assertNotNull(keyMetadata); + assertEquals(new Bytes(prefixBytes), keyData.getPbe_prefix()); + assertEquals(alias, keyMetadata.get(KEY_METADATA_ALIAS)); + assertEquals(Base64.getEncoder().encodeToString(prefixBytes), + keyMetadata.get(KEY_METADATA_PREFIX)); + assertEquals(keyData, pbeProvider.unwrapKey(keyData.getKeyMetadata())); + } +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterKeyManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterKeyManager.java new file mode 100644 index 000000000000..78c5417c95a6 --- /dev/null +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterKeyManager.java @@ -0,0 +1,196 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.master; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.UUID; +import org.apache.hadoop.fs.FSDataInputStream; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.io.crypto.Encryption; +import org.apache.hadoop.hbase.io.crypto.KeyProvider; +import org.apache.hadoop.hbase.io.crypto.PBEKeyData; +import org.apache.hadoop.hbase.io.crypto.PBEKeyProvider; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import static org.apache.hadoop.hbase.HConstants.CLUSTER_KEY_FILE_PREFIX; + +@InterfaceAudience.Private +public class ClusterKeyManager { + private static final Logger LOG = LoggerFactory.getLogger(ClusterKeyManager.class); + + private final MasterServices master; + private Boolean pbeEnabled; + + public ClusterKeyManager(MasterServices master) throws IOException { + this.master = master; + } + + public void ensureClusterKeyInitialized() throws IOException { + if (! isPBEEnabled()) { + return; + } + List clusterKeys = getAllClusterKeys(); + if (clusterKeys.size() == 0) { + LOG.info("Initializing Cluster Key for the first time"); + // Double check for cluster key as another HMaster might have succeeded. + if (rotateClusterKey(null) == null && getAllClusterKeys().size() == 0) { + throw new RuntimeException("Failed to generate or save Cluster Key"); + } + } + else if (rotateClusterKeyIfChanged() != null) { + LOG.info("Cluster key has been rotated"); + } + else { + LOG.info("Cluster key is already initialized and unchanged"); + } + } + + private boolean isPBEEnabled() { + if (pbeEnabled == null) { + pbeEnabled = master.getConfiguration().getBoolean(HConstants.CRYPTO_PBE_ENABLED_CONF_KEY, + false); + } + return pbeEnabled; + } + + public PBEKeyData rotateClusterKeyIfChanged() throws IOException { + if (! isPBEEnabled()) { + return null; + } + Path latestFile = getLatestClusterKeyFile(); + FSDataInputStream fin = master.getMasterFileSystem().getFileSystem().open(latestFile); + try { + String latestKeyMeata = fin.readUTF(); + PBEKeyData rotatedKey = rotateClusterKey(latestKeyMeata); + if (rotatedKey != null && ! latestKeyMeata.equals(rotatedKey.getKeyMetadata())) { + return rotatedKey; + } + } + finally { + fin.close();; + } + return null; + } + + public PBEKeyData rotateClusterKey(String currentKeyMetadata) throws IOException { + if (! isPBEEnabled()) { + return null; + } + PBEKeyProvider provider = getKeyProvider(); + PBEKeyData clusterKey = provider.getClusterKey( + master.getMasterFileSystem().getClusterId().toString().getBytes()); + if (clusterKey != null && clusterKey.getKeyMetadata() != null && + ! clusterKey.getKeyMetadata().equals(currentKeyMetadata) && + saveLatestClusterKey(clusterKey.getKeyMetadata())) { + return clusterKey; + } + return null; + } + + public Path getLatestClusterKeyFile() throws IOException { + if (! isPBEEnabled()) { + return null; + } + int currentMaxSeqNum = findLatestKeySequence(getAllClusterKeys()); + return new Path(master.getMasterFileSystem().getClusterKeyDir(), + CLUSTER_KEY_FILE_PREFIX + currentMaxSeqNum); + } + + public List getAllClusterKeys() throws IOException { + if (! isPBEEnabled()) { + return null; + } + MasterFileSystem masterFS = master.getMasterFileSystem(); + Path clusterKeyDir = masterFS.getClusterKeyDir(); + FileSystem fs = masterFS.getFileSystem(); + List clusterKeys = new ArrayList<>(); + for (FileStatus st: fs.globStatus(new Path(clusterKeyDir, CLUSTER_KEY_FILE_PREFIX + "*"))) { + Path keyPath = st.getPath(); + extractClusterKeySeqNum(keyPath); // Just check for validity. + clusterKeys.add(keyPath); + } + return clusterKeys; + } + + private int findLatestKeySequence(List clusterKeys) throws IOException { + int maxKeySeq = -1; + for (Path keyPath: clusterKeys) { + if (keyPath.getName().startsWith(CLUSTER_KEY_FILE_PREFIX)) { + int keySeq = Integer.valueOf(keyPath.getName().substring(CLUSTER_KEY_FILE_PREFIX.length())); + if (keySeq > maxKeySeq) { + maxKeySeq = keySeq; + } + } + } + return maxKeySeq; + } + + private boolean saveLatestClusterKey(String keyMetadata) throws IOException { + int nextClusterKeySeq = findLatestKeySequence(getAllClusterKeys()) + 1; + LOG.info("Trying to save a new cluster key at seq: ", nextClusterKeySeq); + MasterFileSystem masterFS = master.getMasterFileSystem(); + Path nextClusterKeyPath = new Path(masterFS.getClusterKeyDir(), + CLUSTER_KEY_FILE_PREFIX + nextClusterKeySeq); + Path tempClusterKeyFile = new Path(masterFS.getTempDir(), + nextClusterKeyPath.getName() + UUID.randomUUID().toString()); + FSDataOutputStream fsDataOutputStream = masterFS.getFileSystem().create(tempClusterKeyFile); + try { + fsDataOutputStream.writeUTF(keyMetadata); + boolean succeeded = masterFS.getFileSystem().rename(tempClusterKeyFile, nextClusterKeyPath); + if (succeeded) { + LOG.info("Cluster key save succeeded for seq: {}", nextClusterKeySeq); + } + else { + LOG.error("Cluster key save failed for seq: {}", nextClusterKeySeq); + } + return succeeded; + } + finally { + fsDataOutputStream.close(); + masterFS.getFileSystem().delete(tempClusterKeyFile, false); + } + } + + private int extractClusterKeySeqNum(Path keyPath) throws IOException { + if (keyPath.getName().startsWith(CLUSTER_KEY_FILE_PREFIX)) { + try { + return Integer.valueOf(keyPath.getName().substring(CLUSTER_KEY_FILE_PREFIX.length())); + } + catch (NumberFormatException e) { + LOG.error("Invalid file name for a cluster key: {}", keyPath, e); + } + } + throw new IOException("Couldn't parse key file name: " + keyPath.getName()); + } + + private PBEKeyProvider getKeyProvider() { + KeyProvider provider = Encryption.getKeyProvider(master.getConfiguration()); + if (!(provider instanceof PBEKeyProvider)) { + throw new RuntimeException("KeyProvider: " + provider.getClass().getName() + + " expected to be of type PBEKeyProvider"); + } + return (PBEKeyProvider) provider; + } +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java index 7ccbb4d614ab..2bad0bc8399e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java @@ -355,6 +355,7 @@ public class HMaster extends HBaseServerBase implements Maste // file system manager for the master FS operations private MasterFileSystem fileSystemManager; private MasterWalManager walManager; + private ClusterKeyManager clusterKeyManager; // manager to manage procedure-based WAL splitting, can be null if current // is zk-based WAL splitting. SplitWALManager will replace SplitLogManager @@ -992,6 +993,9 @@ private void finishActiveMasterInitialization() throws IOException, InterruptedE ZKClusterId.setClusterId(this.zooKeeper, fileSystemManager.getClusterId()); this.clusterId = clusterId.toString(); + clusterKeyManager = new ClusterKeyManager(this); + clusterKeyManager.ensureClusterKeyInitialized(); + // Precaution. Put in place the old hbck1 lock file to fence out old hbase1s running their // hbck1s against an hbase2 cluster; it could do damage. To skip this behavior, set // hbase.write.hbck1.lock.file to false. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/KeyMetaSchema.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/KeyMetaSchema.java new file mode 100644 index 000000000000..ce85e11f7518 --- /dev/null +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/KeyMetaSchema.java @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.master; + +import org.apache.yetus.audience.InterfaceAudience; + +/** + * API for managing the keys in key_meta table. + */ +@InterfaceAudience.Public +public interface KeyMetaSchema { +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java index 5a43cd98feb9..a0b345a48207 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java @@ -66,6 +66,7 @@ public class MasterFileSystem { private final FileSystem walFs; // root log directory on the FS private final Path rootdir; + private final Path clusterKeyDir; // hbase temp directory used for table construction and deletion private final Path tempdir; // root hbase directory on the FS @@ -96,6 +97,7 @@ public MasterFileSystem(Configuration conf) throws IOException { // default localfs. Presumption is that rootdir is fully-qualified before // we get to here with appropriate fs scheme. this.rootdir = CommonFSUtils.getRootDir(conf); + this.clusterKeyDir = new Path(this.rootdir, HConstants.CLUSTER_KEYS_DIRECTORY); this.tempdir = new Path(this.rootdir, HConstants.HBASE_TEMP_DIRECTORY); // Cover both bases, the old way of setting default fs and the new. // We're supposed to run on 0.20 and 0.21 anyways. @@ -134,6 +136,7 @@ private void createInitialFileSystemLayout() throws IOException { HConstants.CORRUPT_DIR_NAME, ReplicationUtils.REMOTE_WAL_DIR_NAME }; // check if the root directory exists checkRootDir(this.rootdir, conf, this.fs); + checkRootDir(this.clusterKeyDir, conf, this.fs); // Check the directories under rootdir. checkTempDir(this.tempdir, conf, this.fs); @@ -158,6 +161,7 @@ private void createInitialFileSystemLayout() throws IOException { if (isSecurityEnabled) { fs.setPermission(new Path(rootdir, HConstants.VERSION_FILE_NAME), secureRootFilePerms); fs.setPermission(new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME), secureRootFilePerms); + fs.setPermission(clusterKeyDir, secureRootFilePerms); } FsPermission currentRootPerms = fs.getFileStatus(this.rootdir).getPermission(); if ( @@ -196,6 +200,10 @@ public Path getWALRootDir() { return this.walRootDir; } + public Path getClusterKeyDir() { + return clusterKeyDir; + } + /** Returns the directory for a give {@code region}. */ public Path getRegionDir(RegionInfo region) { return FSUtils.getRegionDirFromRootDir(getRootDir(), region); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitWALManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitWALManager.java index 18dfc7d493bf..99a373c8262f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitWALManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitWALManager.java @@ -21,7 +21,6 @@ import static org.apache.hadoop.hbase.HConstants.HBASE_SPLIT_WAL_MAX_SPLITTER; import static org.apache.hadoop.hbase.master.MasterWalManager.META_FILTER; import static org.apache.hadoop.hbase.master.MasterWalManager.NON_META_FILTER; - import java.io.IOException; import java.util.Arrays; import java.util.Collections; From 4e426a8a14cbc08eb40efbf9db562598b6702c8f Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Tue, 28 Jan 2025 16:58:03 +0530 Subject: [PATCH 02/70] PBE: POC for creating keymeta table and providing admin service --- .../hbase/keymeta/KeyMetaAdminClient.java | 34 +++++ .../hadoop/hbase/io/crypto/PBEKeyData.java | 28 +++++ .../hbase/io/crypto/PBEKeyProvider.java | 5 +- .../hadoop/hbase/io/crypto/PBEKeyStatus.java | 34 ++++- .../io/crypto/PBEKeyStoreKeyProvider.java | 36 +++--- .../hadoop/hbase/keymeta/KeyMetaAdmin.java | 27 ++++ .../hadoop/hbase/util/CommonFSUtils.java | 4 + .../io/crypto/TestPBEKeyStoreKeyProvider.java | 2 +- .../src/main/protobuf/server/PBEAdmin.proto | 46 +++++++ .../hbase/keymeta/ClusterKeyAccessor.java | 100 +++++++++++++++ .../hadoop/hbase/keymeta/ClusterKeyCache.java | 60 +++++++++ .../hadoop/hbase/keymeta/KeyMetaManager.java | 57 +++++++++ .../hbase/keymeta/KeyMetaMasterService.java | 67 ++++++++++ .../hbase/keymeta/KeyMetaServiceEndpoint.java | 93 ++++++++++++++ .../hbase/keymeta/KeyMetaTableAccessor.java | 116 ++++++++++++++++++ .../hadoop/hbase/keymeta/PBEKeyManager.java | 57 +++++++++ .../hbase/master/ClusterKeyManager.java | 116 +++--------------- .../apache/hadoop/hbase/master/HMaster.java | 5 + .../hadoop/hbase/master/MasterFileSystem.java | 6 +- hbase-shell/src/main/ruby/hbase/hbase.rb | 4 + hbase-shell/src/main/ruby/hbase/pbe_admin.rb | 36 ++++++ hbase-shell/src/main/ruby/hbase_constants.rb | 1 + hbase-shell/src/main/ruby/shell.rb | 14 +++ hbase-shell/src/main/ruby/shell/commands.rb | 4 + .../main/ruby/shell/commands/pbe_enable.rb | 34 +++++ 25 files changed, 857 insertions(+), 129 deletions(-) create mode 100644 hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaAdminClient.java create mode 100644 hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaAdmin.java create mode 100644 hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto create mode 100644 hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ClusterKeyAccessor.java create mode 100644 hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ClusterKeyCache.java create mode 100644 hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaManager.java create mode 100644 hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaMasterService.java create mode 100644 hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaServiceEndpoint.java create mode 100644 hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaTableAccessor.java create mode 100644 hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyManager.java create mode 100644 hbase-shell/src/main/ruby/hbase/pbe_admin.rb create mode 100644 hbase-shell/src/main/ruby/shell/commands/pbe_enable.rb diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaAdminClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaAdminClient.java new file mode 100644 index 000000000000..29c4b847018d --- /dev/null +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaAdminClient.java @@ -0,0 +1,34 @@ +package org.apache.hadoop.hbase.keymeta; + +import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos; +import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminRequest; +import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminResponse; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.io.IOException; + +@InterfaceAudience.Public +public class KeyMetaAdminClient implements KeyMetaAdmin { + private static final Logger LOG = LoggerFactory.getLogger(KeyMetaAdminClient.class); + private PBEAdminProtos.PBEAdminService.BlockingInterface stub; + + public KeyMetaAdminClient(Connection conn) throws IOException { + this.stub = PBEAdminProtos.PBEAdminService.newBlockingStub(conn.getAdmin().coprocessorService()); + } + + @Override public PBEKeyStatus enablePBE(String pbePrefix) throws IOException { + try { + PBEAdminResponse pbeAdminResponse = stub.enablePBE(null, + PBEAdminRequest.newBuilder().setPbePrefix(pbePrefix).build()); + LOG.info("Got response: " + pbeAdminResponse); + return PBEKeyStatus.forValue((byte) pbeAdminResponse.getPbeStatus().getNumber()); + } catch (ServiceException e) { + throw ProtobufUtil.handleRemoteException(e); + } + } +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java index f69201c10a7e..77228f0bf8a0 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java @@ -19,8 +19,11 @@ import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; +import org.apache.hadoop.util.DataChecksum; import org.apache.yetus.audience.InterfaceAudience; import java.security.Key; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; @InterfaceAudience.Public public class PBEKeyData { @@ -28,6 +31,8 @@ public class PBEKeyData { private Key theKey; private PBEKeyStatus keyStatus; private String keyMetadata; + private volatile long keyChecksum = 0; + private byte[] keyMetadataHash; public PBEKeyData(byte[] pbe_prefix, Key theKey, PBEKeyStatus keyStatus, String keyMetadata) { this.pbe_prefix = pbe_prefix; @@ -67,4 +72,27 @@ public String getKeyMetadata() { return new HashCodeBuilder(17, 37).append(pbe_prefix).append( theKey).append(keyStatus).append(keyMetadata).toHashCode(); } + + public long getKeyChecksum() { + if (keyChecksum == 0) { + DataChecksum dataChecksum = DataChecksum.newDataChecksum(DataChecksum.Type.CRC32C, 16); + byte[] data = theKey.getEncoded(); + dataChecksum.update(data, 0, data.length); + keyChecksum = dataChecksum.getValue(); + } + return keyChecksum; + } + + public byte[] getKeyMetadataHash() { + if (keyMetadataHash == null) { + MessageDigest md5; + try { + md5 = MessageDigest.getInstance("MD5"); + } catch (NoSuchAlgorithmException e) { + throw new RuntimeException(e); + } + keyMetadataHash = md5.digest(keyMetadata.getBytes()); + } + return keyMetadataHash; + } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java index 44e6945a2ff3..2bc6568e4c31 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java @@ -19,11 +19,12 @@ import org.apache.hadoop.conf.Configuration; import org.apache.yetus.audience.InterfaceAudience; +import java.io.IOException; @InterfaceAudience.Public public interface PBEKeyProvider extends KeyProvider { void initConfig(Configuration conf); PBEKeyData getClusterKey(byte[] clusterId); - PBEKeyData getPBEKey(byte[] pbe_prefix); - PBEKeyData unwrapKey(String keyMetaData); + PBEKeyData getPBEKey(byte[] pbe_prefix) throws IOException; + PBEKeyData unwrapKey(String keyMetaData) throws IOException; } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStatus.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStatus.java index 853ab59e2c2b..46e9931c97b4 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStatus.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStatus.java @@ -18,11 +18,37 @@ package org.apache.hadoop.hbase.io.crypto; import org.apache.yetus.audience.InterfaceAudience; +import java.util.HashMap; +import java.util.Map; @InterfaceAudience.Public public enum PBEKeyStatus { - ACTIVE, - INACTIVE, - FAILED, - DISABLED, + ACTIVE((byte) 1), + INACTIVE((byte) 2), + FAILED((byte) 3), + DISABLED((byte) 4), + ; + + private static Map lookupByVal; + + private final byte val; + + PBEKeyStatus(byte val) { + this.val = val; + } + + public byte getVal() { + return val; + } + + public static PBEKeyStatus forValue(byte val) { + if (lookupByVal == null) { + Map tbl = new HashMap<>(); + for (PBEKeyStatus e: PBEKeyStatus.values()) { + tbl.put(e.getVal(), e); + } + lookupByVal = tbl; + } + return lookupByVal.get(val); + } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java index a44afa2e6475..c2bcdf12f40e 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java @@ -4,6 +4,7 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.util.GsonUtil; import org.apache.yetus.audience.InterfaceAudience; +import java.io.IOException; import java.security.Key; import java.util.Base64; import java.util.HashMap; @@ -36,33 +37,38 @@ public class PBEKeyStoreKeyProvider extends KeyStoreKeyProvider implements PBEKe return new PBEKeyData(clusterId, key, PBEKeyStatus.ACTIVE, keyMetadata); } - @Override public PBEKeyData getPBEKey(byte[] pbe_prefix) { + @Override public PBEKeyData getPBEKey(byte[] pbe_prefix) throws IOException { checkConfig(); String encodedPrefix = Base64.getEncoder().encodeToString(pbe_prefix); String aliasConfKey = HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encodedPrefix + "." + "alias"; - String keyAlias = conf.get(aliasConfKey, null); - if (keyAlias != null) { - String keyMetadata = generateKeyMetadata(keyAlias, encodedPrefix); - return unwrapKey(keyMetadata); - } - return null; + String keyMetadata = generateKeyMetadata(conf.get(aliasConfKey, null), encodedPrefix); + return unwrapKey(keyMetadata); } - @Override public PBEKeyData unwrapKey(String keyMetadataStr) { + @Override public PBEKeyData unwrapKey(String keyMetadataStr) throws IOException { Map keyMetadata = GsonUtil.getDefaultInstance().fromJson(keyMetadataStr, HashMap.class); + String encodedPrefix = keyMetadata.get(KEY_METADATA_PREFIX); + String activeStatusConfKey = HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encodedPrefix + + ".active"; + boolean isActive = conf.getBoolean(activeStatusConfKey, true); + byte[] pbe_prefix; + try { + pbe_prefix = Base64.getDecoder().decode(encodedPrefix); + } + catch (IllegalArgumentException e) { + throw new IOException("Failed to decode specified prefix as Base64 string: " + + encodedPrefix, e); + } String alias = keyMetadata.get(KEY_METADATA_ALIAS); - Key key = getKey(alias); + Key key = alias != null ? getKey(alias) : null; if (key != null) { - String encodedPrefix = keyMetadata.get(KEY_METADATA_PREFIX); - String activeStatusConfKey = HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encodedPrefix + - ".active"; - boolean isActive = conf.getBoolean(activeStatusConfKey, true); - return new PBEKeyData(Base64.getDecoder().decode(encodedPrefix), key, + return new PBEKeyData(pbe_prefix, key, isActive ? PBEKeyStatus.ACTIVE : PBEKeyStatus.INACTIVE, keyMetadataStr); } - return null; + return new PBEKeyData(pbe_prefix, null, + isActive ? PBEKeyStatus.FAILED : PBEKeyStatus.DISABLED, keyMetadataStr); } private String generateKeyMetadata(String aliasName, String encodedPrefix) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaAdmin.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaAdmin.java new file mode 100644 index 000000000000..14bd04d033b3 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaAdmin.java @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.keymeta; + +import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus; +import org.apache.yetus.audience.InterfaceAudience; +import java.io.IOException; + +@InterfaceAudience.Public +public interface KeyMetaAdmin { + PBEKeyStatus enablePBE(String pbePrefix) throws IOException; +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java index fe6f3bc238a9..a2ffee3f8e69 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java @@ -297,6 +297,10 @@ public static void setRootDir(final Configuration c, final Path root) { c.set(HConstants.HBASE_DIR, root.toString()); } + public static Path getClusterKeyDir(final Configuration c) throws IOException { + return new Path(getRootDir(c), HConstants.CLUSTER_KEYS_DIRECTORY); + } + public static void setFsDefault(final Configuration c, final Path root) { c.set("fs.defaultFS", root.toString()); // for hadoop 0.21+ } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java index 44dc378e62dd..2025433d157c 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java @@ -129,7 +129,7 @@ public void testGetClusterKey() throws Exception { } private void assertPBEKeyData(PBEKeyData keyData, PBEKeyStatus expKeyStatus, byte[] key, - byte[] prefixBytes, String alias) { + byte[] prefixBytes, String alias) throws Exception { assertNotNull(keyData); assertEquals(expKeyStatus, keyData.getKeyStatus()); byte[] keyBytes = keyData.getTheKey().getEncoded(); diff --git a/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto b/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto new file mode 100644 index 000000000000..4975ee5f70cb --- /dev/null +++ b/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto @@ -0,0 +1,46 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +syntax = "proto2"; +package hbase.pb; + +option java_package = "org.apache.hadoop.hbase.protobuf.generated"; +option java_outer_classname = "PBEAdminProtos"; +option java_generic_services = true; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; + +message PBEAdminRequest { + required string pbe_prefix = 1; +} + +enum PBEKeyStatus { + ACTIVE = 1; + INACTIVE = 2; + FAILED = 3; + DISABLED = 4; +} + +message PBEAdminResponse { + required string pbe_prefix = 1; + required PBEKeyStatus pbe_status = 2; +} + +service PBEAdminService { + rpc EnablePBE(PBEAdminRequest) + returns (PBEAdminResponse); +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ClusterKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ClusterKeyAccessor.java new file mode 100644 index 000000000000..11d956eb6a5b --- /dev/null +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ClusterKeyAccessor.java @@ -0,0 +1,100 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.keymeta; + +import org.apache.hadoop.fs.FSDataInputStream; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.Server; +import org.apache.hadoop.hbase.io.crypto.PBEKeyData; +import org.apache.hadoop.hbase.io.crypto.PBEKeyProvider; +import org.apache.hadoop.hbase.util.CommonFSUtils; +import org.apache.yetus.audience.InterfaceAudience; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import static org.apache.hadoop.hbase.HConstants.CLUSTER_KEY_FILE_PREFIX; + +@InterfaceAudience.Private +public class ClusterKeyAccessor extends PBEKeyManager { + protected final Path clusterKeyDir; + + public ClusterKeyAccessor(Server server) throws IOException { + super(server); + this.clusterKeyDir = CommonFSUtils.getClusterKeyDir(server.getConfiguration()); + } + + public Path getLatestClusterKeyFile() throws IOException { + if (! isPBEEnabled()) { + return null; + } + int currentMaxSeqNum = findLatestKeySequence(getAllClusterKeys()); + return new Path(clusterKeyDir, CLUSTER_KEY_FILE_PREFIX + currentMaxSeqNum); + } + + public List getAllClusterKeys() throws IOException { + if (! isPBEEnabled()) { + return null; + } + FileSystem fs = server.getFileSystem(); + List clusterKeys = new ArrayList<>(); + for (FileStatus st: fs.globStatus(new Path(clusterKeyDir, CLUSTER_KEY_FILE_PREFIX + "*"))) { + Path keyPath = st.getPath(); + extractClusterKeySeqNum(keyPath); // Just check for validity. + clusterKeys.add(keyPath); + } + return clusterKeys; + } + + public PBEKeyData loadClusterKey(Path keyPath) throws IOException { + PBEKeyProvider provider = getKeyProvider(); + return provider.unwrapKey(loadKeyMetadata(keyPath)); + } + + public int extractClusterKeySeqNum(Path keyPath) throws IOException { + if (keyPath.getName().startsWith(CLUSTER_KEY_FILE_PREFIX)) { + try { + return Integer.valueOf(keyPath.getName().substring(CLUSTER_KEY_FILE_PREFIX.length())); + } + catch (NumberFormatException e) { + LOG.error("Invalid file name for a cluster key: {}", keyPath, e); + } + } + throw new IOException("Couldn't parse key file name: " + keyPath.getName()); + } + + protected int findLatestKeySequence(List clusterKeys) throws IOException { + int maxKeySeq = -1; + for (Path keyPath: clusterKeys) { + if (keyPath.getName().startsWith(CLUSTER_KEY_FILE_PREFIX)) { + int keySeq = Integer.valueOf(keyPath.getName().substring(CLUSTER_KEY_FILE_PREFIX.length())); + if (keySeq > maxKeySeq) { + maxKeySeq = keySeq; + } + } + } + return maxKeySeq; + } + + protected String loadKeyMetadata(Path keyPath) throws IOException { + try (FSDataInputStream fin = server.getFileSystem().open(keyPath)) { + return fin.readUTF(); + } + } +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ClusterKeyCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ClusterKeyCache.java new file mode 100644 index 000000000000..0a563b3dcd68 --- /dev/null +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ClusterKeyCache.java @@ -0,0 +1,60 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.keymeta; + +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.io.crypto.PBEKeyData; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +@InterfaceAudience.Private +public class ClusterKeyCache { + private static final Logger LOG = LoggerFactory.getLogger(ClusterKeyCache.class); + + private final ClusterKeyAccessor accessor; + private PBEKeyData latestClusterKey; + private Map clusterKeys = new HashMap<>(); + + public ClusterKeyCache(ClusterKeyAccessor accessor) throws IOException { + this.accessor = accessor; + + List allClusterKeys = accessor.getAllClusterKeys(); + int latestKeySequence = accessor.findLatestKeySequence(allClusterKeys); + for (Path keyPath: allClusterKeys) { + LOG.info("Loading cluster key from: {}", keyPath); + PBEKeyData keyData = accessor.loadClusterKey(keyPath); + if (accessor.extractClusterKeySeqNum(keyPath) == latestKeySequence) { + latestClusterKey = keyData; + } + clusterKeys.put(keyData.getKeyChecksum(), keyData); + } + } + + public PBEKeyData getLatestClusterKey() { + return latestClusterKey; + } + + public PBEKeyData getClusterKeyByChecksum(long checksum) { + return clusterKeys.get(checksum); + } +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaManager.java new file mode 100644 index 000000000000..62b69aadab7b --- /dev/null +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaManager.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.keymeta; + +import org.apache.hadoop.hbase.io.crypto.PBEKeyData; +import org.apache.hadoop.hbase.io.crypto.PBEKeyProvider; +import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus; +import org.apache.hadoop.hbase.master.MasterServices; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.io.IOException; +import java.util.Base64; + +@InterfaceAudience.Private +public class KeyMetaManager extends KeyMetaTableAccessor implements KeyMetaAdmin { + private static final Logger LOG = LoggerFactory.getLogger(KeyMetaManager.class); + + public KeyMetaManager(MasterServices master) { + super(master); + } + + @Override public PBEKeyStatus enablePBE(String pbePrefix) throws IOException { + if (! isPBEEnabled()) { + throw new IOException("PBE is currently not enabled in HBase configuration"); + } + LOG.info("Trying to enable PBE on key: {}", pbePrefix); + byte[] pbe_prefix; + try { + pbe_prefix = Base64.getDecoder().decode(pbePrefix); + } + catch (IllegalArgumentException e) { + throw new IOException("Failed to decode specified prefix as Base64 string: " + pbePrefix, e); + } + PBEKeyProvider provider = getKeyProvider(); + // TODO: key provider already decodes, should the param type be changed to encoded prefix? + PBEKeyData pbeKey = provider.getPBEKey(pbe_prefix); + LOG.info("Got key data with status: {} for prefix: {}", pbeKey.getKeyStatus(), pbePrefix); + addKey(pbeKey); + return pbeKey.getKeyStatus(); + } +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaMasterService.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaMasterService.java new file mode 100644 index 000000000000..be72f1b25f6c --- /dev/null +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaMasterService.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.keymeta; + +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; +import org.apache.hadoop.hbase.client.TableDescriptorBuilder; +import org.apache.hadoop.hbase.master.MasterServices; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.io.IOException; + +@InterfaceAudience.Private +public class KeyMetaMasterService { + private static final Logger LOG = LoggerFactory.getLogger(KeyMetaMasterService.class); + + private final MasterServices master; + Boolean pbeEnabled; + + private static final TableDescriptorBuilder TABLE_DESCRIPTOR_BUILDER = TableDescriptorBuilder + .newBuilder(KeyMetaTableAccessor.KEY_META_TABLE_NAME).setRegionReplication(1) + .setPriority(HConstants.SYSTEMTABLE_QOS) + .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder( + KeyMetaTableAccessor.KEY_META_INFO_FAMILY) + .setScope(HConstants.REPLICATION_SCOPE_LOCAL).setMaxVersions(1) + .setInMemory(true) + .build()); + + public KeyMetaMasterService(MasterServices masterServices) { + this.master = masterServices; + } + + public void init() throws IOException { + if (!isPBEEnabled()) { + return; + } + if (!master.getTableDescriptors().exists(KeyMetaTableAccessor.KEY_META_TABLE_NAME)) { + LOG.info("{} table not found. Creating.", + KeyMetaTableAccessor.KEY_META_TABLE_NAME.getNameWithNamespaceInclAsString()); + this.master.createSystemTable(TABLE_DESCRIPTOR_BUILDER.build()); + } + } + + private boolean isPBEEnabled() { + if (pbeEnabled == null) { + pbeEnabled = master.getConfiguration().getBoolean(HConstants.CRYPTO_PBE_ENABLED_CONF_KEY, + false); + } + return pbeEnabled; + } +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaServiceEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaServiceEndpoint.java new file mode 100644 index 000000000000..0d40d724b09d --- /dev/null +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaServiceEndpoint.java @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.keymeta; + +import com.google.protobuf.RpcCallback; +import com.google.protobuf.RpcController; +import com.google.protobuf.Service; +import org.apache.hadoop.hbase.CoprocessorEnvironment; +import org.apache.hadoop.hbase.coprocessor.CoreCoprocessor; +import org.apache.hadoop.hbase.coprocessor.HasMasterServices; +import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor; +import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus; +import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils; +import org.apache.hadoop.hbase.master.MasterServices; +import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos; +import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminRequest; +import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminResponse; +import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminService; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.io.IOException; +import java.util.Base64; +import java.util.Collections; + +@CoreCoprocessor +@InterfaceAudience.Private +public class KeyMetaServiceEndpoint implements MasterCoprocessor { + private static final Logger LOG = LoggerFactory.getLogger(KeyMetaServiceEndpoint.class); + + private MasterServices master = null; + private KeyMetaManager keyMetaManager; + + private final PBEAdminService pbeAdminService = new KeyMetaAdminServiceImpl(); + + @Override public void start(CoprocessorEnvironment env) throws IOException { + if (!(env instanceof HasMasterServices)) { + throw new IOException("Does not implement HMasterServices"); + } + + master = ((HasMasterServices) env).getMasterServices(); + keyMetaManager = new KeyMetaManager(master); + } + + @Override + public Iterable getServices() { + return Collections.singleton(pbeAdminService); + } + + private class KeyMetaAdminServiceImpl extends PBEAdminService { + + @Override + public void enablePBE(RpcController controller, PBEAdminRequest request, + RpcCallback done) { + PBEAdminResponse.Builder builder = + PBEAdminResponse.newBuilder().setPbePrefix(request.getPbePrefix()); + byte[] pbe_prefix = null; + try { + pbe_prefix = Base64.getDecoder().decode(request.getPbePrefix()); + } + catch (IllegalArgumentException e) { + builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.FAILED); + CoprocessorRpcUtils.setControllerException(controller, new IOException( + "Failed to decode specified prefix as Base64 string: " + request.getPbePrefix(), e)); + } + if (pbe_prefix != null) { + try { + PBEKeyStatus pbeKeyStatus = keyMetaManager.enablePBE(request.getPbePrefix()); + builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.valueOf(pbeKeyStatus.getVal())); + } catch (IOException e) { + CoprocessorRpcUtils.setControllerException(controller, e); + builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.FAILED); + } + } + done.run(builder.build()); + } + } +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaTableAccessor.java new file mode 100644 index 000000000000..fa5ead093ebc --- /dev/null +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaTableAccessor.java @@ -0,0 +1,116 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.keymeta; + +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.NamespaceDescriptor; +import org.apache.hadoop.hbase.Server; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.client.Durability; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Table; +import org.apache.hadoop.hbase.io.crypto.PBEKeyData; +import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; +import org.apache.yetus.audience.InterfaceAudience; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +@InterfaceAudience.Private +public class KeyMetaTableAccessor extends PBEKeyManager { + private static final String KEY_META_INFO_FAMILY_STR = "info"; + + public static final byte[] KEY_META_INFO_FAMILY = Bytes.toBytes(KEY_META_INFO_FAMILY_STR); + + public static final TableName KEY_META_TABLE_NAME = + TableName.valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "keymeta"); + + public static final String PBE_PREFIX_QUAL_NAME = "pbe_prefix"; + public static final byte[] PBE_PREFIX_QUAL_BYTES = Bytes.toBytes(PBE_PREFIX_QUAL_NAME); + + public static final String DEK_METADATA_QUAL_NAME = "dek_metadata"; + public static final byte[] DEK_METADATA_QUAL_BYTES = Bytes.toBytes(DEK_METADATA_QUAL_NAME); + + public static final String DEK_CHECKSUM_QUAL_NAME = "dek_checksum"; + public static final byte[] DEK_CHECKSUM_QUAL_BYTES = Bytes.toBytes(DEK_CHECKSUM_QUAL_NAME); + + public static final String DEK_WRAPPED_BY_STK_QUAL_NAME = "dek_wrapped_by_stk"; + public static final byte[] DEK_WRAPPED_BY_STK_QUAL_BYTES = Bytes.toBytes(DEK_WRAPPED_BY_STK_QUAL_NAME); + + public static final String STK_CHECKSUM_QUAL_NAME = "stk_checksum"; + public static final byte[] STK_CHECKSUM_QUAL_BYTES = Bytes.toBytes(STK_CHECKSUM_QUAL_NAME); + + public static final String REFRESHED_TIMESTAMP_QUAL_NAME = "refreshed_timestamp"; + public static final byte[] REFRESHED_TIMESTAMP_QUAL_BYTES = Bytes.toBytes(REFRESHED_TIMESTAMP_QUAL_NAME); + + public static final String KEY_STATUS_QUAL_NAME = "key_status"; + public static final byte[] KEY_STATUS_QUAL_BYTES = Bytes.toBytes(KEY_STATUS_QUAL_NAME); + + public KeyMetaTableAccessor(Server server) { + super(server); + } + + public void addKey(PBEKeyData keyData) throws IOException { + long refreshTime = EnvironmentEdgeManager.currentTime(); + final Put putForPrefix = addMutationColumns(new Put(keyData.getPbe_prefix()), keyData, + refreshTime); + final Put putForMetadata = addMutationColumns(new Put(constructRowKeyForMetadata(keyData)), + keyData, refreshTime); + + Connection connection = server.getConnection(); + try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { + table.put(Arrays.asList(putForPrefix, putForMetadata)); + } + } + + private Put addMutationColumns(Put put, PBEKeyData keyData, long refreshTime) { + if (keyData.getTheKey() != null) { + put.addColumn(KEY_META_INFO_FAMILY, DEK_CHECKSUM_QUAL_BYTES, + Bytes.toBytes(keyData.getKeyChecksum())); + } + return put.setDurability(Durability.SKIP_WAL) + .setPriority(HConstants.SYSTEMTABLE_QOS) + .addColumn(KEY_META_INFO_FAMILY, DEK_METADATA_QUAL_BYTES, keyData.getKeyMetadata().getBytes()) + //.addColumn(KEY_META_INFO_FAMILY, DEK_WRAPPED_BY_STK_QUAL_BYTES, null) + //.addColumn(KEY_META_INFO_FAMILY, STK_CHECKSUM_QUAL_BYTES, null) + .addColumn(KEY_META_INFO_FAMILY, REFRESHED_TIMESTAMP_QUAL_BYTES, Bytes.toBytes(refreshTime)) + .addColumn(KEY_META_INFO_FAMILY, KEY_STATUS_QUAL_BYTES, + new byte[] { keyData.getKeyStatus().getVal() }) + ; + } + + private byte[] constructRowKeyForMetadata(PBEKeyData keyData) { + byte[] pbePrefix = keyData.getPbe_prefix(); + int prefixLength = pbePrefix.length; + byte[] keyMetadataHash = keyData.getKeyMetadataHash(); + return Bytes.add(Bytes.toBytes(prefixLength), pbePrefix, keyMetadataHash); + } + + private byte[] extractPBEPrefix(byte[] rowkey) { + int prefixLength = Bytes.toInt(rowkey); + return Bytes.copy(rowkey, Bytes.SIZEOF_INT, prefixLength); + } + + private byte[] extractKeyMetadataHash(byte[] rowkey, byte[] pbePreefix) { + return Bytes.copy(rowkey, Bytes.SIZEOF_INT + pbePreefix.length, rowkey.length); + } +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyManager.java new file mode 100644 index 000000000000..ac315d6eb1b8 --- /dev/null +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyManager.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.keymeta; + +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.io.crypto.Encryption; +import org.apache.hadoop.hbase.io.crypto.KeyProvider; +import org.apache.hadoop.hbase.io.crypto.PBEKeyProvider; +import org.apache.hadoop.hbase.Server; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@InterfaceAudience.Private +public class PBEKeyManager { + protected static final Logger LOG = LoggerFactory.getLogger(PBEKeyManager.class); + + protected final Server server; + + private Boolean pbeEnabled; + + public PBEKeyManager(Server server) { + this.server = server; + } + + protected PBEKeyProvider getKeyProvider() { + KeyProvider provider = Encryption.getKeyProvider(server.getConfiguration()); + if (!(provider instanceof PBEKeyProvider)) { + throw new RuntimeException( + "KeyProvider: " + provider.getClass().getName() + " expected to be of type PBEKeyProvider"); + } + return (PBEKeyProvider) provider; + } + + protected boolean isPBEEnabled() { + if (pbeEnabled == null) { + pbeEnabled = server.getConfiguration().getBoolean(HConstants.CRYPTO_PBE_ENABLED_CONF_KEY, + false); + } + return pbeEnabled; + } +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterKeyManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterKeyManager.java index 78c5417c95a6..ed54231e93d7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterKeyManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterKeyManager.java @@ -18,32 +18,23 @@ package org.apache.hadoop.hbase.master; import java.io.IOException; -import java.util.ArrayList; import java.util.List; import java.util.UUID; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; -import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.io.crypto.Encryption; -import org.apache.hadoop.hbase.io.crypto.KeyProvider; import org.apache.hadoop.hbase.io.crypto.PBEKeyData; import org.apache.hadoop.hbase.io.crypto.PBEKeyProvider; +import org.apache.hadoop.hbase.keymeta.ClusterKeyAccessor; import org.apache.yetus.audience.InterfaceAudience; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import static org.apache.hadoop.hbase.HConstants.CLUSTER_KEY_FILE_PREFIX; @InterfaceAudience.Private -public class ClusterKeyManager { - private static final Logger LOG = LoggerFactory.getLogger(ClusterKeyManager.class); - +public class ClusterKeyManager extends ClusterKeyAccessor { private final MasterServices master; - private Boolean pbeEnabled; public ClusterKeyManager(MasterServices master) throws IOException { + super(master); this.master = master; } @@ -52,10 +43,10 @@ public void ensureClusterKeyInitialized() throws IOException { return; } List clusterKeys = getAllClusterKeys(); - if (clusterKeys.size() == 0) { + if (clusterKeys.isEmpty()) { LOG.info("Initializing Cluster Key for the first time"); // Double check for cluster key as another HMaster might have succeeded. - if (rotateClusterKey(null) == null && getAllClusterKeys().size() == 0) { + if (rotateClusterKey(null) == null && getAllClusterKeys().isEmpty()) { throw new RuntimeException("Failed to generate or save Cluster Key"); } } @@ -67,34 +58,16 @@ else if (rotateClusterKeyIfChanged() != null) { } } - private boolean isPBEEnabled() { - if (pbeEnabled == null) { - pbeEnabled = master.getConfiguration().getBoolean(HConstants.CRYPTO_PBE_ENABLED_CONF_KEY, - false); - } - return pbeEnabled; - } - public PBEKeyData rotateClusterKeyIfChanged() throws IOException { if (! isPBEEnabled()) { return null; } Path latestFile = getLatestClusterKeyFile(); - FSDataInputStream fin = master.getMasterFileSystem().getFileSystem().open(latestFile); - try { - String latestKeyMeata = fin.readUTF(); - PBEKeyData rotatedKey = rotateClusterKey(latestKeyMeata); - if (rotatedKey != null && ! latestKeyMeata.equals(rotatedKey.getKeyMetadata())) { - return rotatedKey; - } - } - finally { - fin.close();; - } - return null; + String latestKeyMeta = loadKeyMetadata(latestFile); + return rotateClusterKey(latestKeyMeta); } - public PBEKeyData rotateClusterKey(String currentKeyMetadata) throws IOException { + private PBEKeyData rotateClusterKey(String currentKeyMetadata) throws IOException { if (! isPBEEnabled()) { return null; } @@ -109,88 +82,27 @@ public PBEKeyData rotateClusterKey(String currentKeyMetadata) throws IOException return null; } - public Path getLatestClusterKeyFile() throws IOException { - if (! isPBEEnabled()) { - return null; - } - int currentMaxSeqNum = findLatestKeySequence(getAllClusterKeys()); - return new Path(master.getMasterFileSystem().getClusterKeyDir(), - CLUSTER_KEY_FILE_PREFIX + currentMaxSeqNum); - } - - public List getAllClusterKeys() throws IOException { - if (! isPBEEnabled()) { - return null; - } - MasterFileSystem masterFS = master.getMasterFileSystem(); - Path clusterKeyDir = masterFS.getClusterKeyDir(); - FileSystem fs = masterFS.getFileSystem(); - List clusterKeys = new ArrayList<>(); - for (FileStatus st: fs.globStatus(new Path(clusterKeyDir, CLUSTER_KEY_FILE_PREFIX + "*"))) { - Path keyPath = st.getPath(); - extractClusterKeySeqNum(keyPath); // Just check for validity. - clusterKeys.add(keyPath); - } - return clusterKeys; - } - - private int findLatestKeySequence(List clusterKeys) throws IOException { - int maxKeySeq = -1; - for (Path keyPath: clusterKeys) { - if (keyPath.getName().startsWith(CLUSTER_KEY_FILE_PREFIX)) { - int keySeq = Integer.valueOf(keyPath.getName().substring(CLUSTER_KEY_FILE_PREFIX.length())); - if (keySeq > maxKeySeq) { - maxKeySeq = keySeq; - } - } - } - return maxKeySeq; - } - private boolean saveLatestClusterKey(String keyMetadata) throws IOException { int nextClusterKeySeq = findLatestKeySequence(getAllClusterKeys()) + 1; - LOG.info("Trying to save a new cluster key at seq: ", nextClusterKeySeq); + LOG.info("Trying to save a new cluster key at seq: {}", nextClusterKeySeq); MasterFileSystem masterFS = master.getMasterFileSystem(); - Path nextClusterKeyPath = new Path(masterFS.getClusterKeyDir(), + Path nextClusterKeyPath = new Path(clusterKeyDir, CLUSTER_KEY_FILE_PREFIX + nextClusterKeySeq); Path tempClusterKeyFile = new Path(masterFS.getTempDir(), - nextClusterKeyPath.getName() + UUID.randomUUID().toString()); - FSDataOutputStream fsDataOutputStream = masterFS.getFileSystem().create(tempClusterKeyFile); - try { + nextClusterKeyPath.getName() + UUID.randomUUID()); + try (FSDataOutputStream fsDataOutputStream = masterFS.getFileSystem() + .create(tempClusterKeyFile)) { fsDataOutputStream.writeUTF(keyMetadata); boolean succeeded = masterFS.getFileSystem().rename(tempClusterKeyFile, nextClusterKeyPath); if (succeeded) { LOG.info("Cluster key save succeeded for seq: {}", nextClusterKeySeq); - } - else { + } else { LOG.error("Cluster key save failed for seq: {}", nextClusterKeySeq); } return succeeded; } finally { - fsDataOutputStream.close(); masterFS.getFileSystem().delete(tempClusterKeyFile, false); } } - - private int extractClusterKeySeqNum(Path keyPath) throws IOException { - if (keyPath.getName().startsWith(CLUSTER_KEY_FILE_PREFIX)) { - try { - return Integer.valueOf(keyPath.getName().substring(CLUSTER_KEY_FILE_PREFIX.length())); - } - catch (NumberFormatException e) { - LOG.error("Invalid file name for a cluster key: {}", keyPath, e); - } - } - throw new IOException("Couldn't parse key file name: " + keyPath.getName()); - } - - private PBEKeyProvider getKeyProvider() { - KeyProvider provider = Encryption.getKeyProvider(master.getConfiguration()); - if (!(provider instanceof PBEKeyProvider)) { - throw new RuntimeException("KeyProvider: " + provider.getClass().getName() - + " expected to be of type PBEKeyProvider"); - } - return (PBEKeyProvider) provider; - } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java index 2bad0bc8399e..843cb413b596 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java @@ -122,6 +122,7 @@ import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils; import org.apache.hadoop.hbase.ipc.RpcServer; import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException; +import org.apache.hadoop.hbase.keymeta.KeyMetaMasterService; import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.master.MasterRpcServices.BalanceSwitchMode; import org.apache.hadoop.hbase.master.assignment.AssignmentManager; @@ -356,6 +357,7 @@ public class HMaster extends HBaseServerBase implements Maste private MasterFileSystem fileSystemManager; private MasterWalManager walManager; private ClusterKeyManager clusterKeyManager; + private KeyMetaMasterService keyMetaMasterService; // manager to manage procedure-based WAL splitting, can be null if current // is zk-based WAL splitting. SplitWALManager will replace SplitLogManager @@ -1035,6 +1037,9 @@ private void finishActiveMasterInitialization() throws IOException, InterruptedE Map, List>> procsByType = procedureExecutor .getActiveProceduresNoCopy().stream().collect(Collectors.groupingBy(p -> p.getClass())); + keyMetaMasterService = new KeyMetaMasterService(this); + keyMetaMasterService.init(); + // Create Assignment Manager this.assignmentManager = createAssignmentManager(this, masterRegion); this.assignmentManager.start(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java index a0b345a48207..4178e31d0cb2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java @@ -97,7 +97,7 @@ public MasterFileSystem(Configuration conf) throws IOException { // default localfs. Presumption is that rootdir is fully-qualified before // we get to here with appropriate fs scheme. this.rootdir = CommonFSUtils.getRootDir(conf); - this.clusterKeyDir = new Path(this.rootdir, HConstants.CLUSTER_KEYS_DIRECTORY); + this.clusterKeyDir = CommonFSUtils.getClusterKeyDir(conf); this.tempdir = new Path(this.rootdir, HConstants.HBASE_TEMP_DIRECTORY); // Cover both bases, the old way of setting default fs and the new. // We're supposed to run on 0.20 and 0.21 anyways. @@ -200,10 +200,6 @@ public Path getWALRootDir() { return this.walRootDir; } - public Path getClusterKeyDir() { - return clusterKeyDir; - } - /** Returns the directory for a give {@code region}. */ public Path getRegionDir(RegionInfo region) { return FSUtils.getRegionDirFromRootDir(getRootDir(), region); diff --git a/hbase-shell/src/main/ruby/hbase/hbase.rb b/hbase-shell/src/main/ruby/hbase/hbase.rb index a9b35ed1de21..d93adadd640f 100644 --- a/hbase-shell/src/main/ruby/hbase/hbase.rb +++ b/hbase-shell/src/main/ruby/hbase/hbase.rb @@ -59,6 +59,10 @@ def rsgroup_admin ::Hbase::RSGroupAdmin.new(self.connection) end + def pbe_admin + ::Hbase::PBEAdmin.new(@connection) + end + def taskmonitor ::Hbase::TaskMonitor.new(configuration) end diff --git a/hbase-shell/src/main/ruby/hbase/pbe_admin.rb b/hbase-shell/src/main/ruby/hbase/pbe_admin.rb new file mode 100644 index 000000000000..4f8d9f1e830f --- /dev/null +++ b/hbase-shell/src/main/ruby/hbase/pbe_admin.rb @@ -0,0 +1,36 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +include Java + +module Hbase + class PBEAdmin + def initialize(connection) + @connection = connection + @admin = org.apache.hadoop.hbase.keymeta.KeyMetaAdminClient.new(connection) + @hb_admin = @connection.getAdmin + end + + def close + @admin.close + end + + def pbe_enable(pbe_prefix) + @admin.enablePBE(pbe_prefix) + end + end +end diff --git a/hbase-shell/src/main/ruby/hbase_constants.rb b/hbase-shell/src/main/ruby/hbase_constants.rb index d4df1f8f5821..87effe071a37 100644 --- a/hbase-shell/src/main/ruby/hbase_constants.rb +++ b/hbase-shell/src/main/ruby/hbase_constants.rb @@ -138,3 +138,4 @@ def self.promote_constants(constants) require 'hbase/security' require 'hbase/visibility_labels' require 'hbase/rsgroup_admin' +require 'hbase/pbe_admin' diff --git a/hbase-shell/src/main/ruby/shell.rb b/hbase-shell/src/main/ruby/shell.rb index 46b38dd96b89..30e272291821 100644 --- a/hbase-shell/src/main/ruby/shell.rb +++ b/hbase-shell/src/main/ruby/shell.rb @@ -150,6 +150,10 @@ def hbase_rsgroup_admin @rsgroup_admin ||= hbase.rsgroup_admin end + def hbase_pbe_admin + @pbe_admin ||= hbase.pbe_admin + end + ## # Create singleton methods on the target receiver object for all the loaded commands # @@ -615,6 +619,16 @@ def self.exception_handler(hide_traceback) ] ) +Shell.load_command_group( + 'pbe', + full_name: 'PBE', + comment: "NOTE: The PBE KeyMeta Coprocessor Endpoint must be enabled on the Master else commands fail with: + UnknownProtocolException: No registered Master Coprocessor Endpoint found for PBEAdminService", + commands: %w[ + pbe_enable + ] +) + Shell.load_command_group( 'rsgroup', full_name: 'RSGroups', diff --git a/hbase-shell/src/main/ruby/shell/commands.rb b/hbase-shell/src/main/ruby/shell/commands.rb index a40f737e7908..3b9a11a0962f 100644 --- a/hbase-shell/src/main/ruby/shell/commands.rb +++ b/hbase-shell/src/main/ruby/shell/commands.rb @@ -105,6 +105,10 @@ def rsgroup_admin @shell.hbase_rsgroup_admin end + def pbe_admin + @shell.hbase_pbe_admin + end + #---------------------------------------------------------------------- # Creates formatter instance first time and then reuses it. def formatter diff --git a/hbase-shell/src/main/ruby/shell/commands/pbe_enable.rb b/hbase-shell/src/main/ruby/shell/commands/pbe_enable.rb new file mode 100644 index 000000000000..e74fdc607083 --- /dev/null +++ b/hbase-shell/src/main/ruby/shell/commands/pbe_enable.rb @@ -0,0 +1,34 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +module Shell + module Commands + class PbeEnable < Command + def help + <<-EOF +Enable PBE for a given prefix (in Base64 format) +EOF + end + + def command(pbe_prefix) + formatter.header(['KEY', 'STATUS']) + status = pbe_admin.pbe_enable(pbe_prefix) + formatter.row([pbe_prefix, status.toString()]) + end + end + end +end From 537aadbc4297754b257e1a2ee42ff63c283f47f5 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Sun, 30 Mar 2025 22:16:16 +0530 Subject: [PATCH 03/70] Fix compilation errors --- .../hadoop/hbase/keymeta/KeyMetaAdminClient.java | 4 ++-- .../src/main/protobuf/server/PBEAdmin.proto | 8 ++++---- .../hadoop/hbase/keymeta/KeyMetaServiceEndpoint.java | 10 +++++----- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaAdminClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaAdminClient.java index 29c4b847018d..9a590efe3cb0 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaAdminClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaAdminClient.java @@ -1,12 +1,12 @@ package org.apache.hadoop.hbase.keymeta; -import com.google.protobuf.ServiceException; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus; -import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos; import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminRequest; import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminResponse; +import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; +import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto b/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto index 4975ee5f70cb..a86a6bbf1991 100644 --- a/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto +++ b/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto @@ -29,10 +29,10 @@ message PBEAdminRequest { } enum PBEKeyStatus { - ACTIVE = 1; - INACTIVE = 2; - FAILED = 3; - DISABLED = 4; + PBE_ACTIVE = 1; + PBE_INACTIVE = 2; + PBE_FAILED = 3; + PBE_DISABLED = 4; } message PBEAdminResponse { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaServiceEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaServiceEndpoint.java index 0d40d724b09d..5978e2c01a8f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaServiceEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaServiceEndpoint.java @@ -17,9 +17,6 @@ */ package org.apache.hadoop.hbase.keymeta; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.CoreCoprocessor; import org.apache.hadoop.hbase.coprocessor.HasMasterServices; @@ -31,6 +28,9 @@ import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminRequest; import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminResponse; import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminService; +import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback; +import org.apache.hbase.thirdparty.com.google.protobuf.RpcController; +import org.apache.hbase.thirdparty.com.google.protobuf.Service; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -74,7 +74,7 @@ public void enablePBE(RpcController controller, PBEAdminRequest request, pbe_prefix = Base64.getDecoder().decode(request.getPbePrefix()); } catch (IllegalArgumentException e) { - builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.FAILED); + builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.PBE_FAILED); CoprocessorRpcUtils.setControllerException(controller, new IOException( "Failed to decode specified prefix as Base64 string: " + request.getPbePrefix(), e)); } @@ -84,7 +84,7 @@ public void enablePBE(RpcController controller, PBEAdminRequest request, builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.valueOf(pbeKeyStatus.getVal())); } catch (IOException e) { CoprocessorRpcUtils.setControllerException(controller, e); - builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.FAILED); + builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.PBE_FAILED); } } done.run(builder.build()); From bcee4071b37c3b66185e2fd7e610bcec89812636 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Wed, 5 Mar 2025 16:25:49 +0530 Subject: [PATCH 04/70] Added support for key namespace - Also committing WIP L1 cache implementation - Added JavaDoc --- .gitignore | 1 + ...Client.java => PBEKeymetaAdminClient.java} | 8 +- .../hadoop/hbase/io/crypto/PBEKeyData.java | 173 +++++++++++++++--- .../hbase/io/crypto/PBEKeyProvider.java | 42 ++++- .../hadoop/hbase/io/crypto/PBEKeyStatus.java | 18 +- .../io/crypto/PBEKeyStoreKeyProvider.java | 9 +- ...KeyMetaAdmin.java => PBEKeymetaAdmin.java} | 21 ++- .../org/apache/hadoop/hbase/util/Bytes.java | 17 +- .../io/crypto/TestPBEKeyStoreKeyProvider.java | 2 +- .../src/main/protobuf/server/PBEAdmin.proto | 4 +- .../apache/hadoop/hbase/HBaseServerBase.java | 19 ++ .../hbase/MockRegionServerServices.java | 10 + .../java/org/apache/hadoop/hbase/Server.java | 5 + ...cessor.java => PBEClusterKeyAccessor.java} | 4 +- ...rKeyCache.java => PBEClusterKeyCache.java} | 19 +- .../hadoop/hbase/keymeta/PBEKeyAccessor.java | 77 ++++++++ .../hadoop/hbase/keymeta/PBEKeyDataCache.java | 153 ++++++++++++++++ .../hadoop/hbase/keymeta/PBEKeyManager.java | 15 +- ...aManager.java => PBEKeymetaAdminImpl.java} | 18 +- ...vice.java => PBEKeymetaMasterService.java} | 14 +- ...nt.java => PBEKeymetaServiceEndpoint.java} | 61 ++++-- ...ssor.java => PBEKeymetaTableAccessor.java} | 93 +++++++--- .../apache/hadoop/hbase/master/HMaster.java | 14 +- .../hadoop/hbase/master/KeyMetaSchema.java | 27 --- ...Manager.java => PBEClusterKeyManager.java} | 11 +- .../hadoop/hbase/regionserver/HRegion.java | 10 +- .../hbase/regionserver/HRegionServer.java | 4 +- .../regionserver/ReplicationSyncUp.java | 10 + .../hbase/master/MockNoopMasterServices.java | 10 + .../hadoop/hbase/master/MockRegionServer.java | 10 + .../regionserver/TestHeapMemoryManager.java | 10 + .../token/TestTokenAuthentication.java | 10 + .../apache/hadoop/hbase/util/MockServer.java | 10 + hbase-shell/src/main/ruby/hbase/pbe_admin.rb | 7 +- 34 files changed, 771 insertions(+), 145 deletions(-) rename hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/{KeyMetaAdminClient.java => PBEKeymetaAdminClient.java} (79%) rename hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/{KeyMetaAdmin.java => PBEKeymetaAdmin.java} (60%) rename hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/{ClusterKeyAccessor.java => PBEClusterKeyAccessor.java} (96%) rename hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/{ClusterKeyCache.java => PBEClusterKeyCache.java} (75%) create mode 100644 hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java create mode 100644 hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyDataCache.java rename hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/{KeyMetaManager.java => PBEKeymetaAdminImpl.java} (75%) rename hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/{KeyMetaMasterService.java => PBEKeymetaMasterService.java} (80%) rename hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/{KeyMetaServiceEndpoint.java => PBEKeymetaServiceEndpoint.java} (60%) rename hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/{KeyMetaTableAccessor.java => PBEKeymetaTableAccessor.java} (55%) delete mode 100644 hbase-server/src/main/java/org/apache/hadoop/hbase/master/KeyMetaSchema.java rename hbase-server/src/main/java/org/apache/hadoop/hbase/master/{ClusterKeyManager.java => PBEClusterKeyManager.java} (92%) diff --git a/.gitignore b/.gitignore index 89e60343cc28..efe9f99ae703 100644 --- a/.gitignore +++ b/.gitignore @@ -29,3 +29,4 @@ tmp ID filenametags tags +.codegenie diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaAdminClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java similarity index 79% rename from hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaAdminClient.java rename to hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java index 9a590efe3cb0..e150edc143dd 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaAdminClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java @@ -13,15 +13,15 @@ import java.io.IOException; @InterfaceAudience.Public -public class KeyMetaAdminClient implements KeyMetaAdmin { - private static final Logger LOG = LoggerFactory.getLogger(KeyMetaAdminClient.class); +public class PBEKeymetaAdminClient implements PBEKeymetaAdmin { + private static final Logger LOG = LoggerFactory.getLogger(PBEKeymetaAdminClient.class); private PBEAdminProtos.PBEAdminService.BlockingInterface stub; - public KeyMetaAdminClient(Connection conn) throws IOException { + public PBEKeymetaAdminClient(Connection conn) throws IOException { this.stub = PBEAdminProtos.PBEAdminService.newBlockingStub(conn.getAdmin().coprocessorService()); } - @Override public PBEKeyStatus enablePBE(String pbePrefix) throws IOException { + @Override public PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOException { try { PBEAdminResponse pbeAdminResponse = stub.enablePBE(null, PBEAdminRequest.newBuilder().setPbePrefix(pbePrefix).build()); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java index 77228f0bf8a0..13cb51d72041 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java @@ -19,60 +19,151 @@ import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; +import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.util.DataChecksum; +import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; import org.apache.yetus.audience.InterfaceAudience; import java.security.Key; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; +/** + * This class represents an encryption key data which includes the key itself, its status, metadata + * and a prefix. The metadata encodes enough information on the key such that it can be used to + * retrieve the exact same key again in the future. If the key status is {@link PBEKeyStatus#FAILED} + * expect the key to be {@code null}. + * + * The key data is represented by the following fields: + *
    + *
  • pbe_prefix: The prefix for which this key belongs to
  • + *
  • theKey: The key capturing the bytes and encoding
  • + *
  • keyStatus: The status of the key (see {@link PBEKeyStatus})
  • + *
  • keyMetadata: Metadata that identifies the key
  • + *
+ * + * The class provides methods to retrieve, as well as to compute a checksum + * for the key data. The checksum is used to ensure the integrity of the key data. + * + * The class also provides a method to generate an MD5 hash of the key metadata, which can be used + * for validation and identification. + */ @InterfaceAudience.Public public class PBEKeyData { + public static final String KEY_NAMESPACE_GLOBAL = "*"; + private byte[] pbe_prefix; + private String key_namespace; private Key theKey; private PBEKeyStatus keyStatus; private String keyMetadata; + private long refreshTimestamp; private volatile long keyChecksum = 0; private byte[] keyMetadataHash; - public PBEKeyData(byte[] pbe_prefix, Key theKey, PBEKeyStatus keyStatus, String keyMetadata) { + /** + * Constructs a new instance with the given parameters. + * + * @param pbe_prefix The PBE prefix associated with the key. + * @param theKey The actual key, can be {@code null}. + * @param keyStatus The status of the key. + * @param keyMetadata The metadata associated with the key. + * @throws NullPointerException if any of pbe_prefix, keyStatus or keyMetadata is null. + */ + public PBEKeyData(byte[] pbe_prefix, String key_namespace, Key theKey, PBEKeyStatus keyStatus, + String keyMetadata) { + this(pbe_prefix, key_namespace, theKey, keyStatus, keyMetadata, + EnvironmentEdgeManager.currentTime()); + } + + /** + * Constructs a new instance with the given parameters. + * + * @param pbe_prefix The PBE prefix associated with the key. + * @param theKey The actual key, can be {@code null}. + * @param keyStatus The status of the key. + * @param keyMetadata The metadata associated with the key. + * @param refreshTimestamp The timestamp when this key was last refreshed. + * @throws NullPointerException if any of pbe_prefix, keyStatus or keyMetadata is null. + */ + public PBEKeyData(byte[] pbe_prefix, String key_namespace, Key theKey, PBEKeyStatus keyStatus, + String keyMetadata, long refreshTimestamp) { + Preconditions.checkNotNull(pbe_prefix, "pbe_prefix should not be null"); + Preconditions.checkNotNull(key_namespace, "key_namespace should not be null"); + Preconditions.checkNotNull(keyStatus, "keyStatus should not be null"); + Preconditions.checkNotNull(keyMetadata, "keyMetadata should not be null"); + this.pbe_prefix = pbe_prefix; + this.key_namespace = key_namespace; this.theKey = theKey; this.keyStatus = keyStatus; this.keyMetadata = keyMetadata; + this.refreshTimestamp = refreshTimestamp; } + /** + * Returns the PBE prefix associated with the key. + * + * @return The PBE prefix as a byte array. + */ public byte[] getPbe_prefix() { return pbe_prefix; } + /** + * Returns the namespace associated with the key. + * + * @return The namespace as a {@code String}. + */ + public String getKeyNamespace() { + return key_namespace; + } + + /** + * Returns the namespace associated with the key. + * + * @return The namespace as a {@code String}. + */ + public String getKey_namespace() { + return key_namespace; + } + + /** + * Returns the actual key. + * + * @return The key as a {@code Key} object. + */ public Key getTheKey() { return theKey; } + /** + * Returns the status of the key. + * + * @return The key status as a {@code PBEKeyStatus} enum value. + */ public PBEKeyStatus getKeyStatus() { return keyStatus; } + /** + * Returns the metadata associated with the key. + * + * @return The key metadata as a {@code String}. + */ public String getKeyMetadata() { return keyMetadata; } - @Override public boolean equals(Object o) { - if (this == o) return true; - - if (o == null || getClass() != o.getClass()) return false; - - PBEKeyData that = (PBEKeyData) o; - - return new EqualsBuilder().append(pbe_prefix, that.pbe_prefix).append(theKey, that.theKey) - .append(keyStatus, that.keyStatus).append(keyMetadata, that.keyMetadata).isEquals(); - } - - @Override public int hashCode() { - return new HashCodeBuilder(17, 37).append(pbe_prefix).append( - theKey).append(keyStatus).append(keyMetadata).toHashCode(); + public long getRefreshTimestamp() { + return refreshTimestamp; } + /** + * Computes the checksum of the key. If the checksum has already been computed, this method + * returns the previously computed value. The checksum is computed using the CRC32C algorithm. + * + * @return The checksum of the key as a long value. + */ public long getKeyChecksum() { if (keyChecksum == 0) { DataChecksum dataChecksum = DataChecksum.newDataChecksum(DataChecksum.Type.CRC32C, 16); @@ -83,16 +174,54 @@ public long getKeyChecksum() { return keyChecksum; } + /** + * Computes the hash of the key metadata. If the hash has already been computed, this method + * returns the previously computed value. The hash is computed using the MD5 algorithm. + * + * @return The hash of the key metadata as a byte array. + */ public byte[] getKeyMetadataHash() { if (keyMetadataHash == null) { - MessageDigest md5; - try { - md5 = MessageDigest.getInstance("MD5"); - } catch (NoSuchAlgorithmException e) { - throw new RuntimeException(e); - } - keyMetadataHash = md5.digest(keyMetadata.getBytes()); + keyMetadataHash = makeMetadataHash(keyMetadata); } return keyMetadataHash; } + + public static byte[] makeMetadataHash(String metadata) { + MessageDigest md5; + try { + md5 = MessageDigest.getInstance("MD5"); + } catch (NoSuchAlgorithmException e) { + throw new RuntimeException(e); + } + return md5.digest(metadata.getBytes()); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + + if (o == null || getClass() != o.getClass()) return false; + + PBEKeyData that = (PBEKeyData) o; + + return new EqualsBuilder() + .append(pbe_prefix, that.pbe_prefix) + .append(key_namespace, that.key_namespace) + .append(theKey, that.theKey) + .append(keyStatus, that.keyStatus) + .append(keyMetadata, that.keyMetadata) + .isEquals(); + } + + @Override + public int hashCode() { + return new HashCodeBuilder(17, 37) + .append(pbe_prefix) + .append(key_namespace) + .append(theKey) + .append(keyStatus) + .append(keyMetadata) + .toHashCode(); + } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java index 2bc6568e4c31..8bb5ee6ce00d 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java @@ -21,10 +21,48 @@ import org.apache.yetus.audience.InterfaceAudience; import java.io.IOException; +/** + * Interface for PBE-based key providers. Defines methods for generating and managing + * PBE-derived keys, as well as handling key storage and retrieval. + * + * The interface extends the basic {@link KeyProvider} interface with additional + * methods for working with PBE keys. + */ @InterfaceAudience.Public public interface PBEKeyProvider extends KeyProvider { + /** + * Initialize the provider with the given configuration. + * + * @param conf Hadoop configuration + */ void initConfig(Configuration conf); - PBEKeyData getClusterKey(byte[] clusterId); - PBEKeyData getPBEKey(byte[] pbe_prefix) throws IOException; + + /** + * Retrieve the cluster key using the given cluster identifier. + * + * @param clusterId Cluster identifier + * @return PBEKeyData for the cluster key and is guaranteed to be not {@code null} + * @throws IOException if an error occurs while retrieving the key + */ + PBEKeyData getClusterKey(byte[] clusterId) throws IOException; + + /** + * Retrieve a PBE-based key for the specified prefix. + * + * @param pbe_prefix Key prefix + * @param key_namespace Key namespace + * @return PBEKeyData for the cluster key and is guaranteed to be not {@code null} + * @throws IOException if an error occurs while retrieving the key + */ + PBEKeyData getPBEKey(byte[] pbe_prefix, String key_namespace) throws IOException; + + /** + * Retrieve a key identified by the key metadata. The key metadata is typically generated by the + * same key provider via the {@link #getClusterKey(byte[])} or {@link #getPBEKey(byte[], String)} methods. + * + * @param keyMetaData Key metadata + * @return PBEKeyData for the key represented by the metadata + * @throws IOException if an error occurs while generating the key + */ PBEKeyData unwrapKey(String keyMetaData) throws IOException; } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStatus.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStatus.java index 46e9931c97b4..2b7118120172 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStatus.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStatus.java @@ -21,11 +21,18 @@ import java.util.HashMap; import java.util.Map; +/** + * Enum of PBE key status. The status of a PBE key is used to indicate the state of the key. + */ @InterfaceAudience.Public public enum PBEKeyStatus { + /** Represents the active status of a PBE key. */ ACTIVE((byte) 1), + /** Represents the inactive status of a PBE key. */ INACTIVE((byte) 2), + /** Represents the retrieval failure status of a PBE key. */ FAILED((byte) 3), + /** Represents the disabled status of a PBE key. */ DISABLED((byte) 4), ; @@ -33,14 +40,23 @@ public enum PBEKeyStatus { private final byte val; - PBEKeyStatus(byte val) { + private PBEKeyStatus(byte val) { this.val = val; } + /** + * Returns the numeric value of the PBE key status. + * @return byte value + */ public byte getVal() { return val; } + /** + * Returns the PBEKeyStatus for the given numeric value. + * @param val The numeric value of the desired PBEKeyStatus + * @return The PBEKeyStatus corresponding to the given value + */ public static PBEKeyStatus forValue(byte val) { if (lookupByVal == null) { Map tbl = new HashMap<>(); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java index c2bcdf12f40e..f801bc9d7836 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java @@ -34,10 +34,11 @@ public class PBEKeyStoreKeyProvider extends KeyStoreKeyProvider implements PBEKe // Encode clusterId too for consistency with that of PBE prefixes. String keyMetadata = generateKeyMetadata(masterKeyAlias, Base64.getEncoder().encodeToString(clusterId)); - return new PBEKeyData(clusterId, key, PBEKeyStatus.ACTIVE, keyMetadata); + return new PBEKeyData(clusterId, PBEKeyData.KEY_NAMESPACE_GLOBAL, key, PBEKeyStatus.ACTIVE, + keyMetadata); } - @Override public PBEKeyData getPBEKey(byte[] pbe_prefix) throws IOException { + @Override public PBEKeyData getPBEKey(byte[] pbe_prefix, String key_namespace) throws IOException { checkConfig(); String encodedPrefix = Base64.getEncoder().encodeToString(pbe_prefix); String aliasConfKey = HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encodedPrefix + "." + @@ -64,10 +65,10 @@ public class PBEKeyStoreKeyProvider extends KeyStoreKeyProvider implements PBEKe String alias = keyMetadata.get(KEY_METADATA_ALIAS); Key key = alias != null ? getKey(alias) : null; if (key != null) { - return new PBEKeyData(pbe_prefix, key, + return new PBEKeyData(pbe_prefix, PBEKeyData.KEY_NAMESPACE_GLOBAL, key, isActive ? PBEKeyStatus.ACTIVE : PBEKeyStatus.INACTIVE, keyMetadataStr); } - return new PBEKeyData(pbe_prefix, null, + return new PBEKeyData(pbe_prefix, PBEKeyData.KEY_NAMESPACE_GLOBAL, null, isActive ? PBEKeyStatus.FAILED : PBEKeyStatus.DISABLED, keyMetadataStr); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaAdmin.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdmin.java similarity index 60% rename from hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaAdmin.java rename to hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdmin.java index 14bd04d033b3..ae0f1af9d1ba 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaAdmin.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdmin.java @@ -21,7 +21,24 @@ import org.apache.yetus.audience.InterfaceAudience; import java.io.IOException; +/** + * PBEKeymetaAdmin is an interface for administrative functions related to PBE keys. + * It handles the following methods: + * + *
    + *
  • enablePBE(): Enables PBE for a given pbe_prefix and namespace.
  • + *
+ */ @InterfaceAudience.Public -public interface KeyMetaAdmin { - PBEKeyStatus enablePBE(String pbePrefix) throws IOException; +public interface PBEKeymetaAdmin { + /** + * Enables PBE for the specified key prefix and namespace. + * + * @param pbePrefix The prefix for the PBE key. + * @param keyNamespace The namespace for the PBE key. + * + * @return The current status of the PBE key. + * @throws IOException if an error occurs while enabling PBE. + */ + PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOException; } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java index 96b3dbd4a8a5..55da4b3b12c0 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java @@ -1688,16 +1688,31 @@ public static byte[] add(final byte[] a, final byte[] b) { /** * Concatenate byte arrays. + * * @param a first third * @param b second third * @param c third third * @return New array made from a, b and c */ public static byte[] add(final byte[] a, final byte[] b, final byte[] c) { - byte[] result = new byte[a.length + b.length + c.length]; + return add(a, b, c, EMPTY_BYTE_ARRAY); + } + + /** + * Concatenate byte arrays. + * + * @param a first fourth + * @param b second fourth + * @param c third fourth + * @param d fourth fourth + * @return New array made from a, b, c, and d + */ + public static byte[] add(final byte[] a, final byte[] b, final byte[] c, final byte[] d) { + byte[] result = new byte[a.length + b.length + c.length + d.length]; System.arraycopy(a, 0, result, 0, a.length); System.arraycopy(b, 0, result, a.length, b.length); System.arraycopy(c, 0, result, a.length + b.length, c.length); + System.arraycopy(d, 0, result, a.length + b.length + c.length, d.length); return result; } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java index 2025433d157c..e511627d35dc 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java @@ -115,7 +115,7 @@ private void addEntry(String alias, String prefix) { @Test public void testGetPBEKey() throws Exception { for (Bytes prefix: prefix2key.keySet()) { - PBEKeyData keyData = pbeProvider.getPBEKey(prefix.get()); + PBEKeyData keyData = pbeProvider.getPBEKey(prefix.get(), PBEKeyData.KEY_NAMESPACE_GLOBAL); assertPBEKeyData(keyData, PBEKeyStatus.ACTIVE, prefix2key.get(prefix).get(), prefix.get(), prefix2alias.get(prefix)); } diff --git a/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto b/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto index a86a6bbf1991..75dfffac427c 100644 --- a/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto +++ b/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto @@ -26,6 +26,7 @@ option optimize_for = SPEED; message PBEAdminRequest { required string pbe_prefix = 1; + required string key_namespace = 2; } enum PBEKeyStatus { @@ -37,7 +38,8 @@ enum PBEKeyStatus { message PBEAdminResponse { required string pbe_prefix = 1; - required PBEKeyStatus pbe_status = 2; + required string key_namespace = 2; + required PBEKeyStatus pbe_status = 3; } service PBEAdminService { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java index bf9640196f62..30bcd40a7a0a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java @@ -52,6 +52,9 @@ import org.apache.hadoop.hbase.http.InfoServer; import org.apache.hadoop.hbase.io.util.MemorySizeUtil; import org.apache.hadoop.hbase.ipc.RpcServerInterface; +import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; +import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; +import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdminImpl; import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.master.MasterCoprocessorHost; import org.apache.hadoop.hbase.namequeues.NamedQueueRecorder; @@ -187,6 +190,9 @@ public abstract class HBaseServerBase> extends protected final NettyEventLoopGroupConfig eventLoopGroupConfig; + private PBEKeymetaAdminImpl pbeKeymetaAdmin; + private PBEKeyAccessor pbeKeyAccessor; + private void setupSignalHandlers() { if (!SystemUtils.IS_OS_WINDOWS) { HBasePlatformDependent.handle("HUP", (number, name) -> { @@ -283,6 +289,9 @@ public HBaseServerBase(Configuration conf, String name) throws IOException { initializeFileSystem(); + pbeKeymetaAdmin = new PBEKeymetaAdminImpl(this); + pbeKeyAccessor = new PBEKeyAccessor(pbeKeymetaAdmin); + int choreServiceInitialSize = conf.getInt(CHORE_SERVICE_INITIAL_POOL_SIZE, DEFAULT_CHORE_SERVICE_INITIAL_POOL_SIZE); this.choreService = new ChoreService(getName(), choreServiceInitialSize, true); @@ -403,6 +412,16 @@ public ZKWatcher getZooKeeper() { return zooKeeper; } + @Override + public PBEKeymetaAdmin getPBEKeymetaAdmin() { + return pbeKeymetaAdmin; + } + + @Override + public PBEKeyAccessor getPBEKeyAccessor() { + return pbeKeyAccessor; + } + protected final void shutdownChore(ScheduledChore chore) { if (chore != null) { chore.shutdown(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java index 32ad587ad96d..d0820464ad5d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java @@ -38,6 +38,8 @@ import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.hfile.BlockCache; import org.apache.hadoop.hbase.ipc.RpcServerInterface; +import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; +import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.mob.MobFileCache; import org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager; import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager; @@ -256,6 +258,14 @@ public ChoreService getChoreService() { return null; } + @Override public PBEKeyAccessor getPBEKeyAccessor() { + return null; + } + + @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() { + return null; + } + @Override public void updateRegionFavoredNodesMapping(String encodedRegionName, List favoredNodes) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java index d99807e46b06..57ac18bcd0a6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java @@ -23,6 +23,8 @@ import org.apache.hadoop.hbase.client.AsyncClusterConnection; import org.apache.hadoop.hbase.client.AsyncConnection; import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; +import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.yetus.audience.InterfaceAudience; @@ -83,6 +85,9 @@ default AsyncConnection getAsyncConnection() { /** Returns The {@link ChoreService} instance for this server */ ChoreService getChoreService(); + public PBEKeyAccessor getPBEKeyAccessor(); + public PBEKeymetaAdmin getPBEKeymetaAdmin(); + /** Returns Return the FileSystem object used (can return null!). */ // TODO: Distinguish between "dataFs" and "walFs". default FileSystem getFileSystem() { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ClusterKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyAccessor.java similarity index 96% rename from hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ClusterKeyAccessor.java rename to hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyAccessor.java index 11d956eb6a5b..1e3ee5bbde5e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ClusterKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyAccessor.java @@ -32,10 +32,10 @@ import static org.apache.hadoop.hbase.HConstants.CLUSTER_KEY_FILE_PREFIX; @InterfaceAudience.Private -public class ClusterKeyAccessor extends PBEKeyManager { +public class PBEClusterKeyAccessor extends PBEKeyManager { protected final Path clusterKeyDir; - public ClusterKeyAccessor(Server server) throws IOException { + public PBEClusterKeyAccessor(Server server) throws IOException { super(server); this.clusterKeyDir = CommonFSUtils.getClusterKeyDir(server.getConfiguration()); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ClusterKeyCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyCache.java similarity index 75% rename from hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ClusterKeyCache.java rename to hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyCache.java index 0a563b3dcd68..9ac76164d1e3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ClusterKeyCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyCache.java @@ -28,18 +28,16 @@ import java.util.Map; @InterfaceAudience.Private -public class ClusterKeyCache { - private static final Logger LOG = LoggerFactory.getLogger(ClusterKeyCache.class); +public class PBEClusterKeyCache { + private static final Logger LOG = LoggerFactory.getLogger(PBEClusterKeyCache.class); - private final ClusterKeyAccessor accessor; private PBEKeyData latestClusterKey; private Map clusterKeys = new HashMap<>(); - public ClusterKeyCache(ClusterKeyAccessor accessor) throws IOException { - this.accessor = accessor; - + public PBEClusterKeyCache createCache(PBEClusterKeyAccessor accessor) throws IOException { List allClusterKeys = accessor.getAllClusterKeys(); int latestKeySequence = accessor.findLatestKeySequence(allClusterKeys); + PBEKeyData latestClusterKey = null; for (Path keyPath: allClusterKeys) { LOG.info("Loading cluster key from: {}", keyPath); PBEKeyData keyData = accessor.loadClusterKey(keyPath); @@ -48,6 +46,15 @@ public ClusterKeyCache(ClusterKeyAccessor accessor) throws IOException { } clusterKeys.put(keyData.getKeyChecksum(), keyData); } + if (latestClusterKey == null) { + throw new RuntimeException("Expected to find a key for sequence: " + latestKeySequence); + } + return new PBEClusterKeyCache(clusterKeys, latestClusterKey); + } + + private PBEClusterKeyCache(Map clusterKeys, PBEKeyData latestClusterKey) { + this.clusterKeys = clusterKeys; + this.latestClusterKey = latestClusterKey; } public PBEKeyData getLatestClusterKey() { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java new file mode 100644 index 000000000000..0aefeddeca9b --- /dev/null +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java @@ -0,0 +1,77 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.keymeta; + +import org.apache.hadoop.hbase.io.crypto.PBEKeyData; +import org.apache.yetus.audience.InterfaceAudience; +import java.io.IOException; +import java.util.List; + +/** + * This class provides a unified access on top of both {@code PBEKeyDataCache} (L1) and + * {@code PBEKeymetaTableAccessor} (L2) to access PBE keys. When the getter is called, it first + * checks if L1 cache has the key, if not, it tries to get the key from L2. + */ +@InterfaceAudience.Private +public class PBEKeyAccessor { + private final PBEKeyDataCache keyDataCache; + private final PBEKeymetaTableAccessor keymetaAccessor; + + public PBEKeyAccessor(PBEKeymetaTableAccessor keymetaAccessor) { + this.keymetaAccessor = keymetaAccessor; + keyDataCache = new PBEKeyDataCache(); + } + + /** + * Get key data by key metadata. + * + * @param pbePrefix The prefix of the key + * @param keyNamespace The namespace of the key + * @param keyMetadata The metadata of the key + * @return The key data + * @throws IOException if an error occurs while retrieving the key + */ + public PBEKeyData getKey(byte[] pbePrefix, String keyNamespace, String keyMetadata) throws IOException { + PBEKeyData keyData = keyDataCache.getEntry(keyMetadata); + if (keyData == null) { + keyData = keymetaAccessor.getKey(pbePrefix, keyNamespace, keyMetadata); + keyDataCache.addEntry(keyData); + } + return keyData; + } + + /** + * Get an active key for the given prefix suitable for use in encryption. + * + * @param pbePrefix The prefix of the key + * @param keyNamespace The namespace of the key + * @return The key data + * @throws IOException if an error occurs while retrieving the key + */ + public PBEKeyData getAnActiveKey(byte[] pbePrefix, String keyNamespace) throws IOException { + PBEKeyData keyData = keyDataCache.getRandomEntryForPrefix(pbePrefix, keyNamespace); + if (keyData == null) { + List activeKeys = keymetaAccessor.getActiveKeys(pbePrefix, keyNamespace); + for (PBEKeyData kd: activeKeys) { + keyDataCache.addEntry(kd); + } + keyData = keyDataCache.getRandomEntryForPrefix(pbePrefix, keyNamespace); + } + return keyData; + } +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyDataCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyDataCache.java new file mode 100644 index 000000000000..b022177d83d6 --- /dev/null +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyDataCache.java @@ -0,0 +1,153 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.keymeta; + +import org.apache.hadoop.hbase.io.crypto.PBEKeyData; +import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.yetus.audience.InterfaceAudience; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.HashMap; +import java.util.concurrent.locks.ReentrantLock; + +/** + * In-memory cache for PBEKeyData entries, using key metadata as the cache key. + */ +@InterfaceAudience.Private +public class PBEKeyDataCache { + private final Map cache; + private final Map>> prefixCache; + private final ReentrantLock lock; + + public PBEKeyDataCache() { + this.prefixCache = new HashMap<>(); + this.cache = new HashMap<>(); + this.lock = new ReentrantLock(); + } + + /** + * Adds a new entry to the cache. + * + * @param pbeKeyData the PBEKeyData entry to be added + */ + public void addEntry(PBEKeyData pbeKeyData) { + lock.lock(); + try { + Bytes pbePrefix = new Bytes(pbeKeyData.getPbe_prefix()); + String keyNamespace = pbeKeyData.getKeyNamespace(); + + cache.put(pbeKeyData.getKeyMetadata(), pbeKeyData); + + Map> nsCache = prefixCache.get(keyNamespace); + if (nsCache == null) { + nsCache = new HashMap<>(); + prefixCache.put(keyNamespace, nsCache); + } + + List keyList = nsCache.get(pbePrefix); + if (keyList == null) { + keyList = new ArrayList<>(); + prefixCache.get(keyNamespace).put(pbePrefix, keyList); + } + + keyList.add(pbeKeyData); + } finally { + lock.unlock(); + } + } + + /** + * Retrieves an entry from the cache based on its key metadata. + * + * @param keyMetadata the key metadata of the entry to be retrieved + * @return the corresponding PBEKeyData entry, or null if not found + */ + public PBEKeyData getEntry(String keyMetadata) { + lock.lock(); + try { + return cache.get(keyMetadata); + } finally { + lock.unlock(); + } + } + + /** + * Removes an entry from the cache based on its key metadata. + * + * @param keyMetadata the key metadata of the entry to be removed + * @return the removed PBEKeyData entry, or null if not found + */ + public PBEKeyData removeEntry(String keyMetadata) { + lock.lock(); + try { + PBEKeyData removedEntry = cache.remove(keyMetadata); + if (removedEntry != null) { + Bytes pbePrefix = new Bytes(removedEntry.getPbe_prefix()); + String keyNamespace = removedEntry.getKeyNamespace(); + Map> nsCache = prefixCache.get(keyNamespace); + List keyList = nsCache != null ? nsCache.get(pbePrefix) : null; + if (keyList != null) { + keyList.remove(removedEntry); + if (keyList.isEmpty()) { + prefixCache.get(keyNamespace).remove(pbePrefix); + } + } + } + return removedEntry; + } finally { + lock.unlock(); + } + } + + /** + * Retrieves a random entry from the cache based on its PBE prefix, key namespace, and filters out entries with + * a status other than ACTIVE. + * + * @param pbe_prefix the PBE prefix to search for + * @param keyNamespace the key namespace to search for + * @return a random PBEKeyData entry with the given PBE prefix and ACTIVE status, or null if not found + */ + public PBEKeyData getRandomEntryForPrefix(byte[] pbe_prefix, String keyNamespace) { + lock.lock(); + try { + List activeEntries = new ArrayList<>(); + + Bytes pbePrefix = new Bytes(pbe_prefix); + Map> nsCache = prefixCache.get(keyNamespace); + List keyList = nsCache != null ? nsCache.get(pbePrefix) : null; + if (keyList != null) { + for (PBEKeyData entry : keyList) { + if (entry.getKeyStatus() == PBEKeyStatus.ACTIVE) { + activeEntries.add(entry); + } + } + } + + if (activeEntries.isEmpty()) { + return null; + } + + return activeEntries.get((int) (Math.random() * activeEntries.size())); + } finally { + lock.unlock(); + } + } +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyManager.java index ac315d6eb1b8..bb426a8661b6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyManager.java @@ -26,8 +26,11 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +/** + * A base class for all keymeta manager implementations. + */ @InterfaceAudience.Private -public class PBEKeyManager { +public abstract class PBEKeyManager { protected static final Logger LOG = LoggerFactory.getLogger(PBEKeyManager.class); protected final Server server; @@ -38,6 +41,12 @@ public PBEKeyManager(Server server) { this.server = server; } + /** + * A utility method for getting the PBE key provider. + * @return the key provider + * @throws RuntimeException if no provider is configured or if the configured provider is not an + * instance of PBEKeyProvider + */ protected PBEKeyProvider getKeyProvider() { KeyProvider provider = Encryption.getKeyProvider(server.getConfiguration()); if (!(provider instanceof PBEKeyProvider)) { @@ -47,6 +56,10 @@ protected PBEKeyProvider getKeyProvider() { return (PBEKeyProvider) provider; } + /** + * A utility method for checking if PBE is enabled. + * @return true if PBE is enabled + */ protected boolean isPBEEnabled() { if (pbeEnabled == null) { pbeEnabled = server.getConfiguration().getBoolean(HConstants.CRYPTO_PBE_ENABLED_CONF_KEY, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java similarity index 75% rename from hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaManager.java rename to hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java index 62b69aadab7b..f4a4a6b87b1e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.hbase.keymeta; +import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.io.crypto.PBEKeyData; import org.apache.hadoop.hbase.io.crypto.PBEKeyProvider; import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus; -import org.apache.hadoop.hbase.master.MasterServices; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -28,18 +28,19 @@ import java.util.Base64; @InterfaceAudience.Private -public class KeyMetaManager extends KeyMetaTableAccessor implements KeyMetaAdmin { - private static final Logger LOG = LoggerFactory.getLogger(KeyMetaManager.class); +public class PBEKeymetaAdminImpl extends PBEKeymetaTableAccessor implements PBEKeymetaAdmin { + private static final Logger LOG = LoggerFactory.getLogger(PBEKeymetaAdminImpl.class); - public KeyMetaManager(MasterServices master) { - super(master); + public PBEKeymetaAdminImpl(Server server) { + super(server); } - @Override public PBEKeyStatus enablePBE(String pbePrefix) throws IOException { + @Override + public PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOException { if (! isPBEEnabled()) { throw new IOException("PBE is currently not enabled in HBase configuration"); } - LOG.info("Trying to enable PBE on key: {}", pbePrefix); + LOG.info("Trying to enable PBE on key: {} for namespace: {}", pbePrefix, keyNamespace); byte[] pbe_prefix; try { pbe_prefix = Base64.getDecoder().decode(pbePrefix); @@ -48,8 +49,7 @@ public KeyMetaManager(MasterServices master) { throw new IOException("Failed to decode specified prefix as Base64 string: " + pbePrefix, e); } PBEKeyProvider provider = getKeyProvider(); - // TODO: key provider already decodes, should the param type be changed to encoded prefix? - PBEKeyData pbeKey = provider.getPBEKey(pbe_prefix); + PBEKeyData pbeKey = provider.getPBEKey(pbe_prefix, keyNamespace); LOG.info("Got key data with status: {} for prefix: {}", pbeKey.getKeyStatus(), pbePrefix); addKey(pbeKey); return pbeKey.getKeyStatus(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaMasterService.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaMasterService.java similarity index 80% rename from hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaMasterService.java rename to hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaMasterService.java index be72f1b25f6c..4c4980aee364 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaMasterService.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaMasterService.java @@ -27,22 +27,22 @@ import java.io.IOException; @InterfaceAudience.Private -public class KeyMetaMasterService { - private static final Logger LOG = LoggerFactory.getLogger(KeyMetaMasterService.class); +public class PBEKeymetaMasterService { + private static final Logger LOG = LoggerFactory.getLogger(PBEKeymetaMasterService.class); private final MasterServices master; Boolean pbeEnabled; private static final TableDescriptorBuilder TABLE_DESCRIPTOR_BUILDER = TableDescriptorBuilder - .newBuilder(KeyMetaTableAccessor.KEY_META_TABLE_NAME).setRegionReplication(1) + .newBuilder(PBEKeymetaTableAccessor.KEY_META_TABLE_NAME).setRegionReplication(1) .setPriority(HConstants.SYSTEMTABLE_QOS) .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder( - KeyMetaTableAccessor.KEY_META_INFO_FAMILY) + PBEKeymetaTableAccessor.KEY_META_INFO_FAMILY) .setScope(HConstants.REPLICATION_SCOPE_LOCAL).setMaxVersions(1) .setInMemory(true) .build()); - public KeyMetaMasterService(MasterServices masterServices) { + public PBEKeymetaMasterService(MasterServices masterServices) { this.master = masterServices; } @@ -50,9 +50,9 @@ public void init() throws IOException { if (!isPBEEnabled()) { return; } - if (!master.getTableDescriptors().exists(KeyMetaTableAccessor.KEY_META_TABLE_NAME)) { + if (!master.getTableDescriptors().exists(PBEKeymetaTableAccessor.KEY_META_TABLE_NAME)) { LOG.info("{} table not found. Creating.", - KeyMetaTableAccessor.KEY_META_TABLE_NAME.getNameWithNamespaceInclAsString()); + PBEKeymetaTableAccessor.KEY_META_TABLE_NAME.getNameWithNamespaceInclAsString()); this.master.createSystemTable(TABLE_DESCRIPTOR_BUILDER.build()); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaServiceEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaServiceEndpoint.java similarity index 60% rename from hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaServiceEndpoint.java rename to hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaServiceEndpoint.java index 5978e2c01a8f..02c56d95dd28 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaServiceEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaServiceEndpoint.java @@ -38,49 +38,86 @@ import java.util.Base64; import java.util.Collections; -@CoreCoprocessor -@InterfaceAudience.Private -public class KeyMetaServiceEndpoint implements MasterCoprocessor { - private static final Logger LOG = LoggerFactory.getLogger(KeyMetaServiceEndpoint.class); +/** + * This class implements a coprocessor service endpoint for the Phoenix Query Server's + * PBE (Prefix Based Encryption) key metadata operations. It handles the following + * methods: + * + *
    + *
  • enablePBE(): Enables PBE for a given pbe_prefix and namespace.
  • + *
+ * + * This endpoint is designed to work in conjunction with the {@link org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin} + * interface, which provides the actual implementation of the key metadata operations. + *

+ */ +@CoreCoprocessor @InterfaceAudience.Private +public class PBEKeymetaServiceEndpoint implements MasterCoprocessor { + private static final Logger LOG = LoggerFactory.getLogger(PBEKeymetaServiceEndpoint.class); private MasterServices master = null; - private KeyMetaManager keyMetaManager; private final PBEAdminService pbeAdminService = new KeyMetaAdminServiceImpl(); - @Override public void start(CoprocessorEnvironment env) throws IOException { + /** + * Starts the coprocessor by initializing the reference to the {@link org.apache.hadoop.hbase.master.MasterServices} + * instance. + * + * @param env The coprocessor environment. + * @throws IOException If an error occurs during initialization. + */ + @Override + public void start(CoprocessorEnvironment env) throws IOException { if (!(env instanceof HasMasterServices)) { throw new IOException("Does not implement HMasterServices"); } master = ((HasMasterServices) env).getMasterServices(); - keyMetaManager = new KeyMetaManager(master); } + /** + * Returns an iterable of the available coprocessor services, which includes the + * {@link org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminService} implemented by + * {@link org.apache.hadoop.hbase.keymeta.PBEKeymetaServiceEndpoint.KeyMetaAdminServiceImpl}. + * + * @return An iterable of the available coprocessor services. + */ @Override public Iterable getServices() { return Collections.singleton(pbeAdminService); } + /** + * The implementation of the {@link org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminService} + * interface, which provides the actual method implementations for enabling PBE. + */ private class KeyMetaAdminServiceImpl extends PBEAdminService { + /** + * Enables PBE for a given tenant and namespace, as specified in the provided request. + * + * @param controller The RPC controller. + * @param request The request containing the tenant and table specifications. + * @param done The callback to be invoked with the response. + */ @Override public void enablePBE(RpcController controller, PBEAdminRequest request, - RpcCallback done) { + RpcCallback done) { PBEAdminResponse.Builder builder = - PBEAdminResponse.newBuilder().setPbePrefix(request.getPbePrefix()); + PBEAdminResponse.newBuilder().setPbePrefix(request.getPbePrefix()) + .setKeyNamespace(request.getKeyNamespace()); byte[] pbe_prefix = null; try { pbe_prefix = Base64.getDecoder().decode(request.getPbePrefix()); - } - catch (IllegalArgumentException e) { + } catch (IllegalArgumentException e) { builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.PBE_FAILED); CoprocessorRpcUtils.setControllerException(controller, new IOException( "Failed to decode specified prefix as Base64 string: " + request.getPbePrefix(), e)); } if (pbe_prefix != null) { try { - PBEKeyStatus pbeKeyStatus = keyMetaManager.enablePBE(request.getPbePrefix()); + PBEKeyStatus pbeKeyStatus = master.getPBEKeymetaAdmin() + .enablePBE(request.getPbePrefix(), request.getKeyNamespace()); builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.valueOf(pbeKeyStatus.getVal())); } catch (IOException e) { CoprocessorRpcUtils.setControllerException(controller, e); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java similarity index 55% rename from hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaTableAccessor.java rename to hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java index fa5ead093ebc..f42370c9fc48 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaTableAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java @@ -23,8 +23,13 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.Durability; +import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.client.ResultScanner; +import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; +import org.apache.hadoop.hbase.filter.PrefixFilter; import org.apache.hadoop.hbase.io.crypto.PBEKeyData; import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus; import org.apache.hadoop.hbase.util.Bytes; @@ -32,11 +37,13 @@ import org.apache.yetus.audience.InterfaceAudience; import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; +/** + * Accessor for PBE keymeta table. + */ @InterfaceAudience.Private -public class KeyMetaTableAccessor extends PBEKeyManager { +public class PBEKeymetaTableAccessor extends PBEKeyManager { private static final String KEY_META_INFO_FAMILY_STR = "info"; public static final byte[] KEY_META_INFO_FAMILY = Bytes.toBytes(KEY_META_INFO_FAMILY_STR); @@ -44,9 +51,6 @@ public class KeyMetaTableAccessor extends PBEKeyManager { public static final TableName KEY_META_TABLE_NAME = TableName.valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "keymeta"); - public static final String PBE_PREFIX_QUAL_NAME = "pbe_prefix"; - public static final byte[] PBE_PREFIX_QUAL_BYTES = Bytes.toBytes(PBE_PREFIX_QUAL_NAME); - public static final String DEK_METADATA_QUAL_NAME = "dek_metadata"; public static final byte[] DEK_METADATA_QUAL_BYTES = Bytes.toBytes(DEK_METADATA_QUAL_NAME); @@ -65,24 +69,55 @@ public class KeyMetaTableAccessor extends PBEKeyManager { public static final String KEY_STATUS_QUAL_NAME = "key_status"; public static final byte[] KEY_STATUS_QUAL_BYTES = Bytes.toBytes(KEY_STATUS_QUAL_NAME); - public KeyMetaTableAccessor(Server server) { + public PBEKeymetaTableAccessor(Server server) { super(server); } public void addKey(PBEKeyData keyData) throws IOException { - long refreshTime = EnvironmentEdgeManager.currentTime(); - final Put putForPrefix = addMutationColumns(new Put(keyData.getPbe_prefix()), keyData, - refreshTime); final Put putForMetadata = addMutationColumns(new Put(constructRowKeyForMetadata(keyData)), - keyData, refreshTime); + keyData); + + Connection connection = server.getConnection(); + try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { + table.put(putForMetadata); + } + } + public List getActiveKeys(byte[] pbePrefix, String keyNamespace) throws IOException { Connection connection = server.getConnection(); + byte[] prefixForScan = Bytes.add(Bytes.toBytes(pbePrefix.length), pbePrefix, + Bytes.toBytes(keyNamespace)); try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { - table.put(Arrays.asList(putForPrefix, putForMetadata)); + PrefixFilter prefixFilter = new PrefixFilter(prefixForScan); + Scan scan = new Scan(); + scan.setFilter(prefixFilter); + scan.addFamily(KEY_META_INFO_FAMILY); + + ResultScanner scanner = table.getScanner(scan); + List activeKeys = new ArrayList<>(); + for (Result result : scanner) { + PBEKeyData keyData = parseFromResult(pbePrefix, keyNamespace, result); + if (keyData.getKeyStatus() == PBEKeyStatus.ACTIVE) { + activeKeys.add(keyData); + } + } + + return activeKeys; } } - private Put addMutationColumns(Put put, PBEKeyData keyData, long refreshTime) { + public PBEKeyData getKey(byte[] pbePrefix, String keyNamespace, String keyMetadata) + throws IOException { + Connection connection = server.getConnection(); + try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { + byte[] rowKey = constructRowKeyForMetadata(pbePrefix, keyNamespace, + PBEKeyData.makeMetadataHash(keyMetadata)); + Result result = table.get(new Get(rowKey)); + return parseFromResult(pbePrefix, keyNamespace, result); + } + } + + private Put addMutationColumns(Put put, PBEKeyData keyData) { if (keyData.getTheKey() != null) { put.addColumn(KEY_META_INFO_FAMILY, DEK_CHECKSUM_QUAL_BYTES, Bytes.toBytes(keyData.getKeyChecksum())); @@ -92,25 +127,39 @@ private Put addMutationColumns(Put put, PBEKeyData keyData, long refreshTime) { .addColumn(KEY_META_INFO_FAMILY, DEK_METADATA_QUAL_BYTES, keyData.getKeyMetadata().getBytes()) //.addColumn(KEY_META_INFO_FAMILY, DEK_WRAPPED_BY_STK_QUAL_BYTES, null) //.addColumn(KEY_META_INFO_FAMILY, STK_CHECKSUM_QUAL_BYTES, null) - .addColumn(KEY_META_INFO_FAMILY, REFRESHED_TIMESTAMP_QUAL_BYTES, Bytes.toBytes(refreshTime)) + .addColumn(KEY_META_INFO_FAMILY, REFRESHED_TIMESTAMP_QUAL_BYTES, + Bytes.toBytes(keyData.getRefreshTimestamp())) .addColumn(KEY_META_INFO_FAMILY, KEY_STATUS_QUAL_BYTES, new byte[] { keyData.getKeyStatus().getVal() }) ; } private byte[] constructRowKeyForMetadata(PBEKeyData keyData) { - byte[] pbePrefix = keyData.getPbe_prefix(); - int prefixLength = pbePrefix.length; - byte[] keyMetadataHash = keyData.getKeyMetadataHash(); - return Bytes.add(Bytes.toBytes(prefixLength), pbePrefix, keyMetadataHash); + return constructRowKeyForMetadata(keyData.getPbe_prefix(), keyData.getKeyNamespace(), + keyData.getKeyMetadataHash()); } - private byte[] extractPBEPrefix(byte[] rowkey) { - int prefixLength = Bytes.toInt(rowkey); - return Bytes.copy(rowkey, Bytes.SIZEOF_INT, prefixLength); + private static byte[] constructRowKeyForMetadata(byte[] pbePrefix, String keyNamespace, + byte[] keyMetadataHash) { + int prefixLength = pbePrefix.length; + return Bytes.add(Bytes.toBytes(prefixLength), pbePrefix, Bytes.toBytesBinary(keyNamespace), + keyMetadataHash); } - private byte[] extractKeyMetadataHash(byte[] rowkey, byte[] pbePreefix) { - return Bytes.copy(rowkey, Bytes.SIZEOF_INT + pbePreefix.length, rowkey.length); + private PBEKeyData parseFromResult(byte[] pbePrefix, String keyNamespace, Result result) { + if (result == null || result.isEmpty()) { + return null; + } + PBEKeyStatus keyStatus = PBEKeyStatus.forValue( + result.getValue(KEY_META_INFO_FAMILY, KEY_STATUS_QUAL_BYTES)[0]); + String dekMetadata = Bytes.toString(result.getValue(KEY_META_INFO_FAMILY, + DEK_METADATA_QUAL_BYTES)); + long refreshedTimestamp = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY, REFRESHED_TIMESTAMP_QUAL_BYTES)); + byte[] dekChecksum = result.getValue(KEY_META_INFO_FAMILY, DEK_CHECKSUM_QUAL_BYTES); + //byte[] dekWrappedByStk = result.getValue(KEY_META_INFO_FAMILY, DEK_WRAPPED_BY_STK_QUAL_BYTES); + //byte[] stkChecksum = result.getValue(KEY_META_INFO_FAMILY, STK_CHECKSUM_QUAL_BYTES); + + return new PBEKeyData(pbePrefix, keyNamespace, null, keyStatus, dekMetadata, + refreshedTimestamp); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java index 843cb413b596..6eac904d6e4c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java @@ -122,7 +122,7 @@ import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils; import org.apache.hadoop.hbase.ipc.RpcServer; import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException; -import org.apache.hadoop.hbase.keymeta.KeyMetaMasterService; +import org.apache.hadoop.hbase.keymeta.PBEKeymetaMasterService; import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.master.MasterRpcServices.BalanceSwitchMode; import org.apache.hadoop.hbase.master.assignment.AssignmentManager; @@ -356,8 +356,8 @@ public class HMaster extends HBaseServerBase implements Maste // file system manager for the master FS operations private MasterFileSystem fileSystemManager; private MasterWalManager walManager; - private ClusterKeyManager clusterKeyManager; - private KeyMetaMasterService keyMetaMasterService; + private PBEClusterKeyManager pbeClusterKeyManager; + private PBEKeymetaMasterService pbeKeymetaMasterService; // manager to manage procedure-based WAL splitting, can be null if current // is zk-based WAL splitting. SplitWALManager will replace SplitLogManager @@ -995,8 +995,8 @@ private void finishActiveMasterInitialization() throws IOException, InterruptedE ZKClusterId.setClusterId(this.zooKeeper, fileSystemManager.getClusterId()); this.clusterId = clusterId.toString(); - clusterKeyManager = new ClusterKeyManager(this); - clusterKeyManager.ensureClusterKeyInitialized(); + pbeClusterKeyManager = new PBEClusterKeyManager(this); + pbeClusterKeyManager.ensureClusterKeyInitialized(); // Precaution. Put in place the old hbck1 lock file to fence out old hbase1s running their // hbck1s against an hbase2 cluster; it could do damage. To skip this behavior, set @@ -1037,8 +1037,8 @@ private void finishActiveMasterInitialization() throws IOException, InterruptedE Map, List>> procsByType = procedureExecutor .getActiveProceduresNoCopy().stream().collect(Collectors.groupingBy(p -> p.getClass())); - keyMetaMasterService = new KeyMetaMasterService(this); - keyMetaMasterService.init(); + pbeKeymetaMasterService = new PBEKeymetaMasterService(this); + pbeKeymetaMasterService.init(); // Create Assignment Manager this.assignmentManager = createAssignmentManager(this, masterRegion); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/KeyMetaSchema.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/KeyMetaSchema.java deleted file mode 100644 index ce85e11f7518..000000000000 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/KeyMetaSchema.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hbase.master; - -import org.apache.yetus.audience.InterfaceAudience; - -/** - * API for managing the keys in key_meta table. - */ -@InterfaceAudience.Public -public interface KeyMetaSchema { -} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterKeyManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/PBEClusterKeyManager.java similarity index 92% rename from hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterKeyManager.java rename to hbase-server/src/main/java/org/apache/hadoop/hbase/master/PBEClusterKeyManager.java index ed54231e93d7..f3e0f60a8c61 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterKeyManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/PBEClusterKeyManager.java @@ -20,20 +20,19 @@ import java.io.IOException; import java.util.List; import java.util.UUID; -import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.io.crypto.PBEKeyData; import org.apache.hadoop.hbase.io.crypto.PBEKeyProvider; -import org.apache.hadoop.hbase.keymeta.ClusterKeyAccessor; +import org.apache.hadoop.hbase.keymeta.PBEClusterKeyAccessor; import org.apache.yetus.audience.InterfaceAudience; import static org.apache.hadoop.hbase.HConstants.CLUSTER_KEY_FILE_PREFIX; @InterfaceAudience.Private -public class ClusterKeyManager extends ClusterKeyAccessor { +public class PBEClusterKeyManager extends PBEClusterKeyAccessor { private final MasterServices master; - public ClusterKeyManager(MasterServices master) throws IOException { + public PBEClusterKeyManager(MasterServices master) throws IOException { super(master); this.master = master; } @@ -63,8 +62,8 @@ public PBEKeyData rotateClusterKeyIfChanged() throws IOException { return null; } Path latestFile = getLatestClusterKeyFile(); - String latestKeyMeta = loadKeyMetadata(latestFile); - return rotateClusterKey(latestKeyMeta); + String latestKeyMetadata = loadKeyMetadata(latestFile); + return rotateClusterKey(latestKeyMetadata); } private PBEKeyData rotateClusterKey(String currentKeyMetadata) throws IOException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index 1801b4d971ed..165b1cd68a34 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -806,8 +806,8 @@ public HRegion(final HRegionFileSystem fs, final WAL wal, final Configuration co this.conf = new CompoundConfiguration().add(confParam).addBytesMap(htd.getValues()); this.cellComparator = htd.isMetaTable() || conf.getBoolean(USE_META_CELL_COMPARATOR, DEFAULT_USE_META_CELL_COMPARATOR) - ? MetaCellComparator.META_COMPARATOR - : CellComparatorImpl.COMPARATOR; + ? MetaCellComparator.META_COMPARATOR + : CellComparatorImpl.COMPARATOR; this.lock = new ReentrantReadWriteLock( conf.getBoolean(FAIR_REENTRANT_CLOSE_LOCK, DEFAULT_FAIR_REENTRANT_CLOSE_LOCK)); this.regionLockHolders = new ConcurrentHashMap<>(); @@ -906,9 +906,9 @@ public HRegion(final HRegionFileSystem fs, final WAL wal, final Configuration co // disable stats tracking system tables, but check the config for everything else this.regionStatsEnabled = htd.getTableName().getNamespaceAsString() .equals(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR) - ? false - : conf.getBoolean(HConstants.ENABLE_CLIENT_BACKPRESSURE, - HConstants.DEFAULT_ENABLE_CLIENT_BACKPRESSURE); + ? false + : conf.getBoolean(HConstants.ENABLE_CLIENT_BACKPRESSURE, + HConstants.DEFAULT_ENABLE_CLIENT_BACKPRESSURE); this.maxCellSize = conf.getLong(HBASE_MAX_CELL_SIZE_KEY, DEFAULT_MAX_CELL_SIZE); this.miniBatchSize = diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index a4105a31bfac..d80f58edf366 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -120,6 +120,9 @@ import org.apache.hadoop.hbase.ipc.RpcServer; import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException; import org.apache.hadoop.hbase.ipc.ServerRpcController; +import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; +import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; +import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdminImpl; import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.mob.MobFileCache; import org.apache.hadoop.hbase.mob.RSMobFileCleanerChore; @@ -596,7 +599,6 @@ protected RegionServerCoprocessorHost getCoprocessorHost() { return getRegionServerCoprocessorHost(); } - @Override protected boolean canCreateBaseZNode() { return !clusterMode(); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java index a4ca20fa7311..1e5f9db2d3c3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java @@ -41,6 +41,8 @@ import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.AsyncClusterConnection; import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; +import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.master.replication.OfflineTableReplicationQueueStorage; import org.apache.hadoop.hbase.replication.ReplicationException; import org.apache.hadoop.hbase.replication.ReplicationGroupOffset; @@ -366,6 +368,14 @@ public ChoreService getChoreService() { return null; } + @Override public PBEKeyAccessor getPBEKeyAccessor() { + return null; + } + + @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() { + return null; + } + @Override public FileSystem getFileSystem() { return null; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java index e78ca7d0cdb7..8ce567d25f12 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java @@ -38,6 +38,8 @@ import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.executor.ExecutorService; import org.apache.hadoop.hbase.favored.FavoredNodesManager; +import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; +import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.master.assignment.AssignmentManager; import org.apache.hadoop.hbase.master.hbck.HbckChore; import org.apache.hadoop.hbase.master.janitor.CatalogJanitor; @@ -116,6 +118,14 @@ public ChoreService getChoreService() { return null; } + @Override public PBEKeyAccessor getPBEKeyAccessor() { + return null; + } + + @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() { + return null; + } + @Override public CatalogJanitor getCatalogJanitor() { return null; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java index a25bae6ec7bd..2b5f138a6a0f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java @@ -52,6 +52,8 @@ import org.apache.hadoop.hbase.io.hfile.BlockCache; import org.apache.hadoop.hbase.ipc.HBaseRpcController; import org.apache.hadoop.hbase.ipc.RpcServerInterface; +import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; +import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.mob.MobFileCache; import org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager; import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager; @@ -556,6 +558,14 @@ public ChoreService getChoreService() { return null; } + @Override public PBEKeyAccessor getPBEKeyAccessor() { + return null; + } + + @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() { + return null; + } + @Override public void updateRegionFavoredNodesMapping(String encodedRegionName, List favoredNodes) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java index 443019bee808..f151e20bf587 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java @@ -46,6 +46,8 @@ import org.apache.hadoop.hbase.io.hfile.CachedBlock; import org.apache.hadoop.hbase.io.hfile.ResizableBlockCache; import org.apache.hadoop.hbase.io.util.MemorySizeUtil; +import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; +import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.regionserver.HeapMemoryManager.TunerContext; import org.apache.hadoop.hbase.regionserver.HeapMemoryManager.TunerResult; import org.apache.hadoop.hbase.testclassification.MediumTests; @@ -837,6 +839,14 @@ public ChoreService getChoreService() { return null; } + @Override public PBEKeyAccessor getPBEKeyAccessor() { + return null; + } + + @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() { + return null; + } + @Override public FileSystem getFileSystem() { return null; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java index 58ffdcf91d43..44d35ae442ed 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java @@ -55,6 +55,8 @@ import org.apache.hadoop.hbase.ipc.RpcServerInterface; import org.apache.hadoop.hbase.ipc.ServerRpcController; import org.apache.hadoop.hbase.ipc.SimpleRpcServer; +import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; +import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.regionserver.RegionServerServices; import org.apache.hadoop.hbase.security.SecurityInfo; @@ -350,6 +352,14 @@ public ChoreService getChoreService() { return null; } + @Override public PBEKeyAccessor getPBEKeyAccessor() { + return null; + } + + @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() { + return null; + } + @Override public Connection createConnection(Configuration conf) throws IOException { return null; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java index 90f4a7555b93..cbf6d0773d3e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java @@ -26,6 +26,8 @@ import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.AsyncClusterConnection; import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; +import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.slf4j.Logger; @@ -100,6 +102,14 @@ public ChoreService getChoreService() { throw new UnsupportedOperationException(); } + @Override public PBEKeyAccessor getPBEKeyAccessor() { + return null; + } + + @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() { + return null; + } + @Override public FileSystem getFileSystem() { throw new UnsupportedOperationException(); diff --git a/hbase-shell/src/main/ruby/hbase/pbe_admin.rb b/hbase-shell/src/main/ruby/hbase/pbe_admin.rb index 4f8d9f1e830f..4a9dfbd7f998 100644 --- a/hbase-shell/src/main/ruby/hbase/pbe_admin.rb +++ b/hbase-shell/src/main/ruby/hbase/pbe_admin.rb @@ -21,7 +21,7 @@ module Hbase class PBEAdmin def initialize(connection) @connection = connection - @admin = org.apache.hadoop.hbase.keymeta.KeyMetaAdminClient.new(connection) + @admin = org.apache.hadoop.hbase.keymeta.PBEKeymetaAdminClient.new(connection) @hb_admin = @connection.getAdmin end @@ -30,7 +30,10 @@ def close end def pbe_enable(pbe_prefix) - @admin.enablePBE(pbe_prefix) + prefixInfo = pbe_prefix.split(':') + assert prefixInfo.length <= 2, 'Invalid prefix:namespace format' + @admin.enablePBE(prefixInfo[0], prefixInfo.length > 1? prefixInfo[1] : + org.apache.hadoop.hbase.io.crypto.PBEKeyData.KEY_NAMESPACE_GLOBAL) end end end From 2d2cdb5af1b0f3ea9780f53a55b07ea8a1eafe7f Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Mon, 10 Mar 2025 14:01:57 +0530 Subject: [PATCH 05/70] Integrated with the STK cache to cache the DEK key material --- .../hbase/keymeta/PBEKeymetaAdminClient.java | 5 +- .../hadoop/hbase/security/EncryptionUtil.java | 54 ++++++++++++++++--- .../hadoop/hbase/io/crypto/Encryption.java | 40 +++++++++----- .../hadoop/hbase/io/crypto/PBEKeyData.java | 15 +++--- .../io/crypto/PBEKeyStoreKeyProvider.java | 12 +++-- .../apache/hadoop/hbase/HBaseServerBase.java | 19 +++++-- .../hbase/MockRegionServerServices.java | 5 ++ .../java/org/apache/hadoop/hbase/Server.java | 13 +++++ .../hbase/keymeta/PBEClusterKeyAccessor.java | 54 ++++++++++++------- .../hbase/keymeta/PBEClusterKeyCache.java | 29 ++++++---- .../hadoop/hbase/keymeta/PBEKeyAccessor.java | 7 ++- .../hbase/keymeta/PBEKeymetaAdminImpl.java | 3 +- .../keymeta/PBEKeymetaTableAccessor.java | 48 +++++++++++------ .../apache/hadoop/hbase/master/HMaster.java | 3 ++ .../hbase/master/PBEClusterKeyManager.java | 8 +-- .../hbase/regionserver/HRegionServer.java | 5 ++ .../regionserver/ReplicationSyncUp.java | 5 ++ .../hbase/master/MockNoopMasterServices.java | 5 ++ .../hadoop/hbase/master/MockRegionServer.java | 5 ++ .../hbase/master/TestActiveMasterManager.java | 15 ++++++ .../cleaner/TestReplicationHFileCleaner.java | 15 ++++++ ...onProcedureStorePerformanceEvaluation.java | 15 ++++++ .../regionserver/TestHeapMemoryManager.java | 5 ++ .../token/TestTokenAuthentication.java | 5 ++ .../apache/hadoop/hbase/util/MockServer.java | 5 ++ hbase-shell/src/main/ruby/hbase/pbe_admin.rb | 11 ++-- .../main/ruby/shell/commands/pbe_enable.rb | 3 +- 27 files changed, 319 insertions(+), 90 deletions(-) diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java index e150edc143dd..5ca264a147ee 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java @@ -21,10 +21,11 @@ public PBEKeymetaAdminClient(Connection conn) throws IOException { this.stub = PBEAdminProtos.PBEAdminService.newBlockingStub(conn.getAdmin().coprocessorService()); } - @Override public PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOException { + @Override + public PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOException { try { PBEAdminResponse pbeAdminResponse = stub.enablePBE(null, - PBEAdminRequest.newBuilder().setPbePrefix(pbePrefix).build()); + PBEAdminRequest.newBuilder().setPbePrefix(pbePrefix).setKeyNamespace(keyNamespace).build()); LOG.info("Got response: " + pbeAdminResponse); return PBEKeyStatus.forValue((byte) pbeAdminResponse.getPbeStatus().getNumber()); } catch (ServiceException e) { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java index 04fc5201cc10..4d0efba8122f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java @@ -80,6 +80,20 @@ public static byte[] wrapKey(Configuration conf, byte[] key, String algorithm) * @return the encrypted key bytes */ public static byte[] wrapKey(Configuration conf, String subject, Key key) throws IOException { + return wrapKey(conf, subject, key, null); + } + + /** + * Protect a key by encrypting it with the secret key of the given subject or kek. The + * configuration must be set up correctly for key alias resolution. Only one of the + * {@code subject} or {@code kek} needs to be specified and the other one can be {@code null}. + * @param conf configuration + * @param subject subject key alias + * @param key the key + * @param kek the key encryption key + * @return the encrypted key bytes + */ + public static byte[] wrapKey(Configuration conf, String subject, Key key, Key kek) throws IOException { // Wrap the key with the configured encryption algorithm. String algorithm = conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES); Cipher cipher = Encryption.getCipher(conf, algorithm); @@ -100,8 +114,13 @@ public static byte[] wrapKey(Configuration conf, String subject, Key key) throws builder .setHash(UnsafeByteOperations.unsafeWrap(Encryption.computeCryptoKeyHash(conf, keyBytes))); ByteArrayOutputStream out = new ByteArrayOutputStream(); - Encryption.encryptWithSubjectKey(out, new ByteArrayInputStream(keyBytes), subject, conf, cipher, - iv); + if (kek != null) { + Encryption.encryptWithGivenKey(kek, out, new ByteArrayInputStream(keyBytes), cipher, iv); + } + else { + Encryption.encryptWithSubjectKey(out, new ByteArrayInputStream(keyBytes), subject, conf, + cipher, iv); + } builder.setData(UnsafeByteOperations.unsafeWrap(out.toByteArray())); // Build and return the protobuf message out.reset(); @@ -118,6 +137,21 @@ public static byte[] wrapKey(Configuration conf, String subject, Key key) throws * @return the raw key bytes */ public static Key unwrapKey(Configuration conf, String subject, byte[] value) + throws IOException, KeyException { + return unwrapKey(conf, subject, value, null); + } + + /** + * Unwrap a key by decrypting it with the secret key of the given subject. The configuration must + * be set up correctly for key alias resolution. Only one of the {@code subject} or {@code kek} + * needs to be specified and the other one can be {@code null}. + * @param conf configuration + * @param subject subject key alias + * @param value the encrypted key bytes + * @param kek the key encryption key + * @return the raw key bytes + */ + public static Key unwrapKey(Configuration conf, String subject, byte[] value, Key kek) throws IOException, KeyException { EncryptionProtos.WrappedKey wrappedKey = EncryptionProtos.WrappedKey.parser().parseDelimitedFrom(new ByteArrayInputStream(value)); @@ -126,11 +160,11 @@ public static Key unwrapKey(Configuration conf, String subject, byte[] value) if (cipher == null) { throw new RuntimeException("Cipher '" + algorithm + "' not available"); } - return getUnwrapKey(conf, subject, wrappedKey, cipher); + return getUnwrapKey(conf, subject, wrappedKey, cipher, kek); } private static Key getUnwrapKey(Configuration conf, String subject, - EncryptionProtos.WrappedKey wrappedKey, Cipher cipher) throws IOException, KeyException { + EncryptionProtos.WrappedKey wrappedKey, Cipher cipher, Key kek) throws IOException, KeyException { String configuredHashAlgorithm = Encryption.getConfiguredHashAlgorithm(conf); String wrappedHashAlgorithm = wrappedKey.getHashAlgorithm().trim(); if (!configuredHashAlgorithm.equalsIgnoreCase(wrappedHashAlgorithm)) { @@ -143,8 +177,14 @@ private static Key getUnwrapKey(Configuration conf, String subject, } ByteArrayOutputStream out = new ByteArrayOutputStream(); byte[] iv = wrappedKey.hasIv() ? wrappedKey.getIv().toByteArray() : null; - Encryption.decryptWithSubjectKey(out, wrappedKey.getData().newInput(), wrappedKey.getLength(), - subject, conf, cipher, iv); + if (kek != null) { + Encryption.decryptWithGivenKey(kek, out, wrappedKey.getData().newInput(), wrappedKey.getLength(), + cipher, iv); + } + else { + Encryption.decryptWithSubjectKey(out, wrappedKey.getData().newInput(), wrappedKey.getLength(), + subject, conf, cipher, iv); + } byte[] keyBytes = out.toByteArray(); if (wrappedKey.hasHash()) { if ( @@ -176,7 +216,7 @@ public static Key unwrapWALKey(Configuration conf, String subject, byte[] value) if (cipher == null) { throw new RuntimeException("Cipher '" + algorithm + "' not available"); } - return getUnwrapKey(conf, subject, wrappedKey, cipher); + return getUnwrapKey(conf, subject, wrappedKey, cipher, null); } /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java index 5dac1af2c27a..c0abe8ccc59f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java @@ -468,6 +468,19 @@ public static void encryptWithSubjectKey(OutputStream out, InputStream in, Strin if (key == null) { throw new IOException("No key found for subject '" + subject + "'"); } + encryptWithGivenKey(key, out, in, cipher, iv); + } + + /** + * Encrypts a block of plaintext with the specified symmetric key. + * @param key The symmetric key + * @param out ciphertext + * @param in plaintext + * @param cipher the encryption algorithm + * @param iv the initialization vector, can be null + */ + public static void encryptWithGivenKey(Key key, OutputStream out, InputStream in, + Cipher cipher, byte[] iv) throws IOException { Encryptor e = cipher.getEncryptor(); e.setKey(key); e.setIv(iv); // can be null @@ -490,19 +503,16 @@ public static void decryptWithSubjectKey(OutputStream out, InputStream in, int o if (key == null) { throw new IOException("No key found for subject '" + subject + "'"); } - Decryptor d = cipher.getDecryptor(); - d.setKey(key); - d.setIv(iv); // can be null try { - decrypt(out, in, outLen, d); + decryptWithGivenKey(key, out, in, outLen, cipher, iv); } catch (IOException e) { // If the current cipher algorithm fails to unwrap, try the alternate cipher algorithm, if one // is configured String alternateAlgorithm = conf.get(HConstants.CRYPTO_ALTERNATE_KEY_ALGORITHM_CONF_KEY); if (alternateAlgorithm != null) { if (LOG.isDebugEnabled()) { - LOG.debug("Unable to decrypt data with current cipher algorithm '" - + conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES) + LOG.debug("Unable to decrypt data with current cipher algorithm '" + conf.get( + HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES) + "'. Trying with the alternate cipher algorithm '" + alternateAlgorithm + "' configured."); } @@ -510,16 +520,22 @@ public static void decryptWithSubjectKey(OutputStream out, InputStream in, int o if (alterCipher == null) { throw new RuntimeException("Cipher '" + alternateAlgorithm + "' not available"); } - d = alterCipher.getDecryptor(); - d.setKey(key); - d.setIv(iv); // can be null - decrypt(out, in, outLen, d); - } else { - throw new IOException(e); + decryptWithGivenKey(key, out, in, outLen, alterCipher, iv); + } + else { + throw e; } } } + public static void decryptWithGivenKey(Key key, OutputStream out, InputStream in, int outLen, + Cipher cipher, byte[] iv) throws IOException { + Decryptor d = cipher.getDecryptor(); + d.setKey(key); + d.setIv(iv); // can be null + decrypt(out, in, outLen, d); + } + private static ClassLoader getClassLoaderForClass(Class c) { ClassLoader cl = Thread.currentThread().getContextClassLoader(); if (cl == null) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java index 13cb51d72041..865966ee46cb 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java @@ -166,14 +166,17 @@ public long getRefreshTimestamp() { */ public long getKeyChecksum() { if (keyChecksum == 0) { - DataChecksum dataChecksum = DataChecksum.newDataChecksum(DataChecksum.Type.CRC32C, 16); - byte[] data = theKey.getEncoded(); - dataChecksum.update(data, 0, data.length); - keyChecksum = dataChecksum.getValue(); + keyChecksum = constructKeyChecksum(theKey.getEncoded()); } return keyChecksum; } + public static long constructKeyChecksum(byte[] data) { + DataChecksum dataChecksum = DataChecksum.newDataChecksum(DataChecksum.Type.CRC32C, 16); + dataChecksum.update(data, 0, data.length); + return dataChecksum.getValue(); + } + /** * Computes the hash of the key metadata. If the hash has already been computed, this method * returns the previously computed value. The hash is computed using the MD5 algorithm. @@ -182,12 +185,12 @@ public long getKeyChecksum() { */ public byte[] getKeyMetadataHash() { if (keyMetadataHash == null) { - keyMetadataHash = makeMetadataHash(keyMetadata); + keyMetadataHash = constructMetadataHash(keyMetadata); } return keyMetadataHash; } - public static byte[] makeMetadataHash(String metadata) { + public static byte[] constructMetadataHash(String metadata) { MessageDigest md5; try { md5 = MessageDigest.getInstance("MD5"); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java index f801bc9d7836..4c958a30b01d 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java @@ -17,11 +17,13 @@ public class PBEKeyStoreKeyProvider extends KeyStoreKeyProvider implements PBEKe private Configuration conf; - @Override public void initConfig(Configuration conf) { + @Override + public void initConfig(Configuration conf) { this.conf = conf; } - @Override public PBEKeyData getClusterKey(byte[] clusterId) { + @Override + public PBEKeyData getClusterKey(byte[] clusterId) { checkConfig(); String masterKeyAlias = conf.get(HConstants.CRYPTO_PBE_MASTERKEY_NAME_CONF_KEY, null); if (masterKeyAlias == null) { @@ -38,7 +40,8 @@ public class PBEKeyStoreKeyProvider extends KeyStoreKeyProvider implements PBEKe keyMetadata); } - @Override public PBEKeyData getPBEKey(byte[] pbe_prefix, String key_namespace) throws IOException { + @Override + public PBEKeyData getPBEKey(byte[] pbe_prefix, String key_namespace) throws IOException { checkConfig(); String encodedPrefix = Base64.getEncoder().encodeToString(pbe_prefix); String aliasConfKey = HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encodedPrefix + "." + @@ -47,7 +50,8 @@ public class PBEKeyStoreKeyProvider extends KeyStoreKeyProvider implements PBEKe return unwrapKey(keyMetadata); } - @Override public PBEKeyData unwrapKey(String keyMetadataStr) throws IOException { + @Override + public PBEKeyData unwrapKey(String keyMetadataStr) throws IOException { Map keyMetadata = GsonUtil.getDefaultInstance().fromJson(keyMetadataStr, HashMap.class); String encodedPrefix = keyMetadata.get(KEY_METADATA_PREFIX); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java index 30bcd40a7a0a..ddf0fe578cff 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java @@ -52,6 +52,8 @@ import org.apache.hadoop.hbase.http.InfoServer; import org.apache.hadoop.hbase.io.util.MemorySizeUtil; import org.apache.hadoop.hbase.ipc.RpcServerInterface; +import org.apache.hadoop.hbase.keymeta.PBEClusterKeyAccessor; +import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdminImpl; @@ -190,8 +192,9 @@ public abstract class HBaseServerBase> extends protected final NettyEventLoopGroupConfig eventLoopGroupConfig; - private PBEKeymetaAdminImpl pbeKeymetaAdmin; - private PBEKeyAccessor pbeKeyAccessor; + private PBEClusterKeyCache pbeClusterKeyCache; + protected PBEKeymetaAdminImpl pbeKeymetaAdmin; + protected PBEKeyAccessor pbeKeyAccessor; private void setupSignalHandlers() { if (!SystemUtils.IS_OS_WINDOWS) { @@ -290,7 +293,6 @@ public HBaseServerBase(Configuration conf, String name) throws IOException { initializeFileSystem(); pbeKeymetaAdmin = new PBEKeymetaAdminImpl(this); - pbeKeyAccessor = new PBEKeyAccessor(pbeKeymetaAdmin); int choreServiceInitialSize = conf.getInt(CHORE_SERVICE_INITIAL_POOL_SIZE, DEFAULT_CHORE_SERVICE_INITIAL_POOL_SIZE); @@ -422,6 +424,17 @@ public PBEKeyAccessor getPBEKeyAccessor() { return pbeKeyAccessor; } + @Override + public PBEClusterKeyCache getPBEClusterKeyCache() { + return pbeClusterKeyCache; + } + + protected void buildPBEClusterKeyCache() throws IOException { + if (pbeClusterKeyCache == null) { + pbeClusterKeyCache = PBEClusterKeyCache.createCache(new PBEClusterKeyAccessor(this)); + } + } + protected final void shutdownChore(ScheduledChore chore) { if (chore != null) { chore.shutdown(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java index d0820464ad5d..c14a28cd8f9d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java @@ -38,6 +38,7 @@ import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.hfile.BlockCache; import org.apache.hadoop.hbase.ipc.RpcServerInterface; +import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.mob.MobFileCache; @@ -258,6 +259,10 @@ public ChoreService getChoreService() { return null; } + @Override public PBEClusterKeyCache getPBEClusterKeyCache() { + return null; + } + @Override public PBEKeyAccessor getPBEKeyAccessor() { return null; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java index 57ac18bcd0a6..dcca89e8b2fb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java @@ -23,6 +23,7 @@ import org.apache.hadoop.hbase.client.AsyncClusterConnection; import org.apache.hadoop.hbase.client.AsyncConnection; import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; @@ -85,7 +86,19 @@ default AsyncConnection getAsyncConnection() { /** Returns The {@link ChoreService} instance for this server */ ChoreService getChoreService(); + /** + * @return the cache for cluster keys. + */ + public PBEClusterKeyCache getPBEClusterKeyCache(); + + /** + * @return the accessor for cluster keys. + */ public PBEKeyAccessor getPBEKeyAccessor(); + + /** + * @return the admin for keymeta. + */ public PBEKeymetaAdmin getPBEKeymetaAdmin(); /** Returns Return the FileSystem object used (can return null!). */ diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyAccessor.java index 1e3ee5bbde5e..ef62e92d93c6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyAccessor.java @@ -28,7 +28,10 @@ import org.apache.yetus.audience.InterfaceAudience; import java.io.IOException; import java.util.ArrayList; +import java.util.Comparator; import java.util.List; +import java.util.Map; +import java.util.TreeMap; import static org.apache.hadoop.hbase.HConstants.CLUSTER_KEY_FILE_PREFIX; @InterfaceAudience.Private @@ -44,24 +47,36 @@ public Path getLatestClusterKeyFile() throws IOException { if (! isPBEEnabled()) { return null; } - int currentMaxSeqNum = findLatestKeySequence(getAllClusterKeys()); + List allClusterKeyFiles = getAllClusterKeyFiles(); + if (allClusterKeyFiles.isEmpty()) { + throw new RuntimeException("No cluster key initialized yet"); + } + int currentMaxSeqNum = extractKeySequence(allClusterKeyFiles.get(0)); return new Path(clusterKeyDir, CLUSTER_KEY_FILE_PREFIX + currentMaxSeqNum); } - public List getAllClusterKeys() throws IOException { - if (! isPBEEnabled()) { + /** + * Return all available cluster key files and return them in the order of latest to oldest. + * If no cluster key files are available, then return an empty list. If PBE is not enabled, + * then return null. + * + * @return a list of all available cluster key files + * @throws IOException + */ + public List getAllClusterKeyFiles() throws IOException { + if (!isPBEEnabled()) { return null; } FileSystem fs = server.getFileSystem(); - List clusterKeys = new ArrayList<>(); - for (FileStatus st: fs.globStatus(new Path(clusterKeyDir, CLUSTER_KEY_FILE_PREFIX + "*"))) { + Map clusterKeys = new TreeMap<>(Comparator.reverseOrder()); + for (FileStatus st : fs.globStatus(new Path(clusterKeyDir, CLUSTER_KEY_FILE_PREFIX + "*"))) { Path keyPath = st.getPath(); - extractClusterKeySeqNum(keyPath); // Just check for validity. - clusterKeys.add(keyPath); + int seqNum = extractClusterKeySeqNum(keyPath); + clusterKeys.put(seqNum, keyPath); } - return clusterKeys; - } + return new ArrayList<>(clusterKeys.values()); + } public PBEKeyData loadClusterKey(Path keyPath) throws IOException { PBEKeyProvider provider = getKeyProvider(); return provider.unwrapKey(loadKeyMetadata(keyPath)); @@ -79,17 +94,18 @@ public int extractClusterKeySeqNum(Path keyPath) throws IOException { throw new IOException("Couldn't parse key file name: " + keyPath.getName()); } - protected int findLatestKeySequence(List clusterKeys) throws IOException { - int maxKeySeq = -1; - for (Path keyPath: clusterKeys) { - if (keyPath.getName().startsWith(CLUSTER_KEY_FILE_PREFIX)) { - int keySeq = Integer.valueOf(keyPath.getName().substring(CLUSTER_KEY_FILE_PREFIX.length())); - if (keySeq > maxKeySeq) { - maxKeySeq = keySeq; - } - } + /** + * Extract the key sequence number from the cluster key file name. + * @param clusterKeyFile + * @return The sequence or {@code -1} if not a valid sequence file. + * @throws IOException + */ + protected int extractKeySequence(Path clusterKeyFile) throws IOException { + int keySeq = -1; + if (clusterKeyFile.getName().startsWith(CLUSTER_KEY_FILE_PREFIX)) { + keySeq = Integer.valueOf(clusterKeyFile.getName().substring(CLUSTER_KEY_FILE_PREFIX.length())); } - return maxKeySeq; + return keySeq; } protected String loadKeyMetadata(Path keyPath) throws IOException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyCache.java index 9ac76164d1e3..a77583ecf35b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyCache.java @@ -23,32 +23,39 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; -import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.TreeMap; @InterfaceAudience.Private public class PBEClusterKeyCache { private static final Logger LOG = LoggerFactory.getLogger(PBEClusterKeyCache.class); - private PBEKeyData latestClusterKey; - private Map clusterKeys = new HashMap<>(); + private final PBEKeyData latestClusterKey; + private final Map clusterKeys; - public PBEClusterKeyCache createCache(PBEClusterKeyAccessor accessor) throws IOException { - List allClusterKeys = accessor.getAllClusterKeys(); - int latestKeySequence = accessor.findLatestKeySequence(allClusterKeys); + /** + * Construct the Cluster Key cache from the specified accessor. + * @param accessor + * @return the cache or {@code null} if no keys are found. + * @throws IOException + */ + public static PBEClusterKeyCache createCache(PBEClusterKeyAccessor accessor) throws IOException { + List allClusterKeyFiles = accessor.getAllClusterKeyFiles(); + if (allClusterKeyFiles.isEmpty()) { + LOG.warn("No cluster key files found, skipping cache creation"); + return null; + } PBEKeyData latestClusterKey = null; - for (Path keyPath: allClusterKeys) { + Map clusterKeys = new TreeMap<>(); + for (Path keyPath: allClusterKeyFiles) { LOG.info("Loading cluster key from: {}", keyPath); PBEKeyData keyData = accessor.loadClusterKey(keyPath); - if (accessor.extractClusterKeySeqNum(keyPath) == latestKeySequence) { + if (latestClusterKey == null) { latestClusterKey = keyData; } clusterKeys.put(keyData.getKeyChecksum(), keyData); } - if (latestClusterKey == null) { - throw new RuntimeException("Expected to find a key for sequence: " + latestKeySequence); - } return new PBEClusterKeyCache(clusterKeys, latestClusterKey); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java index 0aefeddeca9b..5d79875a2c46 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java @@ -20,6 +20,7 @@ import org.apache.hadoop.hbase.io.crypto.PBEKeyData; import org.apache.yetus.audience.InterfaceAudience; import java.io.IOException; +import java.security.KeyException; import java.util.List; /** @@ -46,7 +47,8 @@ public PBEKeyAccessor(PBEKeymetaTableAccessor keymetaAccessor) { * @return The key data * @throws IOException if an error occurs while retrieving the key */ - public PBEKeyData getKey(byte[] pbePrefix, String keyNamespace, String keyMetadata) throws IOException { + public PBEKeyData getKey(byte[] pbePrefix, String keyNamespace, String keyMetadata) + throws IOException, KeyException { PBEKeyData keyData = keyDataCache.getEntry(keyMetadata); if (keyData == null) { keyData = keymetaAccessor.getKey(pbePrefix, keyNamespace, keyMetadata); @@ -63,7 +65,8 @@ public PBEKeyData getKey(byte[] pbePrefix, String keyNamespace, String keyMetada * @return The key data * @throws IOException if an error occurs while retrieving the key */ - public PBEKeyData getAnActiveKey(byte[] pbePrefix, String keyNamespace) throws IOException { + public PBEKeyData getAnActiveKey(byte[] pbePrefix, String keyNamespace) + throws IOException, KeyException { PBEKeyData keyData = keyDataCache.getRandomEntryForPrefix(pbePrefix, keyNamespace); if (keyData == null) { List activeKeys = keymetaAccessor.getActiveKeys(pbePrefix, keyNamespace); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java index f4a4a6b87b1e..c005d5643b20 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java @@ -50,7 +50,8 @@ public PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOEx } PBEKeyProvider provider = getKeyProvider(); PBEKeyData pbeKey = provider.getPBEKey(pbe_prefix, keyNamespace); - LOG.info("Got key data with status: {} for prefix: {}", pbeKey.getKeyStatus(), pbePrefix); + LOG.info("Got key data with status: {} and metadata: {} for prefix: {}", pbeKey.getKeyStatus(), + pbeKey.getKeyMetadata(), pbePrefix); addKey(pbeKey); return pbeKey.getKeyStatus(); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java index f42370c9fc48..7704f7384d12 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java @@ -32,10 +32,13 @@ import org.apache.hadoop.hbase.filter.PrefixFilter; import org.apache.hadoop.hbase.io.crypto.PBEKeyData; import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus; +import org.apache.hadoop.hbase.security.EncryptionUtil; import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.yetus.audience.InterfaceAudience; +import javax.crypto.spec.SecretKeySpec; import java.io.IOException; +import java.security.Key; +import java.security.KeyException; import java.util.ArrayList; import java.util.List; @@ -83,7 +86,8 @@ public void addKey(PBEKeyData keyData) throws IOException { } } - public List getActiveKeys(byte[] pbePrefix, String keyNamespace) throws IOException { + public List getActiveKeys(byte[] pbePrefix, String keyNamespace) + throws IOException, KeyException { Connection connection = server.getConnection(); byte[] prefixForScan = Bytes.add(Bytes.toBytes(pbePrefix.length), pbePrefix, Bytes.toBytes(keyNamespace)); @@ -107,26 +111,31 @@ public List getActiveKeys(byte[] pbePrefix, String keyNamespace) thr } public PBEKeyData getKey(byte[] pbePrefix, String keyNamespace, String keyMetadata) - throws IOException { + throws IOException, KeyException { Connection connection = server.getConnection(); try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { byte[] rowKey = constructRowKeyForMetadata(pbePrefix, keyNamespace, - PBEKeyData.makeMetadataHash(keyMetadata)); + PBEKeyData.constructMetadataHash(keyMetadata)); Result result = table.get(new Get(rowKey)); return parseFromResult(pbePrefix, keyNamespace, result); } } - private Put addMutationColumns(Put put, PBEKeyData keyData) { + private Put addMutationColumns(Put put, PBEKeyData keyData) throws IOException { + PBEKeyData latestClusterKey = server.getPBEClusterKeyCache().getLatestClusterKey(); if (keyData.getTheKey() != null) { + byte[] dekWrappedBySTK = EncryptionUtil.wrapKey(server.getConfiguration(), null, + keyData.getTheKey(), latestClusterKey.getTheKey()); put.addColumn(KEY_META_INFO_FAMILY, DEK_CHECKSUM_QUAL_BYTES, - Bytes.toBytes(keyData.getKeyChecksum())); + Bytes.toBytes(keyData.getKeyChecksum())) + .addColumn(KEY_META_INFO_FAMILY, DEK_WRAPPED_BY_STK_QUAL_BYTES, dekWrappedBySTK) + ; } return put.setDurability(Durability.SKIP_WAL) .setPriority(HConstants.SYSTEMTABLE_QOS) .addColumn(KEY_META_INFO_FAMILY, DEK_METADATA_QUAL_BYTES, keyData.getKeyMetadata().getBytes()) - //.addColumn(KEY_META_INFO_FAMILY, DEK_WRAPPED_BY_STK_QUAL_BYTES, null) - //.addColumn(KEY_META_INFO_FAMILY, STK_CHECKSUM_QUAL_BYTES, null) + .addColumn(KEY_META_INFO_FAMILY, STK_CHECKSUM_QUAL_BYTES, + Bytes.toBytes(latestClusterKey.getKeyChecksum())) .addColumn(KEY_META_INFO_FAMILY, REFRESHED_TIMESTAMP_QUAL_BYTES, Bytes.toBytes(keyData.getRefreshTimestamp())) .addColumn(KEY_META_INFO_FAMILY, KEY_STATUS_QUAL_BYTES, @@ -146,7 +155,8 @@ private static byte[] constructRowKeyForMetadata(byte[] pbePrefix, String keyNam keyMetadataHash); } - private PBEKeyData parseFromResult(byte[] pbePrefix, String keyNamespace, Result result) { + private PBEKeyData parseFromResult(byte[] pbePrefix, String keyNamespace, Result result) + throws IOException, KeyException { if (result == null || result.isEmpty()) { return null; } @@ -154,12 +164,20 @@ private PBEKeyData parseFromResult(byte[] pbePrefix, String keyNamespace, Result result.getValue(KEY_META_INFO_FAMILY, KEY_STATUS_QUAL_BYTES)[0]); String dekMetadata = Bytes.toString(result.getValue(KEY_META_INFO_FAMILY, DEK_METADATA_QUAL_BYTES)); - long refreshedTimestamp = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY, REFRESHED_TIMESTAMP_QUAL_BYTES)); + long refreshedTimestamp = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY, + REFRESHED_TIMESTAMP_QUAL_BYTES)); byte[] dekChecksum = result.getValue(KEY_META_INFO_FAMILY, DEK_CHECKSUM_QUAL_BYTES); - //byte[] dekWrappedByStk = result.getValue(KEY_META_INFO_FAMILY, DEK_WRAPPED_BY_STK_QUAL_BYTES); - //byte[] stkChecksum = result.getValue(KEY_META_INFO_FAMILY, STK_CHECKSUM_QUAL_BYTES); - - return new PBEKeyData(pbePrefix, keyNamespace, null, keyStatus, dekMetadata, - refreshedTimestamp); + byte[] dekWrappedByStk = result.getValue(KEY_META_INFO_FAMILY, DEK_WRAPPED_BY_STK_QUAL_BYTES); + long stkChecksum = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY, STK_CHECKSUM_QUAL_BYTES)); + + PBEKeyData clusterKey = server.getPBEClusterKeyCache().getClusterKeyByChecksum(stkChecksum); + Key dek = EncryptionUtil.unwrapKey(server.getConfiguration(), null, dekWrappedByStk, + clusterKey.getTheKey()); + PBEKeyData dekKeyData = + new PBEKeyData(pbePrefix, keyNamespace, dek, keyStatus, dekMetadata, refreshedTimestamp); + if (!Bytes.equals(dekKeyData.getKeyMetadataHash(), dekChecksum)) { + throw new RuntimeException("Key has didn't match for key with metadata" + dekMetadata); + } + return dekKeyData; } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java index 6eac904d6e4c..6a93fd2ab667 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java @@ -122,6 +122,8 @@ import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils; import org.apache.hadoop.hbase.ipc.RpcServer; import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException; +import org.apache.hadoop.hbase.keymeta.PBEClusterKeyAccessor; +import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; import org.apache.hadoop.hbase.keymeta.PBEKeymetaMasterService; import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.master.MasterRpcServices.BalanceSwitchMode; @@ -997,6 +999,7 @@ private void finishActiveMasterInitialization() throws IOException, InterruptedE pbeClusterKeyManager = new PBEClusterKeyManager(this); pbeClusterKeyManager.ensureClusterKeyInitialized(); + buildPBEClusterKeyCache(); // Precaution. Put in place the old hbck1 lock file to fence out old hbase1s running their // hbck1s against an hbase2 cluster; it could do damage. To skip this behavior, set diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/PBEClusterKeyManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/PBEClusterKeyManager.java index f3e0f60a8c61..3a8c821fd872 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/PBEClusterKeyManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/PBEClusterKeyManager.java @@ -41,11 +41,11 @@ public void ensureClusterKeyInitialized() throws IOException { if (! isPBEEnabled()) { return; } - List clusterKeys = getAllClusterKeys(); + List clusterKeys = getAllClusterKeyFiles(); if (clusterKeys.isEmpty()) { LOG.info("Initializing Cluster Key for the first time"); // Double check for cluster key as another HMaster might have succeeded. - if (rotateClusterKey(null) == null && getAllClusterKeys().isEmpty()) { + if (rotateClusterKey(null) == null && getAllClusterKeyFiles().isEmpty()) { throw new RuntimeException("Failed to generate or save Cluster Key"); } } @@ -82,7 +82,9 @@ private PBEKeyData rotateClusterKey(String currentKeyMetadata) throws IOExceptio } private boolean saveLatestClusterKey(String keyMetadata) throws IOException { - int nextClusterKeySeq = findLatestKeySequence(getAllClusterKeys()) + 1; + List allClusterKeyFiles = getAllClusterKeyFiles(); + int nextClusterKeySeq = (allClusterKeyFiles.isEmpty() ? -1 + : extractKeySequence(allClusterKeyFiles.get(0))) + 1; LOG.info("Trying to save a new cluster key at seq: {}", nextClusterKeySeq); MasterFileSystem masterFS = master.getMasterFileSystem(); Path nextClusterKeyPath = new Path(clusterKeyDir, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index d80f58edf366..eb5d9e31b8fa 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -120,6 +120,8 @@ import org.apache.hadoop.hbase.ipc.RpcServer; import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException; import org.apache.hadoop.hbase.ipc.ServerRpcController; +import org.apache.hadoop.hbase.keymeta.PBEClusterKeyAccessor; +import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdminImpl; @@ -1451,6 +1453,9 @@ protected void handleReportForDutyResponse(final RegionServerStartupResponse c) initializeFileSystem(); } + buildPBEClusterKeyCache(); + pbeKeyAccessor = new PBEKeyAccessor(pbeKeymetaAdmin); + // hack! Maps DFSClient => RegionServer for logs. HDFS made this // config param for task trackers, but we can piggyback off of it. if (this.conf.get("mapreduce.task.attempt.id") == null) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java index 1e5f9db2d3c3..a42c24e1a040 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java @@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.AsyncClusterConnection; import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.master.replication.OfflineTableReplicationQueueStorage; @@ -368,6 +369,10 @@ public ChoreService getChoreService() { return null; } + @Override public PBEClusterKeyCache getPBEClusterKeyCache() { + return null; + } + @Override public PBEKeyAccessor getPBEKeyAccessor() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java index 8ce567d25f12..d9b384a1d98e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java @@ -38,6 +38,7 @@ import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.executor.ExecutorService; import org.apache.hadoop.hbase.favored.FavoredNodesManager; +import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.master.assignment.AssignmentManager; @@ -118,6 +119,10 @@ public ChoreService getChoreService() { return null; } + @Override public PBEClusterKeyCache getPBEClusterKeyCache() { + return null; + } + @Override public PBEKeyAccessor getPBEKeyAccessor() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java index 2b5f138a6a0f..0cb5ea436949 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java @@ -52,6 +52,7 @@ import org.apache.hadoop.hbase.io.hfile.BlockCache; import org.apache.hadoop.hbase.ipc.HBaseRpcController; import org.apache.hadoop.hbase.ipc.RpcServerInterface; +import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.mob.MobFileCache; @@ -558,6 +559,10 @@ public ChoreService getChoreService() { return null; } + @Override public PBEClusterKeyCache getPBEClusterKeyCache() { + return null; + } + @Override public PBEKeyAccessor getPBEKeyAccessor() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java index fcb67ed31b47..f850ab0f52b3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java @@ -33,6 +33,9 @@ import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.ServerName; +import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; +import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; +import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.monitoring.MonitoredTask; import org.apache.hadoop.hbase.monitoring.TaskGroup; import org.apache.hadoop.hbase.testclassification.MasterTests; @@ -327,5 +330,17 @@ public ClusterStatusTracker getClusterStatusTracker() { public ActiveMasterManager getActiveMasterManager() { return activeMasterManager; } + + @Override public PBEClusterKeyCache getPBEClusterKeyCache() { + return null; + } + + @Override public PBEKeyAccessor getPBEKeyAccessor() { + return null; + } + + @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() { + return null; + } } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java index da1bc04d7e03..69de03d223cd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java @@ -38,6 +38,9 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.TableDescriptor; +import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; +import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; +import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.replication.ReplicationException; import org.apache.hadoop.hbase.replication.ReplicationFactory; @@ -215,6 +218,18 @@ public Connection getConnection() { } } + @Override public PBEClusterKeyCache getPBEClusterKeyCache() { + return null; + } + + @Override public PBEKeyAccessor getPBEKeyAccessor() { + return null; + } + + @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() { + return null; + } + @Override public FileSystem getFileSystem() { try { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStorePerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStorePerformanceEvaluation.java index 3c55696080e3..c4309acea7d4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStorePerformanceEvaluation.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStorePerformanceEvaluation.java @@ -24,6 +24,9 @@ import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.io.util.MemorySizeUtil; +import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; +import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; +import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.master.region.MasterRegion; import org.apache.hadoop.hbase.master.region.MasterRegionFactory; import org.apache.hadoop.hbase.procedure2.store.ProcedureStorePerformanceEvaluation; @@ -57,6 +60,18 @@ public Configuration getConfiguration() { public ServerName getServerName() { return serverName; } + + @Override public PBEClusterKeyCache getPBEClusterKeyCache() { + return null; + } + + @Override public PBEKeyAccessor getPBEKeyAccessor() { + return null; + } + + @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() { + return null; + } } private MasterRegion region; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java index f151e20bf587..81c9cb075db5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java @@ -46,6 +46,7 @@ import org.apache.hadoop.hbase.io.hfile.CachedBlock; import org.apache.hadoop.hbase.io.hfile.ResizableBlockCache; import org.apache.hadoop.hbase.io.util.MemorySizeUtil; +import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.regionserver.HeapMemoryManager.TunerContext; @@ -839,6 +840,10 @@ public ChoreService getChoreService() { return null; } + @Override public PBEClusterKeyCache getPBEClusterKeyCache() { + return null; + } + @Override public PBEKeyAccessor getPBEKeyAccessor() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java index 44d35ae442ed..8d8b2f177378 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java @@ -55,6 +55,7 @@ import org.apache.hadoop.hbase.ipc.RpcServerInterface; import org.apache.hadoop.hbase.ipc.ServerRpcController; import org.apache.hadoop.hbase.ipc.SimpleRpcServer; +import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.log.HBaseMarkers; @@ -352,6 +353,10 @@ public ChoreService getChoreService() { return null; } + @Override public PBEClusterKeyCache getPBEClusterKeyCache() { + return null; + } + @Override public PBEKeyAccessor getPBEKeyAccessor() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java index cbf6d0773d3e..3896a2a4348e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java @@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.AsyncClusterConnection; import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.log.HBaseMarkers; @@ -102,6 +103,10 @@ public ChoreService getChoreService() { throw new UnsupportedOperationException(); } + @Override public PBEClusterKeyCache getPBEClusterKeyCache() { + return null; + } + @Override public PBEKeyAccessor getPBEKeyAccessor() { return null; } diff --git a/hbase-shell/src/main/ruby/hbase/pbe_admin.rb b/hbase-shell/src/main/ruby/hbase/pbe_admin.rb index 4a9dfbd7f998..e00f7ca138ce 100644 --- a/hbase-shell/src/main/ruby/hbase/pbe_admin.rb +++ b/hbase-shell/src/main/ruby/hbase/pbe_admin.rb @@ -16,12 +16,14 @@ # include Java +java_import org.apache.hadoop.hbase.io.crypto.PBEKeyData +java_import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdminClient module Hbase class PBEAdmin def initialize(connection) @connection = connection - @admin = org.apache.hadoop.hbase.keymeta.PBEKeymetaAdminClient.new(connection) + @admin = PBEKeymetaAdminClient.new(connection) @hb_admin = @connection.getAdmin end @@ -31,9 +33,10 @@ def close def pbe_enable(pbe_prefix) prefixInfo = pbe_prefix.split(':') - assert prefixInfo.length <= 2, 'Invalid prefix:namespace format' - @admin.enablePBE(prefixInfo[0], prefixInfo.length > 1? prefixInfo[1] : - org.apache.hadoop.hbase.io.crypto.PBEKeyData.KEY_NAMESPACE_GLOBAL) + raise(ArgumentError, 'Invalid prefix:namespace format') unless (prefixInfo.length == 1 || + prefixInfo.length == 2) + @admin.enablePBE(prefixInfo[0], prefixInfo.length > 1 ? prefixInfo[1] : + PBEKeyData::KEY_NAMESPACE_GLOBAL) end end end diff --git a/hbase-shell/src/main/ruby/shell/commands/pbe_enable.rb b/hbase-shell/src/main/ruby/shell/commands/pbe_enable.rb index e74fdc607083..ccaba7762470 100644 --- a/hbase-shell/src/main/ruby/shell/commands/pbe_enable.rb +++ b/hbase-shell/src/main/ruby/shell/commands/pbe_enable.rb @@ -20,7 +20,8 @@ module Commands class PbeEnable < Command def help <<-EOF -Enable PBE for a given prefix (in Base64 format) +Enable PBE for a given prefix:namespace (prefix in Base64 format). +If no namespace is specified, the global namespace (*) is used. EOF end From 995640c0bac791624b1605cf68b3163b29463036 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Tue, 11 Mar 2025 15:32:07 +0530 Subject: [PATCH 06/70] Added key listing to do end2end testing for key decryption with STK --- .../hbase/keymeta/PBEKeymetaAdminClient.java | 24 ++++++ .../hadoop/hbase/io/crypto/PBEKeyData.java | 54 ++++++++---- .../hadoop/hbase/keymeta/PBEKeymetaAdmin.java | 14 +++ .../io/crypto/TestPBEKeyStoreKeyProvider.java | 2 +- .../src/main/protobuf/server/PBEAdmin.proto | 8 ++ .../hadoop/hbase/keymeta/PBEKeyAccessor.java | 7 +- .../hadoop/hbase/keymeta/PBEKeyDataCache.java | 4 +- .../hbase/keymeta/PBEKeymetaAdminImpl.java | 33 +++++-- .../keymeta/PBEKeymetaServiceEndpoint.java | 86 +++++++++++++++---- .../keymeta/PBEKeymetaTableAccessor.java | 50 ++++++++--- hbase-shell/src/main/ruby/hbase/pbe_admin.rb | 14 ++- hbase-shell/src/main/ruby/shell.rb | 1 + .../ruby/shell/commands/pbe_get_statuses.rb | 45 ++++++++++ 13 files changed, 281 insertions(+), 61 deletions(-) create mode 100644 hbase-shell/src/main/ruby/shell/commands/pbe_get_statuses.rb diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java index 5ca264a147ee..b39e7768e0b3 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java @@ -1,6 +1,7 @@ package org.apache.hadoop.hbase.keymeta; import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.io.crypto.PBEKeyData; import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus; import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos; import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminRequest; @@ -11,6 +12,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; +import java.security.KeyException; +import java.util.ArrayList; +import java.util.List; @InterfaceAudience.Public public class PBEKeymetaAdminClient implements PBEKeymetaAdmin { @@ -32,4 +36,24 @@ public PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOEx throw ProtobufUtil.handleRemoteException(e); } } + + @Override public List getPBEKeyStatuses(String pbePrefix, String keyNamespace) + throws IOException, KeyException { + List keyStatuses = new ArrayList<>(); + try { + PBEAdminProtos.PBEGetStatusResponse statusResponse = stub.getPBEStatuses(null, + PBEAdminRequest.newBuilder().setPbePrefix(pbePrefix).setKeyNamespace(keyNamespace).build()); + for (PBEAdminResponse status: statusResponse.getStatusList()) { + keyStatuses.add(new PBEKeyData( + status.getPbePrefixBytes().toByteArray(), + status.getKeyNamespace(), null, + PBEKeyStatus.forValue((byte) status.getPbeStatus().getNumber()), + status.getKeyMetadata(), + status.getRefreshTimestamp())); + } + } catch (ServiceException e) { + throw ProtobufUtil.handleRemoteException(e); + } + return keyStatuses; + } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java index 865966ee46cb..34524dd6703f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java @@ -26,6 +26,8 @@ import java.security.Key; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; +import java.util.Arrays; +import java.util.Base64; /** * This class represents an encryption key data which includes the key itself, its status, metadata @@ -51,8 +53,8 @@ public class PBEKeyData { public static final String KEY_NAMESPACE_GLOBAL = "*"; - private byte[] pbe_prefix; - private String key_namespace; + private byte[] pbePrefix; + private String keyNamespace; private Key theKey; private PBEKeyStatus keyStatus; private String keyMetadata; @@ -92,8 +94,8 @@ public PBEKeyData(byte[] pbe_prefix, String key_namespace, Key theKey, PBEKeySta Preconditions.checkNotNull(keyStatus, "keyStatus should not be null"); Preconditions.checkNotNull(keyMetadata, "keyMetadata should not be null"); - this.pbe_prefix = pbe_prefix; - this.key_namespace = key_namespace; + this.pbePrefix = pbe_prefix; + this.keyNamespace = key_namespace; this.theKey = theKey; this.keyStatus = keyStatus; this.keyMetadata = keyMetadata; @@ -105,26 +107,26 @@ public PBEKeyData(byte[] pbe_prefix, String key_namespace, Key theKey, PBEKeySta * * @return The PBE prefix as a byte array. */ - public byte[] getPbe_prefix() { - return pbe_prefix; + public byte[] getPBEPrefix() { + return pbePrefix; } /** - * Returns the namespace associated with the key. - * - * @return The namespace as a {@code String}. + * Return the PBE prefix in Base64 encoded form. + * @return the encoded PBE prefix. */ - public String getKeyNamespace() { - return key_namespace; + public String getPBEPrefixEncoded() { + return Base64.getEncoder().encodeToString(pbePrefix); } + /** * Returns the namespace associated with the key. * * @return The namespace as a {@code String}. */ - public String getKey_namespace() { - return key_namespace; + public String getKeyNamespace() { + return keyNamespace; } /** @@ -154,6 +156,12 @@ public String getKeyMetadata() { return keyMetadata; } + @Override public String toString() { + return "PBEKeyData{" + "pbePrefix=" + Arrays.toString(pbePrefix) + ", keyNamespace='" + + keyNamespace + '\'' + ", keyStatus=" + keyStatus + ", keyMetadata='" + keyMetadata + '\'' + + ", refreshTimestamp=" + refreshTimestamp + '}'; + } + public long getRefreshTimestamp() { return refreshTimestamp; } @@ -190,6 +198,18 @@ public byte[] getKeyMetadataHash() { return keyMetadataHash; } + /** + * Return the hash of key metadata in Base64 encoded form. + * @return the encoded hash or {@code null} if no meatadata is available. + */ + public String getKeyMetadataHashEncoded() { + byte[] hash = getKeyMetadataHash(); + if (hash != null) { + return Base64.getEncoder().encodeToString(hash); + } + return null; + } + public static byte[] constructMetadataHash(String metadata) { MessageDigest md5; try { @@ -209,8 +229,8 @@ public boolean equals(Object o) { PBEKeyData that = (PBEKeyData) o; return new EqualsBuilder() - .append(pbe_prefix, that.pbe_prefix) - .append(key_namespace, that.key_namespace) + .append(pbePrefix, that.pbePrefix) + .append(keyNamespace, that.keyNamespace) .append(theKey, that.theKey) .append(keyStatus, that.keyStatus) .append(keyMetadata, that.keyMetadata) @@ -220,8 +240,8 @@ public boolean equals(Object o) { @Override public int hashCode() { return new HashCodeBuilder(17, 37) - .append(pbe_prefix) - .append(key_namespace) + .append(pbePrefix) + .append(keyNamespace) .append(theKey) .append(keyStatus) .append(keyMetadata) diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdmin.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdmin.java index ae0f1af9d1ba..5c2254484bab 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdmin.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdmin.java @@ -17,9 +17,12 @@ */ package org.apache.hadoop.hbase.keymeta; +import org.apache.hadoop.hbase.io.crypto.PBEKeyData; import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus; import org.apache.yetus.audience.InterfaceAudience; import java.io.IOException; +import java.security.KeyException; +import java.util.List; /** * PBEKeymetaAdmin is an interface for administrative functions related to PBE keys. @@ -41,4 +44,15 @@ public interface PBEKeymetaAdmin { * @throws IOException if an error occurs while enabling PBE. */ PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOException; + + /** + * Get the status of all the keys for the specified pbe_prefix. + * + * @param pbePrefix The prefix for the PBE key. + * @param keyNamespace The namespace for the PBE key. + * @return The list of status objects each identifying the key and its current status. + * @throws IOException if an error occurs while enabling PBE. + */ + List getPBEKeyStatuses(String pbePrefix, String keyNamespace) + throws IOException, KeyException; } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java index e511627d35dc..6cc1bbbdfa22 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java @@ -138,7 +138,7 @@ private void assertPBEKeyData(PBEKeyData keyData, PBEKeyStatus expKeyStatus, byt Map keyMetadata = GsonUtil.getDefaultInstance().fromJson(keyData.getKeyMetadata(), HashMap.class); assertNotNull(keyMetadata); - assertEquals(new Bytes(prefixBytes), keyData.getPbe_prefix()); + assertEquals(new Bytes(prefixBytes), keyData.getPBEPrefix()); assertEquals(alias, keyMetadata.get(KEY_METADATA_ALIAS)); assertEquals(Base64.getEncoder().encodeToString(prefixBytes), keyMetadata.get(KEY_METADATA_PREFIX)); diff --git a/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto b/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto index 75dfffac427c..4f0c4422b4c8 100644 --- a/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto +++ b/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto @@ -40,9 +40,17 @@ message PBEAdminResponse { required string pbe_prefix = 1; required string key_namespace = 2; required PBEKeyStatus pbe_status = 3; + optional string key_metadata = 4; + optional int64 refresh_timestamp = 5; +} + +message PBEGetStatusResponse { + repeated PBEAdminResponse status = 1; } service PBEAdminService { rpc EnablePBE(PBEAdminRequest) returns (PBEAdminResponse); + rpc GetPBEStatuses(PBEAdminRequest) + returns (PBEGetStatusResponse); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java index 5d79875a2c46..0714e0b5b362 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java @@ -23,6 +23,7 @@ import java.security.KeyException; import java.util.List; +// TODO: Also integrate with the key provider when it is not found in the cache??? /** * This class provides a unified access on top of both {@code PBEKeyDataCache} (L1) and * {@code PBEKeymetaTableAccessor} (L2) to access PBE keys. When the getter is called, it first @@ -44,7 +45,7 @@ public PBEKeyAccessor(PBEKeymetaTableAccessor keymetaAccessor) { * @param pbePrefix The prefix of the key * @param keyNamespace The namespace of the key * @param keyMetadata The metadata of the key - * @return The key data + * @return The key data or {@code null} * @throws IOException if an error occurs while retrieving the key */ public PBEKeyData getKey(byte[] pbePrefix, String keyNamespace, String keyMetadata) @@ -52,7 +53,9 @@ public PBEKeyData getKey(byte[] pbePrefix, String keyNamespace, String keyMetada PBEKeyData keyData = keyDataCache.getEntry(keyMetadata); if (keyData == null) { keyData = keymetaAccessor.getKey(pbePrefix, keyNamespace, keyMetadata); - keyDataCache.addEntry(keyData); + if (keyData != null) { + keyDataCache.addEntry(keyData); + } } return keyData; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyDataCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyDataCache.java index b022177d83d6..51bba86a3c4b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyDataCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyDataCache.java @@ -51,7 +51,7 @@ public PBEKeyDataCache() { public void addEntry(PBEKeyData pbeKeyData) { lock.lock(); try { - Bytes pbePrefix = new Bytes(pbeKeyData.getPbe_prefix()); + Bytes pbePrefix = new Bytes(pbeKeyData.getPBEPrefix()); String keyNamespace = pbeKeyData.getKeyNamespace(); cache.put(pbeKeyData.getKeyMetadata(), pbeKeyData); @@ -100,7 +100,7 @@ public PBEKeyData removeEntry(String keyMetadata) { try { PBEKeyData removedEntry = cache.remove(keyMetadata); if (removedEntry != null) { - Bytes pbePrefix = new Bytes(removedEntry.getPbe_prefix()); + Bytes pbePrefix = new Bytes(removedEntry.getPBEPrefix()); String keyNamespace = removedEntry.getKeyNamespace(); Map> nsCache = prefixCache.get(keyNamespace); List keyList = nsCache != null ? nsCache.get(pbePrefix) : null; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java index c005d5643b20..c5cf0615140b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java @@ -25,7 +25,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; +import java.security.KeyException; import java.util.Base64; +import java.util.List; @InterfaceAudience.Private public class PBEKeymetaAdminImpl extends PBEKeymetaTableAccessor implements PBEKeymetaAdmin { @@ -40,7 +42,17 @@ public PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOEx if (! isPBEEnabled()) { throw new IOException("PBE is currently not enabled in HBase configuration"); } - LOG.info("Trying to enable PBE on key: {} for namespace: {}", pbePrefix, keyNamespace); + LOG.info("Trying to enable PBE on key: {} under namespace: {}", pbePrefix, keyNamespace); + byte[] pbe_prefix = convertToPrefixBytes(pbePrefix); + PBEKeyProvider provider = getKeyProvider(); + PBEKeyData pbeKey = provider.getPBEKey(pbe_prefix, keyNamespace); + LOG.info("Got key data with status: {} and metadata: {} for prefix: {}", pbeKey.getKeyStatus(), + pbeKey.getKeyMetadata(), pbePrefix); + addKey(pbeKey); + return pbeKey.getKeyStatus(); + } + + private static byte[] convertToPrefixBytes(String pbePrefix) throws IOException { byte[] pbe_prefix; try { pbe_prefix = Base64.getDecoder().decode(pbePrefix); @@ -48,11 +60,18 @@ public PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOEx catch (IllegalArgumentException e) { throw new IOException("Failed to decode specified prefix as Base64 string: " + pbePrefix, e); } - PBEKeyProvider provider = getKeyProvider(); - PBEKeyData pbeKey = provider.getPBEKey(pbe_prefix, keyNamespace); - LOG.info("Got key data with status: {} and metadata: {} for prefix: {}", pbeKey.getKeyStatus(), - pbeKey.getKeyMetadata(), pbePrefix); - addKey(pbeKey); - return pbeKey.getKeyStatus(); + return pbe_prefix; + } + + @Override + public List getPBEKeyStatuses(String pbePrefix, String keyNamespace) + throws IOException, KeyException { + if (! isPBEEnabled()) { + throw new IOException("PBE is currently not enabled in HBase configuration"); + } + LOG.info("Getting key statuses for PBE on key: {} under namespace: {}", pbePrefix, + keyNamespace); + byte[] pbe_prefix = convertToPrefixBytes(pbePrefix); + return super.getAllKeys(pbe_prefix, keyNamespace); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaServiceEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaServiceEndpoint.java index 02c56d95dd28..e32ecd2c7cc5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaServiceEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaServiceEndpoint.java @@ -21,13 +21,16 @@ import org.apache.hadoop.hbase.coprocessor.CoreCoprocessor; import org.apache.hadoop.hbase.coprocessor.HasMasterServices; import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor; +import org.apache.hadoop.hbase.io.crypto.PBEKeyData; import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus; import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils; import org.apache.hadoop.hbase.master.MasterServices; import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos; import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminRequest; import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminResponse; +import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEGetStatusResponse; import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminService; +import org.apache.hbase.thirdparty.com.google.protobuf.ByteString; import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback; import org.apache.hbase.thirdparty.com.google.protobuf.RpcController; import org.apache.hbase.thirdparty.com.google.protobuf.Service; @@ -35,8 +38,10 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; +import java.security.KeyException; import java.util.Base64; import java.util.Collections; +import java.util.List; /** * This class implements a coprocessor service endpoint for the Phoenix Query Server's @@ -77,7 +82,7 @@ public void start(CoprocessorEnvironment env) throws IOException { /** * Returns an iterable of the available coprocessor services, which includes the - * {@link org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminService} implemented by + * {@link PBEAdminService} implemented by * {@link org.apache.hadoop.hbase.keymeta.PBEKeymetaServiceEndpoint.KeyMetaAdminServiceImpl}. * * @return An iterable of the available coprocessor services. @@ -88,11 +93,10 @@ public Iterable getServices() { } /** - * The implementation of the {@link org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminService} + * The implementation of the {@link PBEAdminProtos.PBEAdminService} * interface, which provides the actual method implementations for enabling PBE. */ private class KeyMetaAdminServiceImpl extends PBEAdminService { - /** * Enables PBE for a given tenant and namespace, as specified in the provided request. * @@ -102,19 +106,9 @@ private class KeyMetaAdminServiceImpl extends PBEAdminService { */ @Override public void enablePBE(RpcController controller, PBEAdminRequest request, - RpcCallback done) { - PBEAdminResponse.Builder builder = - PBEAdminResponse.newBuilder().setPbePrefix(request.getPbePrefix()) - .setKeyNamespace(request.getKeyNamespace()); - byte[] pbe_prefix = null; - try { - pbe_prefix = Base64.getDecoder().decode(request.getPbePrefix()); - } catch (IllegalArgumentException e) { - builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.PBE_FAILED); - CoprocessorRpcUtils.setControllerException(controller, new IOException( - "Failed to decode specified prefix as Base64 string: " + request.getPbePrefix(), e)); - } - if (pbe_prefix != null) { + RpcCallback done) { + PBEAdminResponse.Builder builder = getResponseBuilder(controller, request); + if (builder.getPbePrefix() != null) { try { PBEKeyStatus pbeKeyStatus = master.getPBEKeymetaAdmin() .enablePBE(request.getPbePrefix(), request.getKeyNamespace()); @@ -126,5 +120,65 @@ public void enablePBE(RpcController controller, PBEAdminRequest request, } done.run(builder.build()); } + + @Override + public void getPBEStatuses(RpcController controller, PBEAdminRequest request, + RpcCallback done) { + PBEGetStatusResponse.Builder responseBuilder = + PBEGetStatusResponse.newBuilder(); + PBEAdminResponse.Builder builder = getResponseBuilder(controller, request); + if (builder.getPbePrefix() != null) { + try { + List pbeKeyStatuses = master.getPBEKeymetaAdmin() + .getPBEKeyStatuses(request.getPbePrefix(), request.getKeyNamespace()); + for (PBEKeyData keyData: pbeKeyStatuses) { + builder.setPbeStatus( + PBEAdminProtos.PBEKeyStatus.valueOf(keyData.getKeyStatus().getVal())); + builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.valueOf( + keyData.getKeyStatus().getVal())) + .setKeyMetadata(keyData.getKeyMetadata()) + .setRefreshTimestamp(keyData.getRefreshTimestamp()) + ; + responseBuilder.addStatus(builder.build()); + } + } catch (IOException e) { + CoprocessorRpcUtils.setControllerException(controller, e); + builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.PBE_FAILED); + } catch (KeyException e) { + CoprocessorRpcUtils.setControllerException(controller, new IOException(e)); + builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.PBE_FAILED); + } + } + done.run(responseBuilder.build()); + } + + private byte[] convertToPBEBytes(RpcController controller, PBEAdminRequest request, + PBEAdminResponse.Builder builder) { + byte[] pbe_prefix = null; + try { + pbe_prefix = Base64.getDecoder().decode(request.getPbePrefix()); + } catch (IllegalArgumentException e) { + builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.PBE_FAILED); + CoprocessorRpcUtils.setControllerException(controller, new IOException( + "Failed to decode specified prefix as Base64 string: " + request.getPbePrefix(), e)); + } + return pbe_prefix; + } + + private PBEAdminResponse.Builder getResponseBuilder(RpcController controller, + PBEAdminRequest request) { + PBEAdminResponse.Builder builder = PBEAdminResponse.newBuilder() + .setKeyNamespace(request.getKeyNamespace()); + byte[] pbe_prefix = null; + try { + pbe_prefix = Base64.getDecoder().decode(request.getPbePrefix()); + builder.setPbePrefixBytes(ByteString.copyFrom(pbe_prefix)); + } catch (IllegalArgumentException e) { + builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.PBE_FAILED); + CoprocessorRpcUtils.setControllerException(controller, new IOException( + "Failed to decode specified prefix as Base64 string: " + request.getPbePrefix(), e)); + } + return builder; + } } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java index 7704f7384d12..52409251500e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java @@ -35,7 +35,6 @@ import org.apache.hadoop.hbase.security.EncryptionUtil; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; -import javax.crypto.spec.SecretKeySpec; import java.io.IOException; import java.security.Key; import java.security.KeyException; @@ -86,7 +85,7 @@ public void addKey(PBEKeyData keyData) throws IOException { } } - public List getActiveKeys(byte[] pbePrefix, String keyNamespace) + protected List getAllKeys(byte[] pbePrefix, String keyNamespace) throws IOException, KeyException { Connection connection = server.getConnection(); byte[] prefixForScan = Bytes.add(Bytes.toBytes(pbePrefix.length), pbePrefix, @@ -98,16 +97,26 @@ public List getActiveKeys(byte[] pbePrefix, String keyNamespace) scan.addFamily(KEY_META_INFO_FAMILY); ResultScanner scanner = table.getScanner(scan); - List activeKeys = new ArrayList<>(); + List allKeys = new ArrayList<>(); for (Result result : scanner) { PBEKeyData keyData = parseFromResult(pbePrefix, keyNamespace, result); - if (keyData.getKeyStatus() == PBEKeyStatus.ACTIVE) { - activeKeys.add(keyData); + if (keyData != null) { + allKeys.add(keyData); } } + return allKeys; + } + } - return activeKeys; + public List getActiveKeys(byte[] pbePrefix, String keyNamespace) + throws IOException, KeyException { + List activeKeys = new ArrayList<>(); + for (PBEKeyData keyData : getAllKeys(pbePrefix, keyNamespace)) { + if (keyData.getKeyStatus() == PBEKeyStatus.ACTIVE) { + activeKeys.add(keyData); + } } + return activeKeys; } public PBEKeyData getKey(byte[] pbePrefix, String keyNamespace, String keyMetadata) @@ -144,7 +153,7 @@ private Put addMutationColumns(Put put, PBEKeyData keyData) throws IOException { } private byte[] constructRowKeyForMetadata(PBEKeyData keyData) { - return constructRowKeyForMetadata(keyData.getPbe_prefix(), keyData.getKeyNamespace(), + return constructRowKeyForMetadata(keyData.getPBEPrefix(), keyData.getKeyNamespace(), keyData.getKeyMetadataHash()); } @@ -166,17 +175,30 @@ private PBEKeyData parseFromResult(byte[] pbePrefix, String keyNamespace, Result DEK_METADATA_QUAL_BYTES)); long refreshedTimestamp = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY, REFRESHED_TIMESTAMP_QUAL_BYTES)); - byte[] dekChecksum = result.getValue(KEY_META_INFO_FAMILY, DEK_CHECKSUM_QUAL_BYTES); byte[] dekWrappedByStk = result.getValue(KEY_META_INFO_FAMILY, DEK_WRAPPED_BY_STK_QUAL_BYTES); - long stkChecksum = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY, STK_CHECKSUM_QUAL_BYTES)); - - PBEKeyData clusterKey = server.getPBEClusterKeyCache().getClusterKeyByChecksum(stkChecksum); - Key dek = EncryptionUtil.unwrapKey(server.getConfiguration(), null, dekWrappedByStk, + Key dek = null; + if (dekWrappedByStk != null) { + long stkChecksum = + Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY, STK_CHECKSUM_QUAL_BYTES)); + PBEKeyData clusterKey = server.getPBEClusterKeyCache().getClusterKeyByChecksum(stkChecksum); + if (clusterKey == null) { + LOG.error("Dropping key with metadata: {} as STK with checksum: {} is unavailable", + dekMetadata, stkChecksum); + return null; + } + dek = EncryptionUtil.unwrapKey(server.getConfiguration(), null, dekWrappedByStk, clusterKey.getTheKey()); + } PBEKeyData dekKeyData = new PBEKeyData(pbePrefix, keyNamespace, dek, keyStatus, dekMetadata, refreshedTimestamp); - if (!Bytes.equals(dekKeyData.getKeyMetadataHash(), dekChecksum)) { - throw new RuntimeException("Key has didn't match for key with metadata" + dekMetadata); + if (dek != null) { + long dekChecksum = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY, + DEK_CHECKSUM_QUAL_BYTES)); + if (dekKeyData.getKeyChecksum() != dekChecksum) { + LOG.error("Dropping key, current key checksum: {} didn't match the expected checksum: {}" + + " for key with metadata: {}", dekKeyData.getKeyChecksum(), dekChecksum, dekMetadata); + return null; + } } return dekKeyData; } diff --git a/hbase-shell/src/main/ruby/hbase/pbe_admin.rb b/hbase-shell/src/main/ruby/hbase/pbe_admin.rb index e00f7ca138ce..656999be8071 100644 --- a/hbase-shell/src/main/ruby/hbase/pbe_admin.rb +++ b/hbase-shell/src/main/ruby/hbase/pbe_admin.rb @@ -32,11 +32,21 @@ def close end def pbe_enable(pbe_prefix) + prefix, namespace = extract_prefix_info(pbe_prefix) + @admin.enablePBE(prefix, namespace) + end + + def show_pbe_status(pbe_prefix) + prefix, namespace = extract_prefix_info(pbe_prefix) + @admin.getPBEKeyStatuses(prefix, namespace) + end + + def extract_prefix_info(pbe_prefix) prefixInfo = pbe_prefix.split(':') raise(ArgumentError, 'Invalid prefix:namespace format') unless (prefixInfo.length == 1 || prefixInfo.length == 2) - @admin.enablePBE(prefixInfo[0], prefixInfo.length > 1 ? prefixInfo[1] : - PBEKeyData::KEY_NAMESPACE_GLOBAL) + return prefixInfo[0], prefixInfo.length > 1 ? prefixInfo[1] : + PBEKeyData::KEY_NAMESPACE_GLOBAL end end end diff --git a/hbase-shell/src/main/ruby/shell.rb b/hbase-shell/src/main/ruby/shell.rb index 30e272291821..c8afe02240ef 100644 --- a/hbase-shell/src/main/ruby/shell.rb +++ b/hbase-shell/src/main/ruby/shell.rb @@ -626,6 +626,7 @@ def self.exception_handler(hide_traceback) UnknownProtocolException: No registered Master Coprocessor Endpoint found for PBEAdminService", commands: %w[ pbe_enable + pbe_get_statuses ] ) diff --git a/hbase-shell/src/main/ruby/shell/commands/pbe_get_statuses.rb b/hbase-shell/src/main/ruby/shell/commands/pbe_get_statuses.rb new file mode 100644 index 000000000000..70dd6fcd205c --- /dev/null +++ b/hbase-shell/src/main/ruby/shell/commands/pbe_get_statuses.rb @@ -0,0 +1,45 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +module Shell + module Commands + class PbeGetStatuses < Command + def help + <<-EOF +Get key statuses for a given prefix:namespace (prefix in Base64 format). +If no namespace is specified, the global namespace (*) is used. +EOF + end + + def command(pbe_prefix) + formatter.header(['ENCODED-KEY', 'NAMESPACE', 'STATUS', 'METADATA', 'METADATA-HASH', 'REFRESH-TIMESTAMP']) + statuses = pbe_admin.show_pbe_status(pbe_prefix) + statuses.each { |status| + formatter.row([ + status.getPBEPrefixEncoded(), + status.getKeyNamespace(), + status.getKeyStatus().toString(), + status.getKeyMetadata(), + status.getKeyMetadataHashEncoded(), + status.getRefreshTimestamp() + ]) + } + formatter.footer(statuses.size()) + end + end + end +end From 56401c7199941b92f2d654e66395a115183b48f2 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Wed, 12 Mar 2025 10:48:45 +0530 Subject: [PATCH 07/70] Operation counts --- .../hbase/keymeta/PBEKeymetaAdminClient.java | 2 +- .../hadoop/hbase/io/crypto/PBEKeyData.java | 50 ++++++++--- .../src/main/protobuf/server/PBEAdmin.proto | 2 + .../hbase/keymeta/PBEKeymetaAdminImpl.java | 22 ++--- .../keymeta/PBEKeymetaServiceEndpoint.java | 2 + .../keymeta/PBEKeymetaTableAccessor.java | 83 +++++++++++++++++-- .../ruby/shell/commands/pbe_get_statuses.rb | 7 +- 7 files changed, 135 insertions(+), 33 deletions(-) diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java index b39e7768e0b3..bc39f3d37b29 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java @@ -49,7 +49,7 @@ public PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOEx status.getKeyNamespace(), null, PBEKeyStatus.forValue((byte) status.getPbeStatus().getNumber()), status.getKeyMetadata(), - status.getRefreshTimestamp())); + status.getRefreshTimestamp(), status.getReadOpCount(), status.getWriteOpCount())); } } catch (ServiceException e) { throw ProtobufUtil.handleRemoteException(e); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java index 34524dd6703f..b49b74659233 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java @@ -53,12 +53,14 @@ public class PBEKeyData { public static final String KEY_NAMESPACE_GLOBAL = "*"; - private byte[] pbePrefix; - private String keyNamespace; - private Key theKey; - private PBEKeyStatus keyStatus; - private String keyMetadata; - private long refreshTimestamp; + private final byte[] pbePrefix; + private final String keyNamespace; + private final Key theKey; + private final PBEKeyStatus keyStatus; + private final String keyMetadata; + private final long refreshTimestamp; + private final long readOpCount; + private final long writeOpCount; private volatile long keyChecksum = 0; private byte[] keyMetadataHash; @@ -74,25 +76,31 @@ public class PBEKeyData { public PBEKeyData(byte[] pbe_prefix, String key_namespace, Key theKey, PBEKeyStatus keyStatus, String keyMetadata) { this(pbe_prefix, key_namespace, theKey, keyStatus, keyMetadata, - EnvironmentEdgeManager.currentTime()); + EnvironmentEdgeManager.currentTime(), 0, 0); } /** * Constructs a new instance with the given parameters. * - * @param pbe_prefix The PBE prefix associated with the key. - * @param theKey The actual key, can be {@code null}. - * @param keyStatus The status of the key. - * @param keyMetadata The metadata associated with the key. + * @param pbe_prefix The PBE prefix associated with the key. + * @param theKey The actual key, can be {@code null}. + * @param keyStatus The status of the key. + * @param keyMetadata The metadata associated with the key. * @param refreshTimestamp The timestamp when this key was last refreshed. + * @param readOpCount The current number of read operations for this key. + * @param writeOpCount The current number of write operations for this key. * @throws NullPointerException if any of pbe_prefix, keyStatus or keyMetadata is null. */ public PBEKeyData(byte[] pbe_prefix, String key_namespace, Key theKey, PBEKeyStatus keyStatus, - String keyMetadata, long refreshTimestamp) { + String keyMetadata, long refreshTimestamp, long readOpCount, long writeOpCount) { Preconditions.checkNotNull(pbe_prefix, "pbe_prefix should not be null"); Preconditions.checkNotNull(key_namespace, "key_namespace should not be null"); Preconditions.checkNotNull(keyStatus, "keyStatus should not be null"); Preconditions.checkNotNull(keyMetadata, "keyMetadata should not be null"); + Preconditions.checkArgument(readOpCount >= 0, "readOpCount: " + readOpCount + + " should be >= 0"); + Preconditions.checkArgument(writeOpCount >= 0, "writeOpCount: " + writeOpCount + + " should be >= 0"); this.pbePrefix = pbe_prefix; this.keyNamespace = key_namespace; @@ -100,6 +108,8 @@ public PBEKeyData(byte[] pbe_prefix, String key_namespace, Key theKey, PBEKeySta this.keyStatus = keyStatus; this.keyMetadata = keyMetadata; this.refreshTimestamp = refreshTimestamp; + this.readOpCount = readOpCount; + this.writeOpCount = writeOpCount; } /** @@ -166,6 +176,22 @@ public long getRefreshTimestamp() { return refreshTimestamp; } + /** + * @return the number of times this key has been used for read operations as of the time this + * key data was initialized. + */ + public long getReadOpCount() { + return readOpCount; + } + + /** + * @return the number of times this key has been used for write operations as of the time this + * key data was initialized. + */ + public long getWriteOpCount() { + return writeOpCount; + } + /** * Computes the checksum of the key. If the checksum has already been computed, this method * returns the previously computed value. The checksum is computed using the CRC32C algorithm. diff --git a/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto b/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto index 4f0c4422b4c8..78c68d2a745d 100644 --- a/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto +++ b/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto @@ -42,6 +42,8 @@ message PBEAdminResponse { required PBEKeyStatus pbe_status = 3; optional string key_metadata = 4; optional int64 refresh_timestamp = 5; + optional int64 read_op_count = 6; + optional int64 write_op_count = 7; } message PBEGetStatusResponse { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java index c5cf0615140b..bc09f4685a83 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java @@ -52,17 +52,6 @@ public PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOEx return pbeKey.getKeyStatus(); } - private static byte[] convertToPrefixBytes(String pbePrefix) throws IOException { - byte[] pbe_prefix; - try { - pbe_prefix = Base64.getDecoder().decode(pbePrefix); - } - catch (IllegalArgumentException e) { - throw new IOException("Failed to decode specified prefix as Base64 string: " + pbePrefix, e); - } - return pbe_prefix; - } - @Override public List getPBEKeyStatuses(String pbePrefix, String keyNamespace) throws IOException, KeyException { @@ -74,4 +63,15 @@ public List getPBEKeyStatuses(String pbePrefix, String keyNamespace) byte[] pbe_prefix = convertToPrefixBytes(pbePrefix); return super.getAllKeys(pbe_prefix, keyNamespace); } + + private static byte[] convertToPrefixBytes(String pbePrefix) throws IOException { + byte[] pbe_prefix; + try { + pbe_prefix = Base64.getDecoder().decode(pbePrefix); + } + catch (IllegalArgumentException e) { + throw new IOException("Failed to decode specified prefix as Base64 string: " + pbePrefix, e); + } + return pbe_prefix; + } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaServiceEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaServiceEndpoint.java index e32ecd2c7cc5..40b614dc1a7f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaServiceEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaServiceEndpoint.java @@ -138,6 +138,8 @@ public void getPBEStatuses(RpcController controller, PBEAdminRequest request, keyData.getKeyStatus().getVal())) .setKeyMetadata(keyData.getKeyMetadata()) .setRefreshTimestamp(keyData.getRefreshTimestamp()) + .setReadOpCount(keyData.getReadOpCount()) + .setWriteOpCount(keyData.getWriteOpCount()) ; responseBuilder.addStatus(builder.build()); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java index 52409251500e..ec2f25257e9c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java @@ -24,6 +24,7 @@ import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.Increment; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -71,20 +72,39 @@ public class PBEKeymetaTableAccessor extends PBEKeyManager { public static final String KEY_STATUS_QUAL_NAME = "key_status"; public static final byte[] KEY_STATUS_QUAL_BYTES = Bytes.toBytes(KEY_STATUS_QUAL_NAME); + public static final String READ_OP_COUNT_QUAL_NAME = "read_op_count"; + public static final byte[] READ_OP_COUNT_QUAL_BYTES = Bytes.toBytes(READ_OP_COUNT_QUAL_NAME); + + public static final String WRITE_OP_COUNT_QUAL_NAME = "write_op_count"; + public static final byte[] WRITE_OP_COUNT_QUAL_BYTES = Bytes.toBytes(WRITE_OP_COUNT_QUAL_NAME); + public PBEKeymetaTableAccessor(Server server) { super(server); } + /** + * Add the specified key to the keymeta table. + * @param keyData The key data. + * @throws IOException when there is an underlying IOException. + */ public void addKey(PBEKeyData keyData) throws IOException { final Put putForMetadata = addMutationColumns(new Put(constructRowKeyForMetadata(keyData)), keyData); - Connection connection = server.getConnection(); try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { table.put(putForMetadata); } } + /** + * Get all the keys for the specified pbe_prefix and key_namespace. + * + * @param pbePrefix The prefix + * @param keyNamespace The namespace + * @return a list of key data, one for each key, can be empty when none were found. + * @throws IOException when there is an underlying IOException. + * @throws KeyException when there is an underlying KeyException. + */ protected List getAllKeys(byte[] pbePrefix, String keyNamespace) throws IOException, KeyException { Connection connection = server.getConnection(); @@ -108,6 +128,15 @@ protected List getAllKeys(byte[] pbePrefix, String keyNamespace) } } + /** + * Get all the active keys for the specified pbe_prefix and key_namespace. + * + * @param pbePrefix The prefix + * @param keyNamespace The namespace + * @return a list of key data, one for each active key, can be empty when none were found. + * @throws IOException when there is an underlying IOException. + * @throws KeyException when there is an underlying KeyException. + */ public List getActiveKeys(byte[] pbePrefix, String keyNamespace) throws IOException, KeyException { List activeKeys = new ArrayList<>(); @@ -119,6 +148,16 @@ public List getActiveKeys(byte[] pbePrefix, String keyNamespace) return activeKeys; } + /** + * Get the specific key identified by pbePrefix, keyNamespace and keyMetadata. + * + * @param pbePrefix The prefix. + * @param keyNamespace The namespace. + * @param keyMetadata The metadata. + * @return the key or {@code null} + * @throws IOException when there is an underlying IOException. + * @throws KeyException when there is an underlying KeyException. + */ public PBEKeyData getKey(byte[] pbePrefix, String keyNamespace, String keyMetadata) throws IOException, KeyException { Connection connection = server.getConnection(); @@ -130,6 +169,32 @@ public PBEKeyData getKey(byte[] pbePrefix, String keyNamespace, String keyMetada } } + /** + * Report read or write operation count on the specific key identified by pbePrefix, keyNamespace + * and keyMetadata. The reported value is added to the existing operation count using the + * Increment mutation. + * @param pbePrefix The prefix. + * @param keyNamespace The namespace. + * @param keyMetadata The metadata. + * @throws IOException when there is an underlying IOException. + */ + public void reportOperation(byte[] pbePrefix, String keyNamespace, String keyMetadata, long count, + boolean isReadOperation) throws IOException { + Connection connection = server.getConnection(); + try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { + byte[] rowKey = constructRowKeyForMetadata(pbePrefix, keyNamespace, + PBEKeyData.constructMetadataHash(keyMetadata)); + Increment incr = new Increment(rowKey) + .addColumn(KEY_META_INFO_FAMILY, + isReadOperation ? READ_OP_COUNT_QUAL_BYTES : WRITE_OP_COUNT_QUAL_BYTES, + count); + table.increment(incr); + } + } + + /** + * Add the mutation columns to the given Put that are derived from the keyData. + */ private Put addMutationColumns(Put put, PBEKeyData keyData) throws IOException { PBEKeyData latestClusterKey = server.getPBEClusterKeyCache().getLatestClusterKey(); if (keyData.getTheKey() != null) { @@ -169,12 +234,8 @@ private PBEKeyData parseFromResult(byte[] pbePrefix, String keyNamespace, Result if (result == null || result.isEmpty()) { return null; } - PBEKeyStatus keyStatus = PBEKeyStatus.forValue( - result.getValue(KEY_META_INFO_FAMILY, KEY_STATUS_QUAL_BYTES)[0]); String dekMetadata = Bytes.toString(result.getValue(KEY_META_INFO_FAMILY, DEK_METADATA_QUAL_BYTES)); - long refreshedTimestamp = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY, - REFRESHED_TIMESTAMP_QUAL_BYTES)); byte[] dekWrappedByStk = result.getValue(KEY_META_INFO_FAMILY, DEK_WRAPPED_BY_STK_QUAL_BYTES); Key dek = null; if (dekWrappedByStk != null) { @@ -189,8 +250,16 @@ private PBEKeyData parseFromResult(byte[] pbePrefix, String keyNamespace, Result dek = EncryptionUtil.unwrapKey(server.getConfiguration(), null, dekWrappedByStk, clusterKey.getTheKey()); } - PBEKeyData dekKeyData = - new PBEKeyData(pbePrefix, keyNamespace, dek, keyStatus, dekMetadata, refreshedTimestamp); + PBEKeyStatus keyStatus = PBEKeyStatus.forValue( + result.getValue(KEY_META_INFO_FAMILY, KEY_STATUS_QUAL_BYTES)[0]); + long refreshedTimestamp = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY, + REFRESHED_TIMESTAMP_QUAL_BYTES)); + byte[] readOpValue = result.getValue(KEY_META_INFO_FAMILY, READ_OP_COUNT_QUAL_BYTES); + long readOpCount = readOpValue != null ? Bytes.toLong(readOpValue) : 0; + byte[] writeOpValue = result.getValue(KEY_META_INFO_FAMILY, WRITE_OP_COUNT_QUAL_BYTES); + long writeOpCount = writeOpValue != null ? Bytes.toLong(writeOpValue) : 0; + PBEKeyData dekKeyData = new PBEKeyData(pbePrefix, keyNamespace, dek, keyStatus, dekMetadata, + refreshedTimestamp, readOpCount, writeOpCount); if (dek != null) { long dekChecksum = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY, DEK_CHECKSUM_QUAL_BYTES)); diff --git a/hbase-shell/src/main/ruby/shell/commands/pbe_get_statuses.rb b/hbase-shell/src/main/ruby/shell/commands/pbe_get_statuses.rb index 70dd6fcd205c..8a34b6579ff1 100644 --- a/hbase-shell/src/main/ruby/shell/commands/pbe_get_statuses.rb +++ b/hbase-shell/src/main/ruby/shell/commands/pbe_get_statuses.rb @@ -26,7 +26,8 @@ def help end def command(pbe_prefix) - formatter.header(['ENCODED-KEY', 'NAMESPACE', 'STATUS', 'METADATA', 'METADATA-HASH', 'REFRESH-TIMESTAMP']) + formatter.header(['ENCODED-KEY', 'NAMESPACE', 'STATUS', 'METADATA', 'METADATA-HASH', + 'REFRESH-TIMESTAMP', 'READ-OP-COUNT', 'WRITE-OP-COUNT']) statuses = pbe_admin.show_pbe_status(pbe_prefix) statuses.each { |status| formatter.row([ @@ -35,7 +36,9 @@ def command(pbe_prefix) status.getKeyStatus().toString(), status.getKeyMetadata(), status.getKeyMetadataHashEncoded(), - status.getRefreshTimestamp() + status.getRefreshTimestamp(), + status.getReadOpCount(), + status.getWriteOpCount() ]) } formatter.footer(statuses.size()) From 236c9cb0543e09b4d4e4931ad52ef59e7e9a4566 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Sat, 29 Mar 2025 19:03:06 +0530 Subject: [PATCH 08/70] Attempting broader test coverage --- .../org/apache/hadoop/hbase/HConstants.java | 5 + .../io/crypto/PBEKeyStoreKeyProvider.java | 29 +++--- .../hbase/io/crypto/MockPBEKeyProvider.java | 94 +++++++++++++++++++ .../apache/hadoop/hbase/HBaseServerBase.java | 2 +- .../java/org/apache/hadoop/hbase/Server.java | 10 ++ .../hbase/keymeta/PBEClusterKeyAccessor.java | 3 +- .../hadoop/hbase/keymeta/PBEKeyAccessor.java | 27 ++++-- ...eyManager.java => PBEKeyAccessorBase.java} | 36 +++++-- .../hbase/keymeta/PBEKeymetaAdminImpl.java | 54 ++++++----- .../keymeta/PBEKeymetaTableAccessor.java | 49 +++++----- .../hbase/master/TestPBEClusterKey.java | 93 ++++++++++++++++++ .../hadoop/hbase/HBaseTestingUtility.java | 7 +- 12 files changed, 336 insertions(+), 73 deletions(-) create mode 100644 hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockPBEKeyProvider.java rename hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/{PBEKeyManager.java => PBEKeyAccessorBase.java} (65%) create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestPBEClusterKey.java diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java index 670bda523919..1649690be8ec 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java @@ -1293,6 +1293,11 @@ public enum OperationStatusCode { "hbase.crypto.pbe.master.key.name"; public static final String CRYPTO_PBE_ENABLED_CONF_KEY = "hbase.crypto.pbe.enabled"; + public static final boolean CRYPTO_PBE_DEFAULT_ENABLED = false; + + public static final String CRYPTO_PBE_PER_PREFIX_ACTIVE_KEY_COUNT = + "hbase.crypto.pbe.per_prefix.active_count"; + public static final int CRYPTO_PBE_PER_PREFIX_ACTIVE_KEY_DEFAULT_COUNT = 1; public static final String CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX = "hbase.crypto.pbe.prefix."; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java index 4c958a30b01d..098afaa0d954 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java @@ -34,8 +34,7 @@ public PBEKeyData getClusterKey(byte[] clusterId) { throw new RuntimeException("Unable to find cluster key with alias: " + masterKeyAlias); } // Encode clusterId too for consistency with that of PBE prefixes. - String keyMetadata = generateKeyMetadata(masterKeyAlias, - Base64.getEncoder().encodeToString(clusterId)); + String keyMetadata = generateKeyMetadata(masterKeyAlias, encodeToPrefixStr(clusterId)); return new PBEKeyData(clusterId, PBEKeyData.KEY_NAMESPACE_GLOBAL, key, PBEKeyStatus.ACTIVE, keyMetadata); } @@ -43,7 +42,7 @@ public PBEKeyData getClusterKey(byte[] clusterId) { @Override public PBEKeyData getPBEKey(byte[] pbe_prefix, String key_namespace) throws IOException { checkConfig(); - String encodedPrefix = Base64.getEncoder().encodeToString(pbe_prefix); + String encodedPrefix = encodeToPrefixStr(pbe_prefix); String aliasConfKey = HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encodedPrefix + "." + "alias"; String keyMetadata = generateKeyMetadata(conf.get(aliasConfKey, null), encodedPrefix); @@ -58,14 +57,7 @@ public PBEKeyData unwrapKey(String keyMetadataStr) throws IOException { String activeStatusConfKey = HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encodedPrefix + ".active"; boolean isActive = conf.getBoolean(activeStatusConfKey, true); - byte[] pbe_prefix; - try { - pbe_prefix = Base64.getDecoder().decode(encodedPrefix); - } - catch (IllegalArgumentException e) { - throw new IOException("Failed to decode specified prefix as Base64 string: " + - encodedPrefix, e); - } + byte[] pbe_prefix = decodeToPrefixBytes(encodedPrefix); String alias = keyMetadata.get(KEY_METADATA_ALIAS); Key key = alias != null ? getKey(alias) : null; if (key != null) { @@ -88,4 +80,19 @@ private void checkConfig() { throw new IllegalStateException("initConfig is not called or config is null"); } } + + public static byte[] decodeToPrefixBytes(String pbePrefix) throws IOException { + byte[] pbe_prefix; + try { + pbe_prefix = Base64.getDecoder().decode(pbePrefix); + } + catch (IllegalArgumentException e) { + throw new IOException("Failed to decode specified prefix as Base64 string: " + pbePrefix, e); + } + return pbe_prefix; + } + + public static String encodeToPrefixStr(byte[] pbe_prefix) { + return Base64.getEncoder().encodeToString(pbe_prefix); + } } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockPBEKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockPBEKeyProvider.java new file mode 100644 index 000000000000..6a38cdb77403 --- /dev/null +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockPBEKeyProvider.java @@ -0,0 +1,94 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.io.crypto; + +import java.io.IOException; +import java.security.Key; +import java.security.KeyStore; +import java.security.NoSuchAlgorithmException; +import java.util.HashMap; +import java.util.Map; +import javax.crypto.KeyGenerator; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.util.Bytes; + +/** + * A simple implementation of PBEKeyProvider for testing. It generates a key on demand given a + * prefix. One can control the state of a key by calling setKeyStatus and can rotate a key by + * calling setKey. + */ +public class MockPBEKeyProvider extends MockAesKeyProvider implements PBEKeyProvider { + public Map keys = new HashMap<>(); + public Map keyStatus = new HashMap<>(); + + @Override public void initConfig(Configuration conf) { + // NO-OP + } + + @Override public PBEKeyData getClusterKey(byte[] clusterId) throws IOException { + return getKey(clusterId); + } + + @Override public PBEKeyData getPBEKey(byte[] pbe_prefix, String key_namespace) + throws IOException { + return getKey(pbe_prefix); + } + + @Override public PBEKeyData unwrapKey(String keyAlias) throws IOException { + return getKey(keyAlias.getBytes()); + } + + /** + * Lookup the key data for the given prefix from keys. If missing, initialize one using generateSecretKey(). + */ + public PBEKeyData getKey(byte[] prefix_bytes) { + String alias = Bytes.toString(prefix_bytes); + Key key = keys.get(alias); + if (key == null) { + key = generateSecretKey(); + keys.put(alias, key); + } + PBEKeyStatus keyStatus = this.keyStatus.get(alias); + return new PBEKeyData(prefix_bytes, PBEKeyData.KEY_NAMESPACE_GLOBAL, key, + keyStatus == null ? PBEKeyStatus.ACTIVE : keyStatus, Bytes.toString(prefix_bytes)); + } + + public void setKeyStatus(byte[] prefix_bytes, PBEKeyStatus status) { + keyStatus.put(Bytes.toString(prefix_bytes), status); + } + + public void setKey(byte[] prefix_bytes, Key key) { + keys.put(Bytes.toString(prefix_bytes), key); + } + + /** + * Generate a new secret key. + * @return the key + */ + public static Key generateSecretKey() { + KeyGenerator keyGen = null; + try { + keyGen = KeyGenerator.getInstance("AES"); + } catch (NoSuchAlgorithmException e) { + throw new RuntimeException(e); + } + keyGen.init(256); + return keyGen.generateKey(); + } +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java index ddf0fe578cff..6de1e060714a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java @@ -430,7 +430,7 @@ public PBEClusterKeyCache getPBEClusterKeyCache() { } protected void buildPBEClusterKeyCache() throws IOException { - if (pbeClusterKeyCache == null) { + if (pbeClusterKeyCache == null && Server.isPBEEnabled(this)) { pbeClusterKeyCache = PBEClusterKeyCache.createCache(new PBEClusterKeyAccessor(this)); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java index dcca89e8b2fb..f7be347cdfa7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java @@ -122,4 +122,14 @@ default FileSystem getFileSystem() { default boolean isStopping() { return false; } + + /** + * From the given server, determine if PBE is enabbled. + * @return true if PBE is enabled + */ + static boolean isPBEEnabled(Server server) { + return server.getConfiguration() + .getBoolean(HConstants.CRYPTO_PBE_ENABLED_CONF_KEY, HConstants.CRYPTO_PBE_DEFAULT_ENABLED); + } + } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyAccessor.java index ef62e92d93c6..83dd5a250631 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyAccessor.java @@ -35,7 +35,7 @@ import static org.apache.hadoop.hbase.HConstants.CLUSTER_KEY_FILE_PREFIX; @InterfaceAudience.Private -public class PBEClusterKeyAccessor extends PBEKeyManager { +public class PBEClusterKeyAccessor extends PBEKeyAccessorBase { protected final Path clusterKeyDir; public PBEClusterKeyAccessor(Server server) throws IOException { @@ -77,6 +77,7 @@ public List getAllClusterKeyFiles() throws IOException { return new ArrayList<>(clusterKeys.values()); } + public PBEKeyData loadClusterKey(Path keyPath) throws IOException { PBEKeyProvider provider = getKeyProvider(); return provider.unwrapKey(loadKeyMetadata(keyPath)); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java index 0714e0b5b362..29f54a720a88 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java @@ -18,23 +18,25 @@ package org.apache.hadoop.hbase.keymeta; import org.apache.hadoop.hbase.io.crypto.PBEKeyData; +import org.apache.hadoop.hbase.io.crypto.PBEKeyProvider; +import org.apache.hadoop.hbase.io.crypto.PBEKeyStoreKeyProvider; import org.apache.yetus.audience.InterfaceAudience; import java.io.IOException; import java.security.KeyException; import java.util.List; -// TODO: Also integrate with the key provider when it is not found in the cache??? /** * This class provides a unified access on top of both {@code PBEKeyDataCache} (L1) and * {@code PBEKeymetaTableAccessor} (L2) to access PBE keys. When the getter is called, it first * checks if L1 cache has the key, if not, it tries to get the key from L2. */ @InterfaceAudience.Private -public class PBEKeyAccessor { +public class PBEKeyAccessor extends PBEKeyAccessorBase { private final PBEKeyDataCache keyDataCache; private final PBEKeymetaTableAccessor keymetaAccessor; public PBEKeyAccessor(PBEKeymetaTableAccessor keymetaAccessor) { + super(keymetaAccessor.server); this.keymetaAccessor = keymetaAccessor; keyDataCache = new PBEKeyDataCache(); } @@ -42,17 +44,29 @@ public PBEKeyAccessor(PBEKeymetaTableAccessor keymetaAccessor) { /** * Get key data by key metadata. * - * @param pbePrefix The prefix of the key + * @param pbe_prefix The prefix of the key * @param keyNamespace The namespace of the key * @param keyMetadata The metadata of the key * @return The key data or {@code null} * @throws IOException if an error occurs while retrieving the key */ - public PBEKeyData getKey(byte[] pbePrefix, String keyNamespace, String keyMetadata) - throws IOException, KeyException { + public PBEKeyData getKey(byte[] pbe_prefix, String keyNamespace, String keyMetadata) + throws IOException, KeyException { + checkPBEEnabled(); + // 1. Check L1 cache. PBEKeyData keyData = keyDataCache.getEntry(keyMetadata); if (keyData == null) { - keyData = keymetaAccessor.getKey(pbePrefix, keyNamespace, keyMetadata); + // 2. Check L2 cache. + keyData = keymetaAccessor.getKey(pbe_prefix, keyNamespace, keyMetadata); + if (keyData == null) { + // 3. Check with Key Provider. + PBEKeyProvider provider = getKeyProvider(); + keyData = provider.unwrapKey(keyMetadata); + LOG.info("Got key data with status: {} and metadata: {} for prefix: {}", + keyData.getKeyStatus(), keyData.getKeyMetadata(), + PBEKeyStoreKeyProvider.encodeToPrefixStr(pbe_prefix)); + keymetaAccessor.addKey(keyData); + } if (keyData != null) { keyDataCache.addEntry(keyData); } @@ -70,6 +84,7 @@ public PBEKeyData getKey(byte[] pbePrefix, String keyNamespace, String keyMetada */ public PBEKeyData getAnActiveKey(byte[] pbePrefix, String keyNamespace) throws IOException, KeyException { + checkPBEEnabled(); PBEKeyData keyData = keyDataCache.getRandomEntryForPrefix(pbePrefix, keyNamespace); if (keyData == null) { List activeKeys = keymetaAccessor.getActiveKeys(pbePrefix, keyNamespace); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessorBase.java similarity index 65% rename from hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyManager.java rename to hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessorBase.java index bb426a8661b6..2839669a4f3e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessorBase.java @@ -25,19 +25,21 @@ import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.IOException; /** - * A base class for all keymeta manager implementations. + * A base class for all keymeta accessor/manager implementations. */ @InterfaceAudience.Private -public abstract class PBEKeyManager { - protected static final Logger LOG = LoggerFactory.getLogger(PBEKeyManager.class); +public abstract class PBEKeyAccessorBase { + protected static final Logger LOG = LoggerFactory.getLogger(PBEKeyAccessorBase.class); protected final Server server; private Boolean pbeEnabled; + private Integer perPrefixActiveKeyCount; - public PBEKeyManager(Server server) { + public PBEKeyAccessorBase(Server server) { this.server = server; } @@ -62,9 +64,31 @@ protected PBEKeyProvider getKeyProvider() { */ protected boolean isPBEEnabled() { if (pbeEnabled == null) { - pbeEnabled = server.getConfiguration().getBoolean(HConstants.CRYPTO_PBE_ENABLED_CONF_KEY, - false); + pbeEnabled = Server.isPBEEnabled(server); } return pbeEnabled; } + + /** + * Check if PBE is enabled, otherwise throw exception. + * @throws IOException if PBE is not enabled. + */ + protected void checkPBEEnabled() throws IOException { + if (! isPBEEnabled()) { + throw new IOException("PBE is currently not enabled in HBase configuration"); + } + } + + protected int getPerPrefixActiveKeyConfCount() throws IOException { + if (perPrefixActiveKeyCount == null) { + perPrefixActiveKeyCount = server.getConfiguration().getInt( + HConstants.CRYPTO_PBE_PER_PREFIX_ACTIVE_KEY_COUNT, + HConstants.CRYPTO_PBE_PER_PREFIX_ACTIVE_KEY_DEFAULT_COUNT); + } + if (perPrefixActiveKeyCount <= 0) { + throw new IOException("Invalid value: " + perPrefixActiveKeyCount + " configured for: " + + HConstants.CRYPTO_PBE_PER_PREFIX_ACTIVE_KEY_COUNT); + } + return perPrefixActiveKeyCount; + } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java index bc09f4685a83..08b62574944a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java @@ -21,13 +21,15 @@ import org.apache.hadoop.hbase.io.crypto.PBEKeyData; import org.apache.hadoop.hbase.io.crypto.PBEKeyProvider; import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus; +import org.apache.hadoop.hbase.io.crypto.PBEKeyStoreKeyProvider; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.security.KeyException; -import java.util.Base64; +import java.util.HashSet; import java.util.List; +import java.util.Set; @InterfaceAudience.Private public class PBEKeymetaAdminImpl extends PBEKeymetaTableAccessor implements PBEKeymetaAdmin { @@ -39,39 +41,41 @@ public PBEKeymetaAdminImpl(Server server) { @Override public PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOException { - if (! isPBEEnabled()) { - throw new IOException("PBE is currently not enabled in HBase configuration"); - } + checkPBEEnabled(); LOG.info("Trying to enable PBE on key: {} under namespace: {}", pbePrefix, keyNamespace); - byte[] pbe_prefix = convertToPrefixBytes(pbePrefix); + byte[] pbe_prefix = PBEKeyStoreKeyProvider.decodeToPrefixBytes(pbePrefix); PBEKeyProvider provider = getKeyProvider(); - PBEKeyData pbeKey = provider.getPBEKey(pbe_prefix, keyNamespace); - LOG.info("Got key data with status: {} and metadata: {} for prefix: {}", pbeKey.getKeyStatus(), - pbeKey.getKeyMetadata(), pbePrefix); - addKey(pbeKey); - return pbeKey.getKeyStatus(); + int perPrefixActiveKeyConfCount = getPerPrefixActiveKeyConfCount(); + Set retrievedKeys = new HashSet<>(perPrefixActiveKeyConfCount); + PBEKeyData pbeKey = null; + for (int i = 0; i < perPrefixActiveKeyConfCount; ++i) { + pbeKey = provider.getPBEKey(pbe_prefix, keyNamespace); + if (pbeKey == null) { + throw new IOException("Invalid null PBE key received from key provider"); + } + if (retrievedKeys.contains(pbeKey)) { + // This typically means, the key provider is not capable of producing multiple active keys. + LOG.info("enablePBE: configured key count per prefix: " + perPrefixActiveKeyConfCount + + " but received only: " + retrievedKeys.size() + " unique keys."); + break; + } + retrievedKeys.add(pbeKey); + LOG.info("enablePBE: got key data with status: {} and metadata: {} for prefix: {}", + pbeKey.getKeyStatus(), pbeKey.getKeyMetadata(), pbePrefix); + addKey(pbeKey); + } + // pbeKey can't be null at this point as perPrefixActiveKeyConfCount will always be > 0, + // but the null check is needed to avoid any warning. + return pbeKey == null ? null : pbeKey.getKeyStatus(); } @Override public List getPBEKeyStatuses(String pbePrefix, String keyNamespace) throws IOException, KeyException { - if (! isPBEEnabled()) { - throw new IOException("PBE is currently not enabled in HBase configuration"); - } + checkPBEEnabled(); LOG.info("Getting key statuses for PBE on key: {} under namespace: {}", pbePrefix, keyNamespace); - byte[] pbe_prefix = convertToPrefixBytes(pbePrefix); + byte[] pbe_prefix = PBEKeyStoreKeyProvider.decodeToPrefixBytes(pbePrefix); return super.getAllKeys(pbe_prefix, keyNamespace); } - - private static byte[] convertToPrefixBytes(String pbePrefix) throws IOException { - byte[] pbe_prefix; - try { - pbe_prefix = Base64.getDecoder().decode(pbePrefix); - } - catch (IllegalArgumentException e) { - throw new IOException("Failed to decode specified prefix as Base64 string: " + pbePrefix, e); - } - return pbe_prefix; - } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java index ec2f25257e9c..4dfc4dbbcc42 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java @@ -46,7 +46,7 @@ * Accessor for PBE keymeta table. */ @InterfaceAudience.Private -public class PBEKeymetaTableAccessor extends PBEKeyManager { +public class PBEKeymetaTableAccessor extends PBEKeyAccessorBase { private static final String KEY_META_INFO_FAMILY_STR = "info"; public static final byte[] KEY_META_INFO_FAMILY = Bytes.toBytes(KEY_META_INFO_FAMILY_STR); @@ -88,6 +88,7 @@ public PBEKeymetaTableAccessor(Server server) { * @throws IOException when there is an underlying IOException. */ public void addKey(PBEKeyData keyData) throws IOException { + checkPBEEnabled(); final Put putForMetadata = addMutationColumns(new Put(constructRowKeyForMetadata(keyData)), keyData); Connection connection = server.getConnection(); @@ -99,16 +100,17 @@ public void addKey(PBEKeyData keyData) throws IOException { /** * Get all the keys for the specified pbe_prefix and key_namespace. * - * @param pbePrefix The prefix + * @param pbe_prefix The prefix * @param keyNamespace The namespace * @return a list of key data, one for each key, can be empty when none were found. * @throws IOException when there is an underlying IOException. * @throws KeyException when there is an underlying KeyException. */ - protected List getAllKeys(byte[] pbePrefix, String keyNamespace) + protected List getAllKeys(byte[] pbe_prefix, String keyNamespace) throws IOException, KeyException { + checkPBEEnabled(); Connection connection = server.getConnection(); - byte[] prefixForScan = Bytes.add(Bytes.toBytes(pbePrefix.length), pbePrefix, + byte[] prefixForScan = Bytes.add(Bytes.toBytes(pbe_prefix.length), pbe_prefix, Bytes.toBytes(keyNamespace)); try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { PrefixFilter prefixFilter = new PrefixFilter(prefixForScan); @@ -119,7 +121,7 @@ protected List getAllKeys(byte[] pbePrefix, String keyNamespace) ResultScanner scanner = table.getScanner(scan); List allKeys = new ArrayList<>(); for (Result result : scanner) { - PBEKeyData keyData = parseFromResult(pbePrefix, keyNamespace, result); + PBEKeyData keyData = parseFromResult(pbe_prefix, keyNamespace, result); if (keyData != null) { allKeys.add(keyData); } @@ -131,16 +133,17 @@ protected List getAllKeys(byte[] pbePrefix, String keyNamespace) /** * Get all the active keys for the specified pbe_prefix and key_namespace. * - * @param pbePrefix The prefix + * @param pbe_prefix The prefix * @param keyNamespace The namespace * @return a list of key data, one for each active key, can be empty when none were found. * @throws IOException when there is an underlying IOException. * @throws KeyException when there is an underlying KeyException. */ - public List getActiveKeys(byte[] pbePrefix, String keyNamespace) + public List getActiveKeys(byte[] pbe_prefix, String keyNamespace) throws IOException, KeyException { + checkPBEEnabled(); List activeKeys = new ArrayList<>(); - for (PBEKeyData keyData : getAllKeys(pbePrefix, keyNamespace)) { + for (PBEKeyData keyData : getAllKeys(pbe_prefix, keyNamespace)) { if (keyData.getKeyStatus() == PBEKeyStatus.ACTIVE) { activeKeys.add(keyData); } @@ -149,40 +152,42 @@ public List getActiveKeys(byte[] pbePrefix, String keyNamespace) } /** - * Get the specific key identified by pbePrefix, keyNamespace and keyMetadata. + * Get the specific key identified by pbe_prefix, keyNamespace and keyMetadata. * - * @param pbePrefix The prefix. + * @param pbe_prefix The prefix. * @param keyNamespace The namespace. * @param keyMetadata The metadata. * @return the key or {@code null} * @throws IOException when there is an underlying IOException. * @throws KeyException when there is an underlying KeyException. */ - public PBEKeyData getKey(byte[] pbePrefix, String keyNamespace, String keyMetadata) + public PBEKeyData getKey(byte[] pbe_prefix, String keyNamespace, String keyMetadata) throws IOException, KeyException { + checkPBEEnabled(); Connection connection = server.getConnection(); try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { - byte[] rowKey = constructRowKeyForMetadata(pbePrefix, keyNamespace, + byte[] rowKey = constructRowKeyForMetadata(pbe_prefix, keyNamespace, PBEKeyData.constructMetadataHash(keyMetadata)); Result result = table.get(new Get(rowKey)); - return parseFromResult(pbePrefix, keyNamespace, result); + return parseFromResult(pbe_prefix, keyNamespace, result); } } /** - * Report read or write operation count on the specific key identified by pbePrefix, keyNamespace + * Report read or write operation count on the specific key identified by pbe_prefix, keyNamespace * and keyMetadata. The reported value is added to the existing operation count using the * Increment mutation. - * @param pbePrefix The prefix. + * @param pbe_prefix The prefix. * @param keyNamespace The namespace. * @param keyMetadata The metadata. * @throws IOException when there is an underlying IOException. */ - public void reportOperation(byte[] pbePrefix, String keyNamespace, String keyMetadata, long count, + public void reportOperation(byte[] pbe_prefix, String keyNamespace, String keyMetadata, long count, boolean isReadOperation) throws IOException { + checkPBEEnabled(); Connection connection = server.getConnection(); try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { - byte[] rowKey = constructRowKeyForMetadata(pbePrefix, keyNamespace, + byte[] rowKey = constructRowKeyForMetadata(pbe_prefix, keyNamespace, PBEKeyData.constructMetadataHash(keyMetadata)); Increment incr = new Increment(rowKey) .addColumn(KEY_META_INFO_FAMILY, @@ -222,14 +227,14 @@ private byte[] constructRowKeyForMetadata(PBEKeyData keyData) { keyData.getKeyMetadataHash()); } - private static byte[] constructRowKeyForMetadata(byte[] pbePrefix, String keyNamespace, + private static byte[] constructRowKeyForMetadata(byte[] pbe_prefix, String keyNamespace, byte[] keyMetadataHash) { - int prefixLength = pbePrefix.length; - return Bytes.add(Bytes.toBytes(prefixLength), pbePrefix, Bytes.toBytesBinary(keyNamespace), + int prefixLength = pbe_prefix.length; + return Bytes.add(Bytes.toBytes(prefixLength), pbe_prefix, Bytes.toBytesBinary(keyNamespace), keyMetadataHash); } - private PBEKeyData parseFromResult(byte[] pbePrefix, String keyNamespace, Result result) + private PBEKeyData parseFromResult(byte[] pbe_prefix, String keyNamespace, Result result) throws IOException, KeyException { if (result == null || result.isEmpty()) { return null; @@ -258,7 +263,7 @@ private PBEKeyData parseFromResult(byte[] pbePrefix, String keyNamespace, Result long readOpCount = readOpValue != null ? Bytes.toLong(readOpValue) : 0; byte[] writeOpValue = result.getValue(KEY_META_INFO_FAMILY, WRITE_OP_COUNT_QUAL_BYTES); long writeOpCount = writeOpValue != null ? Bytes.toLong(writeOpValue) : 0; - PBEKeyData dekKeyData = new PBEKeyData(pbePrefix, keyNamespace, dek, keyStatus, dekMetadata, + PBEKeyData dekKeyData = new PBEKeyData(pbe_prefix, keyNamespace, dek, keyStatus, dekMetadata, refreshedTimestamp, readOpCount, writeOpCount); if (dek != null) { long dekChecksum = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY, diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestPBEClusterKey.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestPBEClusterKey.java new file mode 100644 index 000000000000..13493da48d3f --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestPBEClusterKey.java @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.master; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HBaseTestingUtil; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.io.crypto.Encryption; +import org.apache.hadoop.hbase.io.crypto.KeyProvider; +import org.apache.hadoop.hbase.io.crypto.PBEKeyProvider; +import org.apache.hadoop.hbase.io.crypto.MockPBEKeyProvider; +import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; +import org.apache.hadoop.hbase.testclassification.MasterTests; +import org.apache.hadoop.hbase.testclassification.MediumTests; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import java.io.IOException; +import java.security.Key; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +@Category({ MasterTests.class, MediumTests.class }) +public class TestPBEClusterKey { + + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestPBEClusterKey.class); + + public static final String CLUSTER_KEY_ALIAS = "cluster-key"; + public static final byte[] CLUSTER_ID = CLUSTER_KEY_ALIAS.getBytes(); + + + private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); + private static Configuration conf = TEST_UTIL.getConfiguration(); + + @BeforeClass + public static void setUp() throws Exception { + conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockPBEKeyProvider.class.getName()); + conf.set(HConstants.CRYPTO_PBE_ENABLED_CONF_KEY, "true"); + + // Start the minicluster + TEST_UTIL.startMiniCluster(1); + } + + @Test + public void testClusterKeyInitializationAndRotation() throws Exception { + HMaster master = TEST_UTIL.getHBaseCluster().getMaster(); + KeyProvider keyProvider = Encryption.getKeyProvider(master.getConfiguration()); + assertNotNull(keyProvider); + assertTrue(keyProvider instanceof PBEKeyProvider); + assertTrue(keyProvider instanceof MockPBEKeyProvider); + MockPBEKeyProvider pbeKeyProvider = (MockPBEKeyProvider) keyProvider; + PBEClusterKeyCache pbeClusterKeyCache = master.getPBEClusterKeyCache(); + assertNotNull(pbeClusterKeyCache); + assertEquals(pbeKeyProvider.getClusterKey(master.getClusterId().getBytes()), + pbeClusterKeyCache.getLatestClusterKey()); + + // Test rotation of cluster key by changing the key that the key provider provides and restart master. + Key newCluterKey = MockPBEKeyProvider.generateSecretKey(); + pbeKeyProvider.setKey(master.getClusterId().getBytes(), newCluterKey); + TEST_UTIL.shutdownMiniCluster(); + Thread.sleep(2000); + TEST_UTIL.restartHBaseCluster(1); + master = TEST_UTIL.getHBaseCluster().getMaster(); + assertEquals(newCluterKey, master.getPBEClusterKeyCache().getLatestClusterKey().getTheKey()); + } + + @AfterClass + public static void tearDown() throws Exception { + TEST_UTIL.shutdownMiniCluster(); + } + +} diff --git a/hbase-testing-util/src/main/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-testing-util/src/main/java/org/apache/hadoop/hbase/HBaseTestingUtility.java index dc4bc1816acc..a15b8db5e701 100644 --- a/hbase-testing-util/src/main/java/org/apache/hadoop/hbase/HBaseTestingUtility.java +++ b/hbase-testing-util/src/main/java/org/apache/hadoop/hbase/HBaseTestingUtility.java @@ -89,6 +89,7 @@ import org.apache.hadoop.hbase.io.hfile.ChecksumUtil; import org.apache.hadoop.hbase.io.hfile.HFile; import org.apache.hadoop.hbase.ipc.RpcServerInterface; +import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdminClient; import org.apache.hadoop.hbase.logging.Log4jUtils; import org.apache.hadoop.hbase.mapreduce.MapreduceTestingShim; import org.apache.hadoop.hbase.master.HMaster; @@ -201,6 +202,8 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility { /** This is for unit tests parameterized with a single boolean. */ public static final List MEMSTORETS_TAGS_PARAMETRIZED = memStoreTSAndTagsCombination(); + private Admin hbaseAdmin = null; + /** * Checks to see if a specific port is available. * @param port the port number to check for availability @@ -2942,7 +2945,9 @@ public Admin getAdmin() throws IOException { return hbaseAdmin; } - private Admin hbaseAdmin = null; + public PBEKeymetaAdminClient getPBEAdmin() throws IOException { + return new PBEKeymetaAdminClient(getConnection()); + } /** * Returns an {@link Hbck} instance. Needs be closed when done. From d975e6726d808710365bea10b1835b553a02df27 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Mon, 31 Mar 2025 17:17:17 +0530 Subject: [PATCH 09/70] Use shutdownMiniHBaseCluster() instead of shutdownMiniCluster --- .../hbase/master/TestPBEClusterKey.java | 35 +++++++++---------- 1 file changed, 16 insertions(+), 19 deletions(-) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestPBEClusterKey.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestPBEClusterKey.java index 13493da48d3f..cd36d774b985 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestPBEClusterKey.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestPBEClusterKey.java @@ -17,7 +17,6 @@ */ package org.apache.hadoop.hbase.master; -import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; @@ -28,12 +27,11 @@ import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.After; +import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; -import java.io.IOException; import java.security.Key; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; @@ -46,20 +44,24 @@ public class TestPBEClusterKey { public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestPBEClusterKey.class); + private HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); + public static final String CLUSTER_KEY_ALIAS = "cluster-key"; public static final byte[] CLUSTER_ID = CLUSTER_KEY_ALIAS.getBytes(); - - private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); - private static Configuration conf = TEST_UTIL.getConfiguration(); - - @BeforeClass - public static void setUp() throws Exception { - conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockPBEKeyProvider.class.getName()); - conf.set(HConstants.CRYPTO_PBE_ENABLED_CONF_KEY, "true"); + @Before + public void setUp() throws Exception { + TEST_UTIL.getConfiguration().set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockPBEKeyProvider.class.getName()); + TEST_UTIL.getConfiguration().set(HConstants.CRYPTO_PBE_ENABLED_CONF_KEY, "true"); // Start the minicluster TEST_UTIL.startMiniCluster(1); + TEST_UTIL.waitFor(60000, () -> TEST_UTIL.getMiniHBaseCluster().getMaster().isInitialized()); + } + + @After + public void tearDown() throws Exception { + TEST_UTIL.shutdownMiniCluster(); } @Test @@ -78,16 +80,11 @@ public void testClusterKeyInitializationAndRotation() throws Exception { // Test rotation of cluster key by changing the key that the key provider provides and restart master. Key newCluterKey = MockPBEKeyProvider.generateSecretKey(); pbeKeyProvider.setKey(master.getClusterId().getBytes(), newCluterKey); - TEST_UTIL.shutdownMiniCluster(); + TEST_UTIL.shutdownMiniHBaseCluster(); Thread.sleep(2000); TEST_UTIL.restartHBaseCluster(1); + TEST_UTIL.waitFor(60000, () -> TEST_UTIL.getMiniHBaseCluster().getMaster().isInitialized()); master = TEST_UTIL.getHBaseCluster().getMaster(); assertEquals(newCluterKey, master.getPBEClusterKeyCache().getLatestClusterKey().getTheKey()); } - - @AfterClass - public static void tearDown() throws Exception { - TEST_UTIL.shutdownMiniCluster(); - } - } From 99249b507070bd979adf7dc11f3fa4d1a1db90d4 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Tue, 1 Apr 2025 09:47:13 +0530 Subject: [PATCH 10/70] Got the test working for STK rotation --- .../hbase/io/crypto/MockPBEKeyProvider.java | 34 +++++++++--- .../hbase/master/TestPBEClusterKey.java | 53 +++++++++++++++---- 2 files changed, 68 insertions(+), 19 deletions(-) diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockPBEKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockPBEKeyProvider.java index 6a38cdb77403..a7dfa71e0689 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockPBEKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockPBEKeyProvider.java @@ -27,6 +27,8 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.util.Bytes; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A simple implementation of PBEKeyProvider for testing. It generates a key on demand given a @@ -34,15 +36,18 @@ * calling setKey. */ public class MockPBEKeyProvider extends MockAesKeyProvider implements PBEKeyProvider { + protected static final Logger LOG = LoggerFactory.getLogger(MockPBEKeyProvider.class); + public Map keys = new HashMap<>(); public Map keyStatus = new HashMap<>(); + private String clusterKeyAlias = "default_cluster_key_alias"; @Override public void initConfig(Configuration conf) { // NO-OP } @Override public PBEKeyData getClusterKey(byte[] clusterId) throws IOException { - return getKey(clusterId); + return getKey(clusterId, clusterKeyAlias); } @Override public PBEKeyData getPBEKey(byte[] pbe_prefix, String key_namespace) @@ -50,8 +55,12 @@ public class MockPBEKeyProvider extends MockAesKeyProvider implements PBEKeyProv return getKey(pbe_prefix); } - @Override public PBEKeyData unwrapKey(String keyAlias) throws IOException { - return getKey(keyAlias.getBytes()); + @Override public PBEKeyData unwrapKey(String keyMetadata) throws IOException { + String[] meta_toks = keyMetadata.split(":"); + if (keys.containsKey(meta_toks[1])) { + return getKey(meta_toks[0].getBytes(), meta_toks[1]); + } + return null; } /** @@ -59,6 +68,10 @@ public class MockPBEKeyProvider extends MockAesKeyProvider implements PBEKeyProv */ public PBEKeyData getKey(byte[] prefix_bytes) { String alias = Bytes.toString(prefix_bytes); + return getKey(prefix_bytes, alias); + } + + public PBEKeyData getKey(byte[] prefix_bytes, String alias) { Key key = keys.get(alias); if (key == null) { key = generateSecretKey(); @@ -66,15 +79,20 @@ public PBEKeyData getKey(byte[] prefix_bytes) { } PBEKeyStatus keyStatus = this.keyStatus.get(alias); return new PBEKeyData(prefix_bytes, PBEKeyData.KEY_NAMESPACE_GLOBAL, key, - keyStatus == null ? PBEKeyStatus.ACTIVE : keyStatus, Bytes.toString(prefix_bytes)); + keyStatus == null ? PBEKeyStatus.ACTIVE : keyStatus, + Bytes.toString(prefix_bytes)+":"+alias); + } + + public void setKeyStatus(String alias, PBEKeyStatus status) { + keyStatus.put(alias, status); } - public void setKeyStatus(byte[] prefix_bytes, PBEKeyStatus status) { - keyStatus.put(Bytes.toString(prefix_bytes), status); + public void setKey(String alias, Key key) { + keys.put(alias, key); } - public void setKey(byte[] prefix_bytes, Key key) { - keys.put(Bytes.toString(prefix_bytes), key); + public void setCluterKeyAlias(String alias) { + this.clusterKeyAlias = alias; } /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestPBEClusterKey.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestPBEClusterKey.java index cd36d774b985..02e8e68720a5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestPBEClusterKey.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestPBEClusterKey.java @@ -22,16 +22,20 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.io.crypto.Encryption; import org.apache.hadoop.hbase.io.crypto.KeyProvider; +import org.apache.hadoop.hbase.io.crypto.PBEKeyData; import org.apache.hadoop.hbase.io.crypto.PBEKeyProvider; import org.apache.hadoop.hbase.io.crypto.MockPBEKeyProvider; +import org.apache.hadoop.hbase.keymeta.PBEClusterKeyAccessor; import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; +import org.apache.hadoop.hbase.util.Bytes; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; +import java.io.IOException; import java.security.Key; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; @@ -64,6 +68,26 @@ public void tearDown() throws Exception { TEST_UTIL.shutdownMiniCluster(); } + private PBEKeyData validateInitialState(HMaster master, MockPBEKeyProvider pbeKeyProvider ) + throws IOException { + PBEClusterKeyAccessor pbeClusterKeyAccessor = new PBEClusterKeyAccessor(master); + assertEquals(1, pbeClusterKeyAccessor.getAllClusterKeyFiles().size()); + PBEClusterKeyCache pbeClusterKeyCache = master.getPBEClusterKeyCache(); + assertNotNull(pbeClusterKeyCache); + PBEKeyData clusterKey = pbeClusterKeyCache.getLatestClusterKey(); + assertEquals(pbeKeyProvider.getClusterKey(master.getClusterId().getBytes()), clusterKey); + assertEquals(clusterKey, + pbeClusterKeyCache.getClusterKeyByChecksum(clusterKey.getKeyChecksum())); + return clusterKey; + } + + private void restartCluter() throws Exception { + TEST_UTIL.shutdownMiniHBaseCluster(); + Thread.sleep(2000); + TEST_UTIL.restartHBaseCluster(1); + TEST_UTIL.waitFor(60000, () -> TEST_UTIL.getMiniHBaseCluster().getMaster().isInitialized()); + } + @Test public void testClusterKeyInitializationAndRotation() throws Exception { HMaster master = TEST_UTIL.getHBaseCluster().getMaster(); @@ -71,20 +95,27 @@ public void testClusterKeyInitializationAndRotation() throws Exception { assertNotNull(keyProvider); assertTrue(keyProvider instanceof PBEKeyProvider); assertTrue(keyProvider instanceof MockPBEKeyProvider); - MockPBEKeyProvider pbeKeyProvider = (MockPBEKeyProvider) keyProvider; - PBEClusterKeyCache pbeClusterKeyCache = master.getPBEClusterKeyCache(); - assertNotNull(pbeClusterKeyCache); - assertEquals(pbeKeyProvider.getClusterKey(master.getClusterId().getBytes()), - pbeClusterKeyCache.getLatestClusterKey()); + PBEKeyData initialClusterKey = validateInitialState(master, (MockPBEKeyProvider) keyProvider); + + restartCluter(); + master = TEST_UTIL.getHBaseCluster().getMaster(); + validateInitialState(master, (MockPBEKeyProvider) keyProvider); // Test rotation of cluster key by changing the key that the key provider provides and restart master. + String newAlias = "new_cluster_key"; + ((MockPBEKeyProvider) keyProvider).setCluterKeyAlias(newAlias); Key newCluterKey = MockPBEKeyProvider.generateSecretKey(); - pbeKeyProvider.setKey(master.getClusterId().getBytes(), newCluterKey); - TEST_UTIL.shutdownMiniHBaseCluster(); - Thread.sleep(2000); - TEST_UTIL.restartHBaseCluster(1); - TEST_UTIL.waitFor(60000, () -> TEST_UTIL.getMiniHBaseCluster().getMaster().isInitialized()); + ((MockPBEKeyProvider) keyProvider).setKey(newAlias, newCluterKey); + restartCluter(); master = TEST_UTIL.getHBaseCluster().getMaster(); - assertEquals(newCluterKey, master.getPBEClusterKeyCache().getLatestClusterKey().getTheKey()); + PBEClusterKeyAccessor pbeClusterKeyAccessor = new PBEClusterKeyAccessor(master); + assertEquals(2, pbeClusterKeyAccessor.getAllClusterKeyFiles().size()); + PBEClusterKeyCache pbeClusterKeyCache = master.getPBEClusterKeyCache(); + assertEquals(0, Bytes.compareTo(newCluterKey.getEncoded(), + pbeClusterKeyCache.getLatestClusterKey().getTheKey().getEncoded())); + assertEquals(initialClusterKey, + pbeClusterKeyAccessor.loadClusterKey(pbeClusterKeyAccessor.getAllClusterKeyFiles().get(1))); + assertEquals(initialClusterKey, + pbeClusterKeyCache.getClusterKeyByChecksum(initialClusterKey.getKeyChecksum())); } } From 434ff6457379e69c18bb1e9511c03896345ff8fb Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Tue, 1 Apr 2025 12:09:30 +0530 Subject: [PATCH 11/70] Better coverage for default key provider --- .../hbase/io/crypto/PBEKeyProvider.java | 8 ++- .../io/crypto/PBEKeyStoreKeyProvider.java | 14 ++-- .../hbase/io/crypto/MockPBEKeyProvider.java | 4 ++ .../io/crypto/TestPBEKeyStoreKeyProvider.java | 71 +++++++++++++++++-- .../hbase/master/PBEClusterKeyManager.java | 5 ++ .../hbase/master/TestPBEClusterKey.java | 34 ++++++--- 6 files changed, 112 insertions(+), 24 deletions(-) diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java index 8bb5ee6ce00d..8da73a3e1895 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java @@ -51,17 +51,19 @@ public interface PBEKeyProvider extends KeyProvider { * * @param pbe_prefix Key prefix * @param key_namespace Key namespace - * @return PBEKeyData for the cluster key and is guaranteed to be not {@code null} + * @return PBEKeyData for the cluster key and is expected to be not {@code null} * @throws IOException if an error occurs while retrieving the key */ PBEKeyData getPBEKey(byte[] pbe_prefix, String key_namespace) throws IOException; /** * Retrieve a key identified by the key metadata. The key metadata is typically generated by the - * same key provider via the {@link #getClusterKey(byte[])} or {@link #getPBEKey(byte[], String)} methods. + * same key provider via the {@link #getClusterKey(byte[])} or {@link #getPBEKey(byte[], String)} + * methods. * * @param keyMetaData Key metadata - * @return PBEKeyData for the key represented by the metadata + * @return PBEKeyData for the key represented by the metadata and is expected to be not + * {@code null} * @throws IOException if an error occurs while generating the key */ PBEKeyData unwrapKey(String keyMetaData) throws IOException; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java index 098afaa0d954..cda74d3e5df7 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java @@ -68,19 +68,19 @@ public PBEKeyData unwrapKey(String keyMetadataStr) throws IOException { isActive ? PBEKeyStatus.FAILED : PBEKeyStatus.DISABLED, keyMetadataStr); } - private String generateKeyMetadata(String aliasName, String encodedPrefix) { - return GsonUtil.getDefaultInstance().toJson(new HashMap() {{ - put(KEY_METADATA_ALIAS, aliasName); - put(KEY_METADATA_PREFIX, encodedPrefix); - }}, HashMap.class); - } - private void checkConfig() { if (conf == null) { throw new IllegalStateException("initConfig is not called or config is null"); } } + public static String generateKeyMetadata(String aliasName, String encodedPrefix) { + return GsonUtil.getDefaultInstance().toJson(new HashMap() {{ + put(KEY_METADATA_ALIAS, aliasName); + put(KEY_METADATA_PREFIX, encodedPrefix); + }}, HashMap.class); + } + public static byte[] decodeToPrefixBytes(String pbePrefix) throws IOException { byte[] pbe_prefix; try { diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockPBEKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockPBEKeyProvider.java index a7dfa71e0689..4b3a1180ebb7 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockPBEKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockPBEKeyProvider.java @@ -95,6 +95,10 @@ public void setCluterKeyAlias(String alias) { this.clusterKeyAlias = alias; } + public String getClusterKeyAlias() { + return this.clusterKeyAlias; + } + /** * Generate a new secret key. * @return the key diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java index 6cc1bbbdfa22..986b97d6d316 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java @@ -43,6 +43,7 @@ import static org.apache.hadoop.hbase.io.crypto.PBEKeyStoreKeyProvider.KEY_METADATA_PREFIX; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; @Category({ MiscTests.class, SmallTests.class }) @RunWith(Parameterized.class) @@ -114,13 +115,44 @@ private void addEntry(String alias, String prefix) { @Test public void testGetPBEKey() throws Exception { - for (Bytes prefix: prefix2key.keySet()) { + for (Bytes prefix : prefix2key.keySet()) { PBEKeyData keyData = pbeProvider.getPBEKey(prefix.get(), PBEKeyData.KEY_NAMESPACE_GLOBAL); assertPBEKeyData(keyData, PBEKeyStatus.ACTIVE, prefix2key.get(prefix).get(), prefix.get(), prefix2alias.get(prefix)); } } + @Test + public void testGetInactiveKey() throws Exception { + Bytes firstPrefix = prefix2key.keySet().iterator().next(); + String encPrefix = Base64.getEncoder().encodeToString(firstPrefix.get()); + conf.set(HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encPrefix + ".active", "false"); + PBEKeyData keyData = pbeProvider.getPBEKey(firstPrefix.get(), PBEKeyData.KEY_NAMESPACE_GLOBAL); + assertNotNull(keyData); + assertPBEKeyData(keyData, PBEKeyStatus.INACTIVE, prefix2key.get(firstPrefix).get(), + firstPrefix.get(), prefix2alias.get(firstPrefix)); + } + + @Test + public void testGetInvalidKey() throws Exception { + byte[] invalidPrefixBytes = "invalid".getBytes(); + PBEKeyData keyData = pbeProvider.getPBEKey(invalidPrefixBytes, + PBEKeyData.KEY_NAMESPACE_GLOBAL); + assertNotNull(keyData); + assertPBEKeyData(keyData, PBEKeyStatus.FAILED, null, invalidPrefixBytes, null); + } + + @Test + public void testGetDisabledKey() throws Exception { + byte[] invalidPrefix = new byte[] { 1, 2, 3 }; + String invalidPrefixEnc = PBEKeyStoreKeyProvider.encodeToPrefixStr(invalidPrefix); + conf.set(HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + invalidPrefixEnc + ".active", "false"); + PBEKeyData keyData = pbeProvider.getPBEKey(invalidPrefix, PBEKeyData.KEY_NAMESPACE_GLOBAL); + assertNotNull(keyData); + assertPBEKeyData(keyData, PBEKeyStatus.DISABLED, null, + invalidPrefix, null); + } + @Test public void testGetClusterKey() throws Exception { PBEKeyData clusterKeyData = pbeProvider.getClusterKey(clusterId.getBytes()); @@ -128,13 +160,44 @@ public void testGetClusterKey() throws Exception { MASTER_KEY_ALIAS); } + @Test + public void testUnwrapInvalidKey() throws Exception { + String invalidAlias = "invalidAlias"; + byte[] invalidPrefix = new byte[] { 1, 2, 3 }; + String invalidPrefixEnc = PBEKeyStoreKeyProvider.encodeToPrefixStr(invalidPrefix); + String invalidMetadata = PBEKeyStoreKeyProvider.generateKeyMetadata(invalidAlias, + invalidPrefixEnc); + PBEKeyData keyData = pbeProvider.unwrapKey(invalidMetadata); + assertNotNull(keyData); + assertPBEKeyData(keyData, PBEKeyStatus.FAILED, null, invalidPrefix, + invalidAlias); + } + + @Test + public void testUnwrapDisabledKey() throws Exception { + String invalidAlias = "invalidAlias"; + byte[] invalidPrefix = new byte[] { 1, 2, 3 }; + String invalidPrefixEnc = PBEKeyStoreKeyProvider.encodeToPrefixStr(invalidPrefix); + conf.set(HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + invalidPrefixEnc + ".active", "false"); + String invalidMetadata = PBEKeyStoreKeyProvider.generateKeyMetadata(invalidAlias, + invalidPrefixEnc); + PBEKeyData keyData = pbeProvider.unwrapKey(invalidMetadata); + assertNotNull(keyData); + assertPBEKeyData(keyData, PBEKeyStatus.DISABLED, null, invalidPrefix, invalidAlias); + } + private void assertPBEKeyData(PBEKeyData keyData, PBEKeyStatus expKeyStatus, byte[] key, byte[] prefixBytes, String alias) throws Exception { assertNotNull(keyData); assertEquals(expKeyStatus, keyData.getKeyStatus()); - byte[] keyBytes = keyData.getTheKey().getEncoded(); - assertEquals(key.length, keyBytes.length); - assertEquals(new Bytes(key), keyBytes); + if (key == null) { + assertNull(keyData.getTheKey()); + } + else { + byte[] keyBytes = keyData.getTheKey().getEncoded(); + assertEquals(key.length, keyBytes.length); + assertEquals(new Bytes(key), keyBytes); + } Map keyMetadata = GsonUtil.getDefaultInstance().fromJson(keyData.getKeyMetadata(), HashMap.class); assertNotNull(keyMetadata); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/PBEClusterKeyManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/PBEClusterKeyManager.java index 3a8c821fd872..a2dad9aa460a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/PBEClusterKeyManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/PBEClusterKeyManager.java @@ -24,6 +24,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.io.crypto.PBEKeyData; import org.apache.hadoop.hbase.io.crypto.PBEKeyProvider; +import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus; import org.apache.hadoop.hbase.keymeta.PBEClusterKeyAccessor; import org.apache.yetus.audience.InterfaceAudience; import static org.apache.hadoop.hbase.HConstants.CLUSTER_KEY_FILE_PREFIX; @@ -73,6 +74,10 @@ private PBEKeyData rotateClusterKey(String currentKeyMetadata) throws IOExceptio PBEKeyProvider provider = getKeyProvider(); PBEKeyData clusterKey = provider.getClusterKey( master.getMasterFileSystem().getClusterId().toString().getBytes()); + if (clusterKey.getKeyStatus() != PBEKeyStatus.ACTIVE) { + throw new IOException("Cluster key is expected to be ACTIVE but it is: " + + clusterKey.getKeyStatus() + " for metadata: " + clusterKey.getKeyMetadata()); + } if (clusterKey != null && clusterKey.getKeyMetadata() != null && ! clusterKey.getKeyMetadata().equals(currentKeyMetadata) && saveLatestClusterKey(clusterKey.getKeyMetadata())) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestPBEClusterKey.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestPBEClusterKey.java index 02e8e68720a5..2870433f8fe4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestPBEClusterKey.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestPBEClusterKey.java @@ -25,6 +25,7 @@ import org.apache.hadoop.hbase.io.crypto.PBEKeyData; import org.apache.hadoop.hbase.io.crypto.PBEKeyProvider; import org.apache.hadoop.hbase.io.crypto.MockPBEKeyProvider; +import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus; import org.apache.hadoop.hbase.keymeta.PBEClusterKeyAccessor; import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; import org.apache.hadoop.hbase.testclassification.MasterTests; @@ -33,12 +34,14 @@ import org.junit.After; import org.junit.Before; import org.junit.ClassRule; +import org.junit.Ignore; import org.junit.Test; import org.junit.experimental.categories.Category; import java.io.IOException; import java.security.Key; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; @Category({ MasterTests.class, MediumTests.class }) @@ -50,9 +53,6 @@ public class TestPBEClusterKey { private HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); - public static final String CLUSTER_KEY_ALIAS = "cluster-key"; - public static final byte[] CLUSTER_ID = CLUSTER_KEY_ALIAS.getBytes(); - @Before public void setUp() throws Exception { TEST_UTIL.getConfiguration().set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockPBEKeyProvider.class.getName()); @@ -81,7 +81,7 @@ private PBEKeyData validateInitialState(HMaster master, MockPBEKeyProvider pbeKe return clusterKey; } - private void restartCluter() throws Exception { + private void restartCluster() throws Exception { TEST_UTIL.shutdownMiniHBaseCluster(); Thread.sleep(2000); TEST_UTIL.restartHBaseCluster(1); @@ -95,18 +95,19 @@ public void testClusterKeyInitializationAndRotation() throws Exception { assertNotNull(keyProvider); assertTrue(keyProvider instanceof PBEKeyProvider); assertTrue(keyProvider instanceof MockPBEKeyProvider); - PBEKeyData initialClusterKey = validateInitialState(master, (MockPBEKeyProvider) keyProvider); + MockPBEKeyProvider pbeKeyProvider = (MockPBEKeyProvider) keyProvider; + PBEKeyData initialClusterKey = validateInitialState(master, pbeKeyProvider); - restartCluter(); + restartCluster(); master = TEST_UTIL.getHBaseCluster().getMaster(); - validateInitialState(master, (MockPBEKeyProvider) keyProvider); + validateInitialState(master, pbeKeyProvider); // Test rotation of cluster key by changing the key that the key provider provides and restart master. String newAlias = "new_cluster_key"; - ((MockPBEKeyProvider) keyProvider).setCluterKeyAlias(newAlias); + pbeKeyProvider.setCluterKeyAlias(newAlias); Key newCluterKey = MockPBEKeyProvider.generateSecretKey(); - ((MockPBEKeyProvider) keyProvider).setKey(newAlias, newCluterKey); - restartCluter(); + pbeKeyProvider.setKey(newAlias, newCluterKey); + restartCluster(); master = TEST_UTIL.getHBaseCluster().getMaster(); PBEClusterKeyAccessor pbeClusterKeyAccessor = new PBEClusterKeyAccessor(master); assertEquals(2, pbeClusterKeyAccessor.getAllClusterKeyFiles().size()); @@ -118,4 +119,17 @@ public void testClusterKeyInitializationAndRotation() throws Exception { assertEquals(initialClusterKey, pbeClusterKeyCache.getClusterKeyByChecksum(initialClusterKey.getKeyChecksum())); } + + @Test + public void testWithInvalidClusterKey() throws Exception { + HMaster master = TEST_UTIL.getHBaseCluster().getMaster(); + KeyProvider keyProvider = Encryption.getKeyProvider(master.getConfiguration()); + MockPBEKeyProvider pbeKeyProvider = (MockPBEKeyProvider) keyProvider; + + // Test startup failure when the cluster key is INACTIVE + PBEClusterKeyManager tmpCKM = new PBEClusterKeyManager(master); + tmpCKM.ensureClusterKeyInitialized(); + pbeKeyProvider.setKeyStatus(pbeKeyProvider.getClusterKeyAlias(), PBEKeyStatus.INACTIVE); + assertThrows(IOException.class, tmpCKM::ensureClusterKeyInitialized); + } } From d0db6e88ed45f417a6f7420ec2a14f85a6a18c0b Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Wed, 2 Apr 2025 09:54:15 +0530 Subject: [PATCH 12/70] Minor refactoring --- .../hbase/io/crypto/PBEKeyProvider.java | 16 +++++ .../io/crypto/PBEKeyStoreKeyProvider.java | 22 ++----- .../hadoop/hbase/keymeta/PBEKeymetaAdmin.java | 4 +- .../io/crypto/TestPBEKeyStoreKeyProvider.java | 6 +- .../hadoop/hbase/keymeta/PBEKeyAccessor.java | 5 +- .../hbase/keymeta/PBEKeymetaAdminImpl.java | 5 +- .../hadoop/hbase/keymeta/PBETestBase.java | 26 ++++++++ .../hbase/master/TestPBEClusterKey.java | 60 +++++++------------ 8 files changed, 77 insertions(+), 67 deletions(-) create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/PBETestBase.java diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java index 8da73a3e1895..87815022017b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java @@ -20,6 +20,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.yetus.audience.InterfaceAudience; import java.io.IOException; +import java.util.Base64; /** * Interface for PBE-based key providers. Defines methods for generating and managing @@ -30,6 +31,21 @@ */ @InterfaceAudience.Public public interface PBEKeyProvider extends KeyProvider { + static byte[] decodeToPrefixBytes(String pbePrefix) throws IOException { + byte[] pbe_prefix; + try { + pbe_prefix = Base64.getDecoder().decode(pbePrefix); + } + catch (IllegalArgumentException e) { + throw new IOException("Failed to decode specified prefix as Base64 string: " + pbePrefix, e); + } + return pbe_prefix; + } + + static String encodeToPrefixStr(byte[] pbe_prefix) { + return Base64.getEncoder().encodeToString(pbe_prefix); + } + /** * Initialize the provider with the given configuration. * diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java index cda74d3e5df7..99b9063ebca4 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java @@ -6,7 +6,6 @@ import org.apache.yetus.audience.InterfaceAudience; import java.io.IOException; import java.security.Key; -import java.util.Base64; import java.util.HashMap; import java.util.Map; @@ -34,7 +33,8 @@ public PBEKeyData getClusterKey(byte[] clusterId) { throw new RuntimeException("Unable to find cluster key with alias: " + masterKeyAlias); } // Encode clusterId too for consistency with that of PBE prefixes. - String keyMetadata = generateKeyMetadata(masterKeyAlias, encodeToPrefixStr(clusterId)); + String keyMetadata = generateKeyMetadata(masterKeyAlias, + PBEKeyProvider.encodeToPrefixStr(clusterId)); return new PBEKeyData(clusterId, PBEKeyData.KEY_NAMESPACE_GLOBAL, key, PBEKeyStatus.ACTIVE, keyMetadata); } @@ -42,7 +42,7 @@ public PBEKeyData getClusterKey(byte[] clusterId) { @Override public PBEKeyData getPBEKey(byte[] pbe_prefix, String key_namespace) throws IOException { checkConfig(); - String encodedPrefix = encodeToPrefixStr(pbe_prefix); + String encodedPrefix = PBEKeyProvider.encodeToPrefixStr(pbe_prefix); String aliasConfKey = HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encodedPrefix + "." + "alias"; String keyMetadata = generateKeyMetadata(conf.get(aliasConfKey, null), encodedPrefix); @@ -57,7 +57,7 @@ public PBEKeyData unwrapKey(String keyMetadataStr) throws IOException { String activeStatusConfKey = HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encodedPrefix + ".active"; boolean isActive = conf.getBoolean(activeStatusConfKey, true); - byte[] pbe_prefix = decodeToPrefixBytes(encodedPrefix); + byte[] pbe_prefix = PBEKeyProvider.decodeToPrefixBytes(encodedPrefix); String alias = keyMetadata.get(KEY_METADATA_ALIAS); Key key = alias != null ? getKey(alias) : null; if (key != null) { @@ -81,18 +81,4 @@ public static String generateKeyMetadata(String aliasName, String encodedPrefix) }}, HashMap.class); } - public static byte[] decodeToPrefixBytes(String pbePrefix) throws IOException { - byte[] pbe_prefix; - try { - pbe_prefix = Base64.getDecoder().decode(pbePrefix); - } - catch (IllegalArgumentException e) { - throw new IOException("Failed to decode specified prefix as Base64 string: " + pbePrefix, e); - } - return pbe_prefix; - } - - public static String encodeToPrefixStr(byte[] pbe_prefix) { - return Base64.getEncoder().encodeToString(pbe_prefix); - } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdmin.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdmin.java index 5c2254484bab..89d38c7a1b6b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdmin.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdmin.java @@ -37,7 +37,7 @@ public interface PBEKeymetaAdmin { /** * Enables PBE for the specified key prefix and namespace. * - * @param pbePrefix The prefix for the PBE key. + * @param pbePrefix The prefix for the PBE key in base64 encoded format. * @param keyNamespace The namespace for the PBE key. * * @return The current status of the PBE key. @@ -48,7 +48,7 @@ public interface PBEKeymetaAdmin { /** * Get the status of all the keys for the specified pbe_prefix. * - * @param pbePrefix The prefix for the PBE key. + * @param pbePrefix The prefix for the PBE key in base64 encoded format. * @param keyNamespace The namespace for the PBE key. * @return The list of status objects each identifying the key and its current status. * @throws IOException if an error occurs while enabling PBE. diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java index 986b97d6d316..c40dcd7a774a 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java @@ -145,7 +145,7 @@ public void testGetInvalidKey() throws Exception { @Test public void testGetDisabledKey() throws Exception { byte[] invalidPrefix = new byte[] { 1, 2, 3 }; - String invalidPrefixEnc = PBEKeyStoreKeyProvider.encodeToPrefixStr(invalidPrefix); + String invalidPrefixEnc = PBEKeyProvider.encodeToPrefixStr(invalidPrefix); conf.set(HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + invalidPrefixEnc + ".active", "false"); PBEKeyData keyData = pbeProvider.getPBEKey(invalidPrefix, PBEKeyData.KEY_NAMESPACE_GLOBAL); assertNotNull(keyData); @@ -164,7 +164,7 @@ public void testGetClusterKey() throws Exception { public void testUnwrapInvalidKey() throws Exception { String invalidAlias = "invalidAlias"; byte[] invalidPrefix = new byte[] { 1, 2, 3 }; - String invalidPrefixEnc = PBEKeyStoreKeyProvider.encodeToPrefixStr(invalidPrefix); + String invalidPrefixEnc = PBEKeyProvider.encodeToPrefixStr(invalidPrefix); String invalidMetadata = PBEKeyStoreKeyProvider.generateKeyMetadata(invalidAlias, invalidPrefixEnc); PBEKeyData keyData = pbeProvider.unwrapKey(invalidMetadata); @@ -177,7 +177,7 @@ public void testUnwrapInvalidKey() throws Exception { public void testUnwrapDisabledKey() throws Exception { String invalidAlias = "invalidAlias"; byte[] invalidPrefix = new byte[] { 1, 2, 3 }; - String invalidPrefixEnc = PBEKeyStoreKeyProvider.encodeToPrefixStr(invalidPrefix); + String invalidPrefixEnc = PBEKeyProvider.encodeToPrefixStr(invalidPrefix); conf.set(HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + invalidPrefixEnc + ".active", "false"); String invalidMetadata = PBEKeyStoreKeyProvider.generateKeyMetadata(invalidAlias, invalidPrefixEnc); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java index 29f54a720a88..1c9922c62ab1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java @@ -19,14 +19,13 @@ import org.apache.hadoop.hbase.io.crypto.PBEKeyData; import org.apache.hadoop.hbase.io.crypto.PBEKeyProvider; -import org.apache.hadoop.hbase.io.crypto.PBEKeyStoreKeyProvider; import org.apache.yetus.audience.InterfaceAudience; import java.io.IOException; import java.security.KeyException; import java.util.List; /** - * This class provides a unified access on top of both {@code PBEKeyDataCache} (L1) and + * This class provides unified access on top of both {@code PBEKeyDataCache} (L1) and * {@code PBEKeymetaTableAccessor} (L2) to access PBE keys. When the getter is called, it first * checks if L1 cache has the key, if not, it tries to get the key from L2. */ @@ -64,7 +63,7 @@ public PBEKeyData getKey(byte[] pbe_prefix, String keyNamespace, String keyMetad keyData = provider.unwrapKey(keyMetadata); LOG.info("Got key data with status: {} and metadata: {} for prefix: {}", keyData.getKeyStatus(), keyData.getKeyMetadata(), - PBEKeyStoreKeyProvider.encodeToPrefixStr(pbe_prefix)); + PBEKeyProvider.encodeToPrefixStr(pbe_prefix)); keymetaAccessor.addKey(keyData); } if (keyData != null) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java index 08b62574944a..977a47c7e9f2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java @@ -21,7 +21,6 @@ import org.apache.hadoop.hbase.io.crypto.PBEKeyData; import org.apache.hadoop.hbase.io.crypto.PBEKeyProvider; import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus; -import org.apache.hadoop.hbase.io.crypto.PBEKeyStoreKeyProvider; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -43,7 +42,7 @@ public PBEKeymetaAdminImpl(Server server) { public PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOException { checkPBEEnabled(); LOG.info("Trying to enable PBE on key: {} under namespace: {}", pbePrefix, keyNamespace); - byte[] pbe_prefix = PBEKeyStoreKeyProvider.decodeToPrefixBytes(pbePrefix); + byte[] pbe_prefix = PBEKeyProvider.decodeToPrefixBytes(pbePrefix); PBEKeyProvider provider = getKeyProvider(); int perPrefixActiveKeyConfCount = getPerPrefixActiveKeyConfCount(); Set retrievedKeys = new HashSet<>(perPrefixActiveKeyConfCount); @@ -75,7 +74,7 @@ public List getPBEKeyStatuses(String pbePrefix, String keyNamespace) checkPBEEnabled(); LOG.info("Getting key statuses for PBE on key: {} under namespace: {}", pbePrefix, keyNamespace); - byte[] pbe_prefix = PBEKeyStoreKeyProvider.decodeToPrefixBytes(pbePrefix); + byte[] pbe_prefix = PBEKeyProvider.decodeToPrefixBytes(pbePrefix); return super.getAllKeys(pbe_prefix, keyNamespace); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/PBETestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/PBETestBase.java new file mode 100644 index 000000000000..7d257dcf6ec0 --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/PBETestBase.java @@ -0,0 +1,26 @@ +package org.apache.hadoop.hbase.keymeta; + +import org.apache.hadoop.hbase.HBaseTestingUtil; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.io.crypto.MockPBEKeyProvider; +import org.junit.After; +import org.junit.Before; + +public class PBETestBase { + protected HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); + + @Before + public void setUp() throws Exception { + TEST_UTIL.getConfiguration().set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockPBEKeyProvider.class.getName()); + TEST_UTIL.getConfiguration().set(HConstants.CRYPTO_PBE_ENABLED_CONF_KEY, "true"); + + // Start the minicluster + TEST_UTIL.startMiniCluster(1); + TEST_UTIL.waitFor(60000, () -> TEST_UTIL.getMiniHBaseCluster().getMaster().isInitialized()); + } + + @After + public void tearDown() throws Exception { + TEST_UTIL.shutdownMiniCluster(); + } +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestPBEClusterKey.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestPBEClusterKey.java index 2870433f8fe4..64ee8a1269e2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestPBEClusterKey.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestPBEClusterKey.java @@ -28,6 +28,7 @@ import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus; import org.apache.hadoop.hbase.keymeta.PBEClusterKeyAccessor; import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; +import org.apache.hadoop.hbase.keymeta.PBETestBase; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; @@ -45,49 +46,12 @@ import static org.junit.Assert.assertTrue; @Category({ MasterTests.class, MediumTests.class }) -public class TestPBEClusterKey { +public class TestPBEClusterKey extends PBETestBase { @ClassRule public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestPBEClusterKey.class); - private HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); - - @Before - public void setUp() throws Exception { - TEST_UTIL.getConfiguration().set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockPBEKeyProvider.class.getName()); - TEST_UTIL.getConfiguration().set(HConstants.CRYPTO_PBE_ENABLED_CONF_KEY, "true"); - - // Start the minicluster - TEST_UTIL.startMiniCluster(1); - TEST_UTIL.waitFor(60000, () -> TEST_UTIL.getMiniHBaseCluster().getMaster().isInitialized()); - } - - @After - public void tearDown() throws Exception { - TEST_UTIL.shutdownMiniCluster(); - } - - private PBEKeyData validateInitialState(HMaster master, MockPBEKeyProvider pbeKeyProvider ) - throws IOException { - PBEClusterKeyAccessor pbeClusterKeyAccessor = new PBEClusterKeyAccessor(master); - assertEquals(1, pbeClusterKeyAccessor.getAllClusterKeyFiles().size()); - PBEClusterKeyCache pbeClusterKeyCache = master.getPBEClusterKeyCache(); - assertNotNull(pbeClusterKeyCache); - PBEKeyData clusterKey = pbeClusterKeyCache.getLatestClusterKey(); - assertEquals(pbeKeyProvider.getClusterKey(master.getClusterId().getBytes()), clusterKey); - assertEquals(clusterKey, - pbeClusterKeyCache.getClusterKeyByChecksum(clusterKey.getKeyChecksum())); - return clusterKey; - } - - private void restartCluster() throws Exception { - TEST_UTIL.shutdownMiniHBaseCluster(); - Thread.sleep(2000); - TEST_UTIL.restartHBaseCluster(1); - TEST_UTIL.waitFor(60000, () -> TEST_UTIL.getMiniHBaseCluster().getMaster().isInitialized()); - } - @Test public void testClusterKeyInitializationAndRotation() throws Exception { HMaster master = TEST_UTIL.getHBaseCluster().getMaster(); @@ -132,4 +96,24 @@ public void testWithInvalidClusterKey() throws Exception { pbeKeyProvider.setKeyStatus(pbeKeyProvider.getClusterKeyAlias(), PBEKeyStatus.INACTIVE); assertThrows(IOException.class, tmpCKM::ensureClusterKeyInitialized); } + + private PBEKeyData validateInitialState(HMaster master, MockPBEKeyProvider pbeKeyProvider ) + throws IOException { + PBEClusterKeyAccessor pbeClusterKeyAccessor = new PBEClusterKeyAccessor(master); + assertEquals(1, pbeClusterKeyAccessor.getAllClusterKeyFiles().size()); + PBEClusterKeyCache pbeClusterKeyCache = master.getPBEClusterKeyCache(); + assertNotNull(pbeClusterKeyCache); + PBEKeyData clusterKey = pbeClusterKeyCache.getLatestClusterKey(); + assertEquals(pbeKeyProvider.getClusterKey(master.getClusterId().getBytes()), clusterKey); + assertEquals(clusterKey, + pbeClusterKeyCache.getClusterKeyByChecksum(clusterKey.getKeyChecksum())); + return clusterKey; + } + + private void restartCluster() throws Exception { + TEST_UTIL.shutdownMiniHBaseCluster(); + Thread.sleep(2000); + TEST_UTIL.restartHBaseCluster(1); + TEST_UTIL.waitFor(60000, () -> TEST_UTIL.getMiniHBaseCluster().getMaster().isInitialized()); + } } From 5a239f6f87542d16d3efcaf9889f5e053e50496a Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Wed, 2 Apr 2025 10:02:08 +0530 Subject: [PATCH 13/70] Renamed PBEClusterKey to SystemKey everywhere --- .../org/apache/hadoop/hbase/HConstants.java | 4 +- .../hbase/io/crypto/PBEKeyProvider.java | 12 +-- .../io/crypto/PBEKeyStoreKeyProvider.java | 2 +- .../hadoop/hbase/util/CommonFSUtils.java | 4 +- .../hbase/io/crypto/MockPBEKeyProvider.java | 12 +-- .../io/crypto/TestPBEKeyStoreKeyProvider.java | 4 +- .../apache/hadoop/hbase/HBaseServerBase.java | 16 ++-- .../hbase/MockRegionServerServices.java | 4 +- .../java/org/apache/hadoop/hbase/Server.java | 4 +- .../hbase/keymeta/PBEClusterKeyCache.java | 74 ------------------- .../keymeta/PBEKeymetaTableAccessor.java | 8 +- ...eyAccessor.java => SystemKeyAccessor.java} | 34 ++++----- .../hadoop/hbase/keymeta/SystemKeyCache.java | 74 +++++++++++++++++++ .../apache/hadoop/hbase/master/HMaster.java | 10 +-- .../hadoop/hbase/master/MasterFileSystem.java | 8 +- ...rKeyManager.java => SystemKeyManager.java} | 66 ++++++++--------- .../hbase/regionserver/HRegionServer.java | 6 +- .../regionserver/ReplicationSyncUp.java | 4 +- .../hbase/master/MockNoopMasterServices.java | 4 +- .../hadoop/hbase/master/MockRegionServer.java | 4 +- .../hbase/master/TestActiveMasterManager.java | 4 +- ...tPBEClusterKey.java => TestSystemKey.java} | 63 ++++++++-------- .../cleaner/TestReplicationHFileCleaner.java | 4 +- ...onProcedureStorePerformanceEvaluation.java | 4 +- .../regionserver/TestHeapMemoryManager.java | 4 +- .../token/TestTokenAuthentication.java | 4 +- .../apache/hadoop/hbase/util/MockServer.java | 4 +- 27 files changed, 215 insertions(+), 226 deletions(-) delete mode 100644 hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyCache.java rename hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/{PBEClusterKeyAccessor.java => SystemKeyAccessor.java} (73%) create mode 100644 hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java rename hbase-server/src/main/java/org/apache/hadoop/hbase/master/{PBEClusterKeyManager.java => SystemKeyManager.java} (54%) rename hbase-server/src/test/java/org/apache/hadoop/hbase/master/{TestPBEClusterKey.java => TestSystemKey.java} (62%) diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java index 1649690be8ec..84b6659ee4ac 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java @@ -1195,8 +1195,8 @@ public enum OperationStatusCode { /** * Directory used for storing master keys for the cluster */ - public static final String CLUSTER_KEYS_DIRECTORY = ".cluster_keys"; - public static final String CLUSTER_KEY_FILE_PREFIX = "cluster_key."; + public static final String SYSTEM_KEYS_DIRECTORY = ".system_keys"; + public static final String SYSTEM_KEY_FILE_PREFIX = "system_key."; /** * The period (in milliseconds) between computing region server point in time metrics */ diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java index 87815022017b..94080f17da88 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java @@ -54,27 +54,27 @@ static String encodeToPrefixStr(byte[] pbe_prefix) { void initConfig(Configuration conf); /** - * Retrieve the cluster key using the given cluster identifier. + * Retrieve the system key using the given system identifier. * - * @param clusterId Cluster identifier - * @return PBEKeyData for the cluster key and is guaranteed to be not {@code null} + * @param systemId system identifier + * @return PBEKeyData for the system key and is guaranteed to be not {@code null} * @throws IOException if an error occurs while retrieving the key */ - PBEKeyData getClusterKey(byte[] clusterId) throws IOException; + PBEKeyData getSystemKey(byte[] systemId) throws IOException; /** * Retrieve a PBE-based key for the specified prefix. * * @param pbe_prefix Key prefix * @param key_namespace Key namespace - * @return PBEKeyData for the cluster key and is expected to be not {@code null} + * @return PBEKeyData for the system key and is expected to be not {@code null} * @throws IOException if an error occurs while retrieving the key */ PBEKeyData getPBEKey(byte[] pbe_prefix, String key_namespace) throws IOException; /** * Retrieve a key identified by the key metadata. The key metadata is typically generated by the - * same key provider via the {@link #getClusterKey(byte[])} or {@link #getPBEKey(byte[], String)} + * same key provider via the {@link #getSystemKey(byte[])} or {@link #getPBEKey(byte[], String)} * methods. * * @param keyMetaData Key metadata diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java index 99b9063ebca4..323c4d9d29e4 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java @@ -22,7 +22,7 @@ public void initConfig(Configuration conf) { } @Override - public PBEKeyData getClusterKey(byte[] clusterId) { + public PBEKeyData getSystemKey(byte[] clusterId) { checkConfig(); String masterKeyAlias = conf.get(HConstants.CRYPTO_PBE_MASTERKEY_NAME_CONF_KEY, null); if (masterKeyAlias == null) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java index a2ffee3f8e69..da4662d2c8a0 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java @@ -297,8 +297,8 @@ public static void setRootDir(final Configuration c, final Path root) { c.set(HConstants.HBASE_DIR, root.toString()); } - public static Path getClusterKeyDir(final Configuration c) throws IOException { - return new Path(getRootDir(c), HConstants.CLUSTER_KEYS_DIRECTORY); + public static Path getSystemKeyDir(final Configuration c) throws IOException { + return new Path(getRootDir(c), HConstants.SYSTEM_KEYS_DIRECTORY); } public static void setFsDefault(final Configuration c, final Path root) { diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockPBEKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockPBEKeyProvider.java index 4b3a1180ebb7..9f7472f05460 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockPBEKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockPBEKeyProvider.java @@ -40,14 +40,14 @@ public class MockPBEKeyProvider extends MockAesKeyProvider implements PBEKeyProv public Map keys = new HashMap<>(); public Map keyStatus = new HashMap<>(); - private String clusterKeyAlias = "default_cluster_key_alias"; + private String systemKeyAlias = "default_system_key_alias"; @Override public void initConfig(Configuration conf) { // NO-OP } - @Override public PBEKeyData getClusterKey(byte[] clusterId) throws IOException { - return getKey(clusterId, clusterKeyAlias); + @Override public PBEKeyData getSystemKey(byte[] systemId) throws IOException { + return getKey(systemId, systemKeyAlias); } @Override public PBEKeyData getPBEKey(byte[] pbe_prefix, String key_namespace) @@ -92,11 +92,11 @@ public void setKey(String alias, Key key) { } public void setCluterKeyAlias(String alias) { - this.clusterKeyAlias = alias; + this.systemKeyAlias = alias; } - public String getClusterKeyAlias() { - return this.clusterKeyAlias; + public String getSystemKeyAlias() { + return this.systemKeyAlias; } /** diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java index c40dcd7a774a..4a52cbdcc352 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java @@ -154,8 +154,8 @@ public void testGetDisabledKey() throws Exception { } @Test - public void testGetClusterKey() throws Exception { - PBEKeyData clusterKeyData = pbeProvider.getClusterKey(clusterId.getBytes()); + public void testGetSystemKey() throws Exception { + PBEKeyData clusterKeyData = pbeProvider.getSystemKey(clusterId.getBytes()); assertPBEKeyData(clusterKeyData, PBEKeyStatus.ACTIVE, masterKey, clusterId.getBytes(), MASTER_KEY_ALIAS); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java index 6de1e060714a..850493455d81 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java @@ -52,8 +52,8 @@ import org.apache.hadoop.hbase.http.InfoServer; import org.apache.hadoop.hbase.io.util.MemorySizeUtil; import org.apache.hadoop.hbase.ipc.RpcServerInterface; -import org.apache.hadoop.hbase.keymeta.PBEClusterKeyAccessor; -import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; +import org.apache.hadoop.hbase.keymeta.SystemKeyAccessor; +import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdminImpl; @@ -192,7 +192,7 @@ public abstract class HBaseServerBase> extends protected final NettyEventLoopGroupConfig eventLoopGroupConfig; - private PBEClusterKeyCache pbeClusterKeyCache; + private SystemKeyCache systemKeyCache; protected PBEKeymetaAdminImpl pbeKeymetaAdmin; protected PBEKeyAccessor pbeKeyAccessor; @@ -425,13 +425,13 @@ public PBEKeyAccessor getPBEKeyAccessor() { } @Override - public PBEClusterKeyCache getPBEClusterKeyCache() { - return pbeClusterKeyCache; + public SystemKeyCache getSystemKeyCache() { + return systemKeyCache; } - protected void buildPBEClusterKeyCache() throws IOException { - if (pbeClusterKeyCache == null && Server.isPBEEnabled(this)) { - pbeClusterKeyCache = PBEClusterKeyCache.createCache(new PBEClusterKeyAccessor(this)); + protected void buildSystemKeyCache() throws IOException { + if (systemKeyCache == null && Server.isPBEEnabled(this)) { + systemKeyCache = SystemKeyCache.createCache(new SystemKeyAccessor(this)); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java index c14a28cd8f9d..24bd2f6f98bb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java @@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.hfile.BlockCache; import org.apache.hadoop.hbase.ipc.RpcServerInterface; -import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; +import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.mob.MobFileCache; @@ -259,7 +259,7 @@ public ChoreService getChoreService() { return null; } - @Override public PBEClusterKeyCache getPBEClusterKeyCache() { + @Override public SystemKeyCache getSystemKeyCache() { return null; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java index f7be347cdfa7..a4856ad33faa 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java @@ -23,7 +23,7 @@ import org.apache.hadoop.hbase.client.AsyncClusterConnection; import org.apache.hadoop.hbase.client.AsyncConnection; import org.apache.hadoop.hbase.client.Connection; -import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; +import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; @@ -89,7 +89,7 @@ default AsyncConnection getAsyncConnection() { /** * @return the cache for cluster keys. */ - public PBEClusterKeyCache getPBEClusterKeyCache(); + public SystemKeyCache getSystemKeyCache(); /** * @return the accessor for cluster keys. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyCache.java deleted file mode 100644 index a77583ecf35b..000000000000 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyCache.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hbase.keymeta; - -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.io.crypto.PBEKeyData; -import org.apache.yetus.audience.InterfaceAudience; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.TreeMap; - -@InterfaceAudience.Private -public class PBEClusterKeyCache { - private static final Logger LOG = LoggerFactory.getLogger(PBEClusterKeyCache.class); - - private final PBEKeyData latestClusterKey; - private final Map clusterKeys; - - /** - * Construct the Cluster Key cache from the specified accessor. - * @param accessor - * @return the cache or {@code null} if no keys are found. - * @throws IOException - */ - public static PBEClusterKeyCache createCache(PBEClusterKeyAccessor accessor) throws IOException { - List allClusterKeyFiles = accessor.getAllClusterKeyFiles(); - if (allClusterKeyFiles.isEmpty()) { - LOG.warn("No cluster key files found, skipping cache creation"); - return null; - } - PBEKeyData latestClusterKey = null; - Map clusterKeys = new TreeMap<>(); - for (Path keyPath: allClusterKeyFiles) { - LOG.info("Loading cluster key from: {}", keyPath); - PBEKeyData keyData = accessor.loadClusterKey(keyPath); - if (latestClusterKey == null) { - latestClusterKey = keyData; - } - clusterKeys.put(keyData.getKeyChecksum(), keyData); - } - return new PBEClusterKeyCache(clusterKeys, latestClusterKey); - } - - private PBEClusterKeyCache(Map clusterKeys, PBEKeyData latestClusterKey) { - this.clusterKeys = clusterKeys; - this.latestClusterKey = latestClusterKey; - } - - public PBEKeyData getLatestClusterKey() { - return latestClusterKey; - } - - public PBEKeyData getClusterKeyByChecksum(long checksum) { - return clusterKeys.get(checksum); - } -} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java index 4dfc4dbbcc42..5f6a441106c5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java @@ -201,10 +201,10 @@ public void reportOperation(byte[] pbe_prefix, String keyNamespace, String keyMe * Add the mutation columns to the given Put that are derived from the keyData. */ private Put addMutationColumns(Put put, PBEKeyData keyData) throws IOException { - PBEKeyData latestClusterKey = server.getPBEClusterKeyCache().getLatestClusterKey(); + PBEKeyData latestSystemKey = server.getSystemKeyCache().getLatestSystemKey(); if (keyData.getTheKey() != null) { byte[] dekWrappedBySTK = EncryptionUtil.wrapKey(server.getConfiguration(), null, - keyData.getTheKey(), latestClusterKey.getTheKey()); + keyData.getTheKey(), latestSystemKey.getTheKey()); put.addColumn(KEY_META_INFO_FAMILY, DEK_CHECKSUM_QUAL_BYTES, Bytes.toBytes(keyData.getKeyChecksum())) .addColumn(KEY_META_INFO_FAMILY, DEK_WRAPPED_BY_STK_QUAL_BYTES, dekWrappedBySTK) @@ -214,7 +214,7 @@ private Put addMutationColumns(Put put, PBEKeyData keyData) throws IOException { .setPriority(HConstants.SYSTEMTABLE_QOS) .addColumn(KEY_META_INFO_FAMILY, DEK_METADATA_QUAL_BYTES, keyData.getKeyMetadata().getBytes()) .addColumn(KEY_META_INFO_FAMILY, STK_CHECKSUM_QUAL_BYTES, - Bytes.toBytes(latestClusterKey.getKeyChecksum())) + Bytes.toBytes(latestSystemKey.getKeyChecksum())) .addColumn(KEY_META_INFO_FAMILY, REFRESHED_TIMESTAMP_QUAL_BYTES, Bytes.toBytes(keyData.getRefreshTimestamp())) .addColumn(KEY_META_INFO_FAMILY, KEY_STATUS_QUAL_BYTES, @@ -246,7 +246,7 @@ private PBEKeyData parseFromResult(byte[] pbe_prefix, String keyNamespace, Resul if (dekWrappedByStk != null) { long stkChecksum = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY, STK_CHECKSUM_QUAL_BYTES)); - PBEKeyData clusterKey = server.getPBEClusterKeyCache().getClusterKeyByChecksum(stkChecksum); + PBEKeyData clusterKey = server.getSystemKeyCache().getSystemKeyByChecksum(stkChecksum); if (clusterKey == null) { LOG.error("Dropping key with metadata: {} as STK with checksum: {} is unavailable", dekMetadata, stkChecksum); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java similarity index 73% rename from hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyAccessor.java rename to hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java index 83dd5a250631..5b0eff824e33 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java @@ -32,27 +32,27 @@ import java.util.List; import java.util.Map; import java.util.TreeMap; -import static org.apache.hadoop.hbase.HConstants.CLUSTER_KEY_FILE_PREFIX; +import static org.apache.hadoop.hbase.HConstants.SYSTEM_KEY_FILE_PREFIX; @InterfaceAudience.Private -public class PBEClusterKeyAccessor extends PBEKeyAccessorBase { - protected final Path clusterKeyDir; +public class SystemKeyAccessor extends PBEKeyAccessorBase { + protected final Path systemKeyDir; - public PBEClusterKeyAccessor(Server server) throws IOException { + public SystemKeyAccessor(Server server) throws IOException { super(server); - this.clusterKeyDir = CommonFSUtils.getClusterKeyDir(server.getConfiguration()); + this.systemKeyDir = CommonFSUtils.getSystemKeyDir(server.getConfiguration()); } - public Path getLatestClusterKeyFile() throws IOException { + public Path getLatestSystemKeyFile() throws IOException { if (! isPBEEnabled()) { return null; } - List allClusterKeyFiles = getAllClusterKeyFiles(); + List allClusterKeyFiles = getAllSystemKeyFiles(); if (allClusterKeyFiles.isEmpty()) { throw new RuntimeException("No cluster key initialized yet"); } int currentMaxSeqNum = extractKeySequence(allClusterKeyFiles.get(0)); - return new Path(clusterKeyDir, CLUSTER_KEY_FILE_PREFIX + currentMaxSeqNum); + return new Path(systemKeyDir, SYSTEM_KEY_FILE_PREFIX + currentMaxSeqNum); } /** @@ -63,30 +63,30 @@ public Path getLatestClusterKeyFile() throws IOException { * @return a list of all available cluster key files * @throws IOException */ - public List getAllClusterKeyFiles() throws IOException { + public List getAllSystemKeyFiles() throws IOException { if (!isPBEEnabled()) { return null; } FileSystem fs = server.getFileSystem(); Map clusterKeys = new TreeMap<>(Comparator.reverseOrder()); - for (FileStatus st : fs.globStatus(new Path(clusterKeyDir, CLUSTER_KEY_FILE_PREFIX + "*"))) { + for (FileStatus st : fs.globStatus(new Path(systemKeyDir, SYSTEM_KEY_FILE_PREFIX + "*"))) { Path keyPath = st.getPath(); - int seqNum = extractClusterKeySeqNum(keyPath); + int seqNum = extractSystemKeySeqNum(keyPath); clusterKeys.put(seqNum, keyPath); } return new ArrayList<>(clusterKeys.values()); } - public PBEKeyData loadClusterKey(Path keyPath) throws IOException { + public PBEKeyData loadSystemKey(Path keyPath) throws IOException { PBEKeyProvider provider = getKeyProvider(); return provider.unwrapKey(loadKeyMetadata(keyPath)); } - public int extractClusterKeySeqNum(Path keyPath) throws IOException { - if (keyPath.getName().startsWith(CLUSTER_KEY_FILE_PREFIX)) { + public int extractSystemKeySeqNum(Path keyPath) throws IOException { + if (keyPath.getName().startsWith(SYSTEM_KEY_FILE_PREFIX)) { try { - return Integer.valueOf(keyPath.getName().substring(CLUSTER_KEY_FILE_PREFIX.length())); + return Integer.valueOf(keyPath.getName().substring(SYSTEM_KEY_FILE_PREFIX.length())); } catch (NumberFormatException e) { LOG.error("Invalid file name for a cluster key: {}", keyPath, e); @@ -103,8 +103,8 @@ public int extractClusterKeySeqNum(Path keyPath) throws IOException { */ protected int extractKeySequence(Path clusterKeyFile) throws IOException { int keySeq = -1; - if (clusterKeyFile.getName().startsWith(CLUSTER_KEY_FILE_PREFIX)) { - keySeq = Integer.valueOf(clusterKeyFile.getName().substring(CLUSTER_KEY_FILE_PREFIX.length())); + if (clusterKeyFile.getName().startsWith(SYSTEM_KEY_FILE_PREFIX)) { + keySeq = Integer.valueOf(clusterKeyFile.getName().substring(SYSTEM_KEY_FILE_PREFIX.length())); } return keySeq; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java new file mode 100644 index 000000000000..f61e20b1ae29 --- /dev/null +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java @@ -0,0 +1,74 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.keymeta; + +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.io.crypto.PBEKeyData; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; + +@InterfaceAudience.Private +public class SystemKeyCache { + private static final Logger LOG = LoggerFactory.getLogger(SystemKeyCache.class); + + private final PBEKeyData latestSystemKey; + private final Map systemKeys; + + /** + * Construct the System Key cache from the specified accessor. + * @param accessor + * @return the cache or {@code null} if no keys are found. + * @throws IOException + */ + public static SystemKeyCache createCache(SystemKeyAccessor accessor) throws IOException { + List allSystemKeyFiles = accessor.getAllSystemKeyFiles(); + if (allSystemKeyFiles.isEmpty()) { + LOG.warn("No system key files found, skipping cache creation"); + return null; + } + PBEKeyData latestSystemKey = null; + Map systemKeys = new TreeMap<>(); + for (Path keyPath: allSystemKeyFiles) { + LOG.info("Loading system key from: {}", keyPath); + PBEKeyData keyData = accessor.loadSystemKey(keyPath); + if (latestSystemKey == null) { + latestSystemKey = keyData; + } + systemKeys.put(keyData.getKeyChecksum(), keyData); + } + return new SystemKeyCache(systemKeys, latestSystemKey); + } + + private SystemKeyCache(Map systemKeys, PBEKeyData latestSystemKey) { + this.systemKeys = systemKeys; + this.latestSystemKey = latestSystemKey; + } + + public PBEKeyData getLatestSystemKey() { + return latestSystemKey; + } + + public PBEKeyData getSystemKeyByChecksum(long checksum) { + return systemKeys.get(checksum); + } +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java index 6a93fd2ab667..d97c7ed3f5eb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java @@ -122,8 +122,6 @@ import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils; import org.apache.hadoop.hbase.ipc.RpcServer; import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException; -import org.apache.hadoop.hbase.keymeta.PBEClusterKeyAccessor; -import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; import org.apache.hadoop.hbase.keymeta.PBEKeymetaMasterService; import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.master.MasterRpcServices.BalanceSwitchMode; @@ -358,7 +356,7 @@ public class HMaster extends HBaseServerBase implements Maste // file system manager for the master FS operations private MasterFileSystem fileSystemManager; private MasterWalManager walManager; - private PBEClusterKeyManager pbeClusterKeyManager; + private SystemKeyManager systemKeyManager; private PBEKeymetaMasterService pbeKeymetaMasterService; // manager to manage procedure-based WAL splitting, can be null if current @@ -997,9 +995,9 @@ private void finishActiveMasterInitialization() throws IOException, InterruptedE ZKClusterId.setClusterId(this.zooKeeper, fileSystemManager.getClusterId()); this.clusterId = clusterId.toString(); - pbeClusterKeyManager = new PBEClusterKeyManager(this); - pbeClusterKeyManager.ensureClusterKeyInitialized(); - buildPBEClusterKeyCache(); + systemKeyManager = new SystemKeyManager(this); + systemKeyManager.ensureSystemKeyInitialized(); + buildSystemKeyCache(); // Precaution. Put in place the old hbck1 lock file to fence out old hbase1s running their // hbck1s against an hbase2 cluster; it could do damage. To skip this behavior, set diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java index 4178e31d0cb2..a4e1202f40a3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java @@ -66,7 +66,7 @@ public class MasterFileSystem { private final FileSystem walFs; // root log directory on the FS private final Path rootdir; - private final Path clusterKeyDir; + private final Path systemKeyDir; // hbase temp directory used for table construction and deletion private final Path tempdir; // root hbase directory on the FS @@ -97,7 +97,7 @@ public MasterFileSystem(Configuration conf) throws IOException { // default localfs. Presumption is that rootdir is fully-qualified before // we get to here with appropriate fs scheme. this.rootdir = CommonFSUtils.getRootDir(conf); - this.clusterKeyDir = CommonFSUtils.getClusterKeyDir(conf); + this.systemKeyDir = CommonFSUtils.getSystemKeyDir(conf); this.tempdir = new Path(this.rootdir, HConstants.HBASE_TEMP_DIRECTORY); // Cover both bases, the old way of setting default fs and the new. // We're supposed to run on 0.20 and 0.21 anyways. @@ -136,7 +136,7 @@ private void createInitialFileSystemLayout() throws IOException { HConstants.CORRUPT_DIR_NAME, ReplicationUtils.REMOTE_WAL_DIR_NAME }; // check if the root directory exists checkRootDir(this.rootdir, conf, this.fs); - checkRootDir(this.clusterKeyDir, conf, this.fs); + checkRootDir(this.systemKeyDir, conf, this.fs); // Check the directories under rootdir. checkTempDir(this.tempdir, conf, this.fs); @@ -161,7 +161,7 @@ private void createInitialFileSystemLayout() throws IOException { if (isSecurityEnabled) { fs.setPermission(new Path(rootdir, HConstants.VERSION_FILE_NAME), secureRootFilePerms); fs.setPermission(new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME), secureRootFilePerms); - fs.setPermission(clusterKeyDir, secureRootFilePerms); + fs.setPermission(systemKeyDir, secureRootFilePerms); } FsPermission currentRootPerms = fs.getFileStatus(this.rootdir).getPermission(); if ( diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/PBEClusterKeyManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java similarity index 54% rename from hbase-server/src/main/java/org/apache/hadoop/hbase/master/PBEClusterKeyManager.java rename to hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java index a2dad9aa460a..e4ece78a4852 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/PBEClusterKeyManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java @@ -25,90 +25,90 @@ import org.apache.hadoop.hbase.io.crypto.PBEKeyData; import org.apache.hadoop.hbase.io.crypto.PBEKeyProvider; import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus; -import org.apache.hadoop.hbase.keymeta.PBEClusterKeyAccessor; +import org.apache.hadoop.hbase.keymeta.SystemKeyAccessor; import org.apache.yetus.audience.InterfaceAudience; -import static org.apache.hadoop.hbase.HConstants.CLUSTER_KEY_FILE_PREFIX; +import static org.apache.hadoop.hbase.HConstants.SYSTEM_KEY_FILE_PREFIX; @InterfaceAudience.Private -public class PBEClusterKeyManager extends PBEClusterKeyAccessor { +public class SystemKeyManager extends SystemKeyAccessor { private final MasterServices master; - public PBEClusterKeyManager(MasterServices master) throws IOException { + public SystemKeyManager(MasterServices master) throws IOException { super(master); this.master = master; } - public void ensureClusterKeyInitialized() throws IOException { + public void ensureSystemKeyInitialized() throws IOException { if (! isPBEEnabled()) { return; } - List clusterKeys = getAllClusterKeyFiles(); + List clusterKeys = getAllSystemKeyFiles(); if (clusterKeys.isEmpty()) { - LOG.info("Initializing Cluster Key for the first time"); + LOG.info("Initializing System Key for the first time"); // Double check for cluster key as another HMaster might have succeeded. - if (rotateClusterKey(null) == null && getAllClusterKeyFiles().isEmpty()) { - throw new RuntimeException("Failed to generate or save Cluster Key"); + if (rotateSystemKey(null) == null && getAllSystemKeyFiles().isEmpty()) { + throw new RuntimeException("Failed to generate or save System Key"); } } - else if (rotateClusterKeyIfChanged() != null) { - LOG.info("Cluster key has been rotated"); + else if (rotateSystemKeyIfChanged() != null) { + LOG.info("System key has been rotated"); } else { - LOG.info("Cluster key is already initialized and unchanged"); + LOG.info("System key is already initialized and unchanged"); } } - public PBEKeyData rotateClusterKeyIfChanged() throws IOException { + public PBEKeyData rotateSystemKeyIfChanged() throws IOException { if (! isPBEEnabled()) { return null; } - Path latestFile = getLatestClusterKeyFile(); + Path latestFile = getLatestSystemKeyFile(); String latestKeyMetadata = loadKeyMetadata(latestFile); - return rotateClusterKey(latestKeyMetadata); + return rotateSystemKey(latestKeyMetadata); } - private PBEKeyData rotateClusterKey(String currentKeyMetadata) throws IOException { + private PBEKeyData rotateSystemKey(String currentKeyMetadata) throws IOException { if (! isPBEEnabled()) { return null; } PBEKeyProvider provider = getKeyProvider(); - PBEKeyData clusterKey = provider.getClusterKey( + PBEKeyData clusterKey = provider.getSystemKey( master.getMasterFileSystem().getClusterId().toString().getBytes()); if (clusterKey.getKeyStatus() != PBEKeyStatus.ACTIVE) { - throw new IOException("Cluster key is expected to be ACTIVE but it is: " + + throw new IOException("System key is expected to be ACTIVE but it is: " + clusterKey.getKeyStatus() + " for metadata: " + clusterKey.getKeyMetadata()); } if (clusterKey != null && clusterKey.getKeyMetadata() != null && ! clusterKey.getKeyMetadata().equals(currentKeyMetadata) && - saveLatestClusterKey(clusterKey.getKeyMetadata())) { + saveLatestSystemKey(clusterKey.getKeyMetadata())) { return clusterKey; } return null; } - private boolean saveLatestClusterKey(String keyMetadata) throws IOException { - List allClusterKeyFiles = getAllClusterKeyFiles(); - int nextClusterKeySeq = (allClusterKeyFiles.isEmpty() ? -1 - : extractKeySequence(allClusterKeyFiles.get(0))) + 1; - LOG.info("Trying to save a new cluster key at seq: {}", nextClusterKeySeq); + private boolean saveLatestSystemKey(String keyMetadata) throws IOException { + List allSystemKeyFiles = getAllSystemKeyFiles(); + int nextSystemKeySeq = (allSystemKeyFiles.isEmpty() ? -1 + : extractKeySequence(allSystemKeyFiles.get(0))) + 1; + LOG.info("Trying to save a new cluster key at seq: {}", nextSystemKeySeq); MasterFileSystem masterFS = master.getMasterFileSystem(); - Path nextClusterKeyPath = new Path(clusterKeyDir, - CLUSTER_KEY_FILE_PREFIX + nextClusterKeySeq); - Path tempClusterKeyFile = new Path(masterFS.getTempDir(), - nextClusterKeyPath.getName() + UUID.randomUUID()); + Path nextSystemKeyPath = new Path(systemKeyDir, + SYSTEM_KEY_FILE_PREFIX + nextSystemKeySeq); + Path tempSystemKeyFile = new Path(masterFS.getTempDir(), + nextSystemKeyPath.getName() + UUID.randomUUID()); try (FSDataOutputStream fsDataOutputStream = masterFS.getFileSystem() - .create(tempClusterKeyFile)) { + .create(tempSystemKeyFile)) { fsDataOutputStream.writeUTF(keyMetadata); - boolean succeeded = masterFS.getFileSystem().rename(tempClusterKeyFile, nextClusterKeyPath); + boolean succeeded = masterFS.getFileSystem().rename(tempSystemKeyFile, nextSystemKeyPath); if (succeeded) { - LOG.info("Cluster key save succeeded for seq: {}", nextClusterKeySeq); + LOG.info("System key save succeeded for seq: {}", nextSystemKeySeq); } else { - LOG.error("Cluster key save failed for seq: {}", nextClusterKeySeq); + LOG.error("System key save failed for seq: {}", nextSystemKeySeq); } return succeeded; } finally { - masterFS.getFileSystem().delete(tempClusterKeyFile, false); + masterFS.getFileSystem().delete(tempSystemKeyFile, false); } } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index eb5d9e31b8fa..4641ceb29152 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -120,11 +120,7 @@ import org.apache.hadoop.hbase.ipc.RpcServer; import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException; import org.apache.hadoop.hbase.ipc.ServerRpcController; -import org.apache.hadoop.hbase.keymeta.PBEClusterKeyAccessor; -import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; -import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; -import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdminImpl; import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.mob.MobFileCache; import org.apache.hadoop.hbase.mob.RSMobFileCleanerChore; @@ -1453,7 +1449,7 @@ protected void handleReportForDutyResponse(final RegionServerStartupResponse c) initializeFileSystem(); } - buildPBEClusterKeyCache(); + buildSystemKeyCache(); pbeKeyAccessor = new PBEKeyAccessor(pbeKeymetaAdmin); // hack! Maps DFSClient => RegionServer for logs. HDFS made this diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java index a42c24e1a040..d6690b4bff16 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java @@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.AsyncClusterConnection; import org.apache.hadoop.hbase.client.Connection; -import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; +import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.master.replication.OfflineTableReplicationQueueStorage; @@ -369,7 +369,7 @@ public ChoreService getChoreService() { return null; } - @Override public PBEClusterKeyCache getPBEClusterKeyCache() { + @Override public SystemKeyCache getSystemKeyCache() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java index d9b384a1d98e..c8958fae761c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java @@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.executor.ExecutorService; import org.apache.hadoop.hbase.favored.FavoredNodesManager; -import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; +import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.master.assignment.AssignmentManager; @@ -119,7 +119,7 @@ public ChoreService getChoreService() { return null; } - @Override public PBEClusterKeyCache getPBEClusterKeyCache() { + @Override public SystemKeyCache getSystemKeyCache() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java index 0cb5ea436949..ac19adfaf0b0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java @@ -52,7 +52,7 @@ import org.apache.hadoop.hbase.io.hfile.BlockCache; import org.apache.hadoop.hbase.ipc.HBaseRpcController; import org.apache.hadoop.hbase.ipc.RpcServerInterface; -import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; +import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.mob.MobFileCache; @@ -559,7 +559,7 @@ public ChoreService getChoreService() { return null; } - @Override public PBEClusterKeyCache getPBEClusterKeyCache() { + @Override public SystemKeyCache getSystemKeyCache() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java index f850ab0f52b3..e09ce4ded4c9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java @@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.ServerName; -import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; +import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.monitoring.MonitoredTask; @@ -331,7 +331,7 @@ public ActiveMasterManager getActiveMasterManager() { return activeMasterManager; } - @Override public PBEClusterKeyCache getPBEClusterKeyCache() { + @Override public SystemKeyCache getSystemKeyCache() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestPBEClusterKey.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java similarity index 62% rename from hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestPBEClusterKey.java rename to hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java index 64ee8a1269e2..3ab1aa140d34 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestPBEClusterKey.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java @@ -18,24 +18,19 @@ package org.apache.hadoop.hbase.master; import org.apache.hadoop.hbase.HBaseClassTestRule; -import org.apache.hadoop.hbase.HBaseTestingUtil; -import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.io.crypto.Encryption; import org.apache.hadoop.hbase.io.crypto.KeyProvider; import org.apache.hadoop.hbase.io.crypto.PBEKeyData; import org.apache.hadoop.hbase.io.crypto.PBEKeyProvider; import org.apache.hadoop.hbase.io.crypto.MockPBEKeyProvider; import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus; -import org.apache.hadoop.hbase.keymeta.PBEClusterKeyAccessor; -import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; +import org.apache.hadoop.hbase.keymeta.SystemKeyAccessor; +import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.keymeta.PBETestBase; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.After; -import org.junit.Before; import org.junit.ClassRule; -import org.junit.Ignore; import org.junit.Test; import org.junit.experimental.categories.Category; import java.io.IOException; @@ -46,23 +41,23 @@ import static org.junit.Assert.assertTrue; @Category({ MasterTests.class, MediumTests.class }) -public class TestPBEClusterKey extends PBETestBase { +public class TestSystemKey extends PBETestBase { @ClassRule public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestPBEClusterKey.class); + HBaseClassTestRule.forClass(TestSystemKey.class); @Test - public void testClusterKeyInitializationAndRotation() throws Exception { + public void testSystemKeyInitializationAndRotation() throws Exception { HMaster master = TEST_UTIL.getHBaseCluster().getMaster(); KeyProvider keyProvider = Encryption.getKeyProvider(master.getConfiguration()); assertNotNull(keyProvider); assertTrue(keyProvider instanceof PBEKeyProvider); assertTrue(keyProvider instanceof MockPBEKeyProvider); MockPBEKeyProvider pbeKeyProvider = (MockPBEKeyProvider) keyProvider; - PBEKeyData initialClusterKey = validateInitialState(master, pbeKeyProvider); + PBEKeyData initialSystemKey = validateInitialState(master, pbeKeyProvider); - restartCluster(); + restartSystem(); master = TEST_UTIL.getHBaseCluster().getMaster(); validateInitialState(master, pbeKeyProvider); @@ -71,46 +66,46 @@ public void testClusterKeyInitializationAndRotation() throws Exception { pbeKeyProvider.setCluterKeyAlias(newAlias); Key newCluterKey = MockPBEKeyProvider.generateSecretKey(); pbeKeyProvider.setKey(newAlias, newCluterKey); - restartCluster(); + restartSystem(); master = TEST_UTIL.getHBaseCluster().getMaster(); - PBEClusterKeyAccessor pbeClusterKeyAccessor = new PBEClusterKeyAccessor(master); - assertEquals(2, pbeClusterKeyAccessor.getAllClusterKeyFiles().size()); - PBEClusterKeyCache pbeClusterKeyCache = master.getPBEClusterKeyCache(); + SystemKeyAccessor systemKeyAccessor = new SystemKeyAccessor(master); + assertEquals(2, systemKeyAccessor.getAllSystemKeyFiles().size()); + SystemKeyCache systemKeyCache = master.getSystemKeyCache(); assertEquals(0, Bytes.compareTo(newCluterKey.getEncoded(), - pbeClusterKeyCache.getLatestClusterKey().getTheKey().getEncoded())); - assertEquals(initialClusterKey, - pbeClusterKeyAccessor.loadClusterKey(pbeClusterKeyAccessor.getAllClusterKeyFiles().get(1))); - assertEquals(initialClusterKey, - pbeClusterKeyCache.getClusterKeyByChecksum(initialClusterKey.getKeyChecksum())); + systemKeyCache.getLatestSystemKey().getTheKey().getEncoded())); + assertEquals(initialSystemKey, + systemKeyAccessor.loadSystemKey(systemKeyAccessor.getAllSystemKeyFiles().get(1))); + assertEquals(initialSystemKey, + systemKeyCache.getSystemKeyByChecksum(initialSystemKey.getKeyChecksum())); } @Test - public void testWithInvalidClusterKey() throws Exception { + public void testWithInvalidSystemKey() throws Exception { HMaster master = TEST_UTIL.getHBaseCluster().getMaster(); KeyProvider keyProvider = Encryption.getKeyProvider(master.getConfiguration()); MockPBEKeyProvider pbeKeyProvider = (MockPBEKeyProvider) keyProvider; // Test startup failure when the cluster key is INACTIVE - PBEClusterKeyManager tmpCKM = new PBEClusterKeyManager(master); - tmpCKM.ensureClusterKeyInitialized(); - pbeKeyProvider.setKeyStatus(pbeKeyProvider.getClusterKeyAlias(), PBEKeyStatus.INACTIVE); - assertThrows(IOException.class, tmpCKM::ensureClusterKeyInitialized); + SystemKeyManager tmpCKM = new SystemKeyManager(master); + tmpCKM.ensureSystemKeyInitialized(); + pbeKeyProvider.setKeyStatus(pbeKeyProvider.getSystemKeyAlias(), PBEKeyStatus.INACTIVE); + assertThrows(IOException.class, tmpCKM::ensureSystemKeyInitialized); } private PBEKeyData validateInitialState(HMaster master, MockPBEKeyProvider pbeKeyProvider ) throws IOException { - PBEClusterKeyAccessor pbeClusterKeyAccessor = new PBEClusterKeyAccessor(master); - assertEquals(1, pbeClusterKeyAccessor.getAllClusterKeyFiles().size()); - PBEClusterKeyCache pbeClusterKeyCache = master.getPBEClusterKeyCache(); - assertNotNull(pbeClusterKeyCache); - PBEKeyData clusterKey = pbeClusterKeyCache.getLatestClusterKey(); - assertEquals(pbeKeyProvider.getClusterKey(master.getClusterId().getBytes()), clusterKey); + SystemKeyAccessor systemKeyAccessor = new SystemKeyAccessor(master); + assertEquals(1, systemKeyAccessor.getAllSystemKeyFiles().size()); + SystemKeyCache systemKeyCache = master.getSystemKeyCache(); + assertNotNull(systemKeyCache); + PBEKeyData clusterKey = systemKeyCache.getLatestSystemKey(); + assertEquals(pbeKeyProvider.getSystemKey(master.getClusterId().getBytes()), clusterKey); assertEquals(clusterKey, - pbeClusterKeyCache.getClusterKeyByChecksum(clusterKey.getKeyChecksum())); + systemKeyCache.getSystemKeyByChecksum(clusterKey.getKeyChecksum())); return clusterKey; } - private void restartCluster() throws Exception { + private void restartSystem() throws Exception { TEST_UTIL.shutdownMiniHBaseCluster(); Thread.sleep(2000); TEST_UTIL.restartHBaseCluster(1); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java index 69de03d223cd..2f85ee3e92e4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java @@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.TableDescriptor; -import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; +import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.master.HMaster; @@ -218,7 +218,7 @@ public Connection getConnection() { } } - @Override public PBEClusterKeyCache getPBEClusterKeyCache() { + @Override public SystemKeyCache getSystemKeyCache() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStorePerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStorePerformanceEvaluation.java index c4309acea7d4..783a04168eca 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStorePerformanceEvaluation.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStorePerformanceEvaluation.java @@ -24,7 +24,7 @@ import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.io.util.MemorySizeUtil; -import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; +import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.master.region.MasterRegion; @@ -61,7 +61,7 @@ public ServerName getServerName() { return serverName; } - @Override public PBEClusterKeyCache getPBEClusterKeyCache() { + @Override public SystemKeyCache getSystemKeyCache() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java index 81c9cb075db5..7069cd6380ff 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java @@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.io.hfile.CachedBlock; import org.apache.hadoop.hbase.io.hfile.ResizableBlockCache; import org.apache.hadoop.hbase.io.util.MemorySizeUtil; -import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; +import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.regionserver.HeapMemoryManager.TunerContext; @@ -840,7 +840,7 @@ public ChoreService getChoreService() { return null; } - @Override public PBEClusterKeyCache getPBEClusterKeyCache() { + @Override public SystemKeyCache getSystemKeyCache() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java index 8d8b2f177378..d2f92b0a13f5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java @@ -55,7 +55,7 @@ import org.apache.hadoop.hbase.ipc.RpcServerInterface; import org.apache.hadoop.hbase.ipc.ServerRpcController; import org.apache.hadoop.hbase.ipc.SimpleRpcServer; -import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; +import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.log.HBaseMarkers; @@ -353,7 +353,7 @@ public ChoreService getChoreService() { return null; } - @Override public PBEClusterKeyCache getPBEClusterKeyCache() { + @Override public SystemKeyCache getSystemKeyCache() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java index 3896a2a4348e..280322c6e424 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java @@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.AsyncClusterConnection; import org.apache.hadoop.hbase.client.Connection; -import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache; +import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; import org.apache.hadoop.hbase.log.HBaseMarkers; @@ -103,7 +103,7 @@ public ChoreService getChoreService() { throw new UnsupportedOperationException(); } - @Override public PBEClusterKeyCache getPBEClusterKeyCache() { + @Override public SystemKeyCache getSystemKeyCache() { return null; } From df49852f0f7d8f68ace9c13e66000307237aafe3 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Wed, 2 Apr 2025 23:04:20 +0530 Subject: [PATCH 14/70] Renamed "PBE key" to "Managed ey" --- .../hbase/keymeta/KeymetaAdminClient.java | 60 ++++++++++ .../hbase/keymeta/PBEKeymetaAdminClient.java | 59 ---------- .../hadoop/hbase/io/crypto/Encryption.java | 4 +- .../{PBEKeyData.java => ManagedKeyData.java} | 62 +++++----- ...yProvider.java => ManagedKeyProvider.java} | 60 +++++----- ...BEKeyStatus.java => ManagedKeyStatus.java} | 18 +-- ...r.java => ManagedKeyStoreKeyProvider.java} | 24 ++-- ...PBEKeymetaAdmin.java => KeymetaAdmin.java} | 34 +++--- ...vider.java => MockManagedKeyProvider.java} | 38 +++---- ...va => TestManagedKeyStoreKeyProvider.java} | 64 +++++------ .../{PBEAdmin.proto => ManagedKeys.proto} | 28 ++--- .../apache/hadoop/hbase/HBaseServerBase.java | 20 ++-- .../hbase/MockRegionServerServices.java | 8 +- .../java/org/apache/hadoop/hbase/Server.java | 8 +- ...cessorBase.java => KeyManagementBase.java} | 18 +-- ...taAdminImpl.java => KeymetaAdminImpl.java} | 36 +++--- ...Service.java => KeymetaMasterService.java} | 14 +-- ...point.java => KeymetaServiceEndpoint.java} | 107 +++++++++--------- ...ccessor.java => KeymetaTableAccessor.java} | 79 ++++++------- ...yAccessor.java => ManagedKeyAccessor.java} | 40 +++---- ...ataCache.java => ManagedKeyDataCache.java} | 68 +++++------ .../hbase/keymeta/SystemKeyAccessor.java | 10 +- .../hadoop/hbase/keymeta/SystemKeyCache.java | 18 +-- .../apache/hadoop/hbase/master/HMaster.java | 8 +- .../hadoop/hbase/master/SystemKeyManager.java | 16 +-- .../hbase/regionserver/HRegionServer.java | 4 +- .../regionserver/ReplicationSyncUp.java | 8 +- ...ETestBase.java => ManagedKeyTestBase.java} | 6 +- .../hbase/master/MockNoopMasterServices.java | 8 +- .../hadoop/hbase/master/MockRegionServer.java | 8 +- .../hbase/master/TestActiveMasterManager.java | 8 +- .../hadoop/hbase/master/TestSystemKey.java | 30 ++--- .../cleaner/TestReplicationHFileCleaner.java | 8 +- ...onProcedureStorePerformanceEvaluation.java | 8 +- .../regionserver/TestHeapMemoryManager.java | 8 +- .../token/TestTokenAuthentication.java | 8 +- .../apache/hadoop/hbase/util/MockServer.java | 8 +- hbase-shell/src/main/ruby/hbase/pbe_admin.rb | 4 +- .../hadoop/hbase/HBaseTestingUtility.java | 6 +- 39 files changed, 512 insertions(+), 511 deletions(-) create mode 100644 hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java delete mode 100644 hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java rename hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/{PBEKeyData.java => ManagedKeyData.java} (80%) rename hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/{PBEKeyProvider.java => ManagedKeyProvider.java} (58%) rename hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/{PBEKeyStatus.java => ManagedKeyStatus.java} (77%) rename hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/{PBEKeyStoreKeyProvider.java => ManagedKeyStoreKeyProvider.java} (70%) rename hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/{PBEKeymetaAdmin.java => KeymetaAdmin.java} (55%) rename hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/{MockPBEKeyProvider.java => MockManagedKeyProvider.java} (67%) rename hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/{TestPBEKeyStoreKeyProvider.java => TestManagedKeyStoreKeyProvider.java} (71%) rename hbase-protocol-shaded/src/main/protobuf/server/{PBEAdmin.proto => ManagedKeys.proto} (72%) rename hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/{PBEKeyAccessorBase.java => KeyManagementBase.java} (86%) rename hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/{PBEKeymetaAdminImpl.java => KeymetaAdminImpl.java} (68%) rename hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/{PBEKeymetaMasterService.java => KeymetaMasterService.java} (80%) rename hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/{PBEKeymetaServiceEndpoint.java => KeymetaServiceEndpoint.java} (57%) rename hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/{PBEKeymetaTableAccessor.java => KeymetaTableAccessor.java} (77%) rename hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/{PBEKeyAccessor.java => ManagedKeyAccessor.java} (65%) rename hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/{PBEKeyDataCache.java => ManagedKeyDataCache.java} (57%) rename hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/{PBETestBase.java => ManagedKeyTestBase.java} (82%) diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java new file mode 100644 index 000000000000..07f5a57440b6 --- /dev/null +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java @@ -0,0 +1,60 @@ +package org.apache.hadoop.hbase.keymeta; + +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus; +import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos; +import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysRequest; +import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysResponse; +import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; +import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.io.IOException; +import java.security.KeyException; +import java.util.ArrayList; +import java.util.List; + +@InterfaceAudience.Public +public class KeymetaAdminClient implements KeymetaAdmin { + private static final Logger LOG = LoggerFactory.getLogger(KeymetaAdminClient.class); + private ManagedKeysProtos.ManagedKeysService.BlockingInterface stub; + + public KeymetaAdminClient(Connection conn) throws IOException { + this.stub = ManagedKeysProtos.ManagedKeysService.newBlockingStub(conn.getAdmin().coprocessorService()); + } + + @Override + public ManagedKeyStatus enableManagedKeys(String custSpec, String keyNamespace) throws IOException { + try { + ManagedKeysResponse response = stub.enableManagedKeys(null, + ManagedKeysRequest.newBuilder().setCustSpec(custSpec).setKeyNamespace(keyNamespace).build()); + LOG.info("Got response: " + response); + return ManagedKeyStatus.forValue((byte) response.getPbeStatus().getNumber()); + } catch (ServiceException e) { + throw ProtobufUtil.handleRemoteException(e); + } + } + + @Override + public List getManagedKeys(String custSpec, String keyNamespace) + throws IOException, KeyException { + List keyStatuses = new ArrayList<>(); + try { + ManagedKeysProtos.GetManagedKeysResponse statusResponse = stub.getManagedKeys(null, + ManagedKeysRequest.newBuilder().setCustSpec(custSpec).setKeyNamespace(keyNamespace).build()); + for (ManagedKeysResponse status: statusResponse.getStatusList()) { + keyStatuses.add(new ManagedKeyData( + status.getCustSpecBytes().toByteArray(), + status.getKeyNamespace(), null, + ManagedKeyStatus.forValue((byte) status.getPbeStatus().getNumber()), + status.getKeyMetadata(), + status.getRefreshTimestamp(), status.getReadOpCount(), status.getWriteOpCount())); + } + } catch (ServiceException e) { + throw ProtobufUtil.handleRemoteException(e); + } + return keyStatuses; + } +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java deleted file mode 100644 index bc39f3d37b29..000000000000 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java +++ /dev/null @@ -1,59 +0,0 @@ -package org.apache.hadoop.hbase.keymeta; - -import org.apache.hadoop.hbase.client.Connection; -import org.apache.hadoop.hbase.io.crypto.PBEKeyData; -import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus; -import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos; -import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminRequest; -import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminResponse; -import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; -import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException; -import org.apache.yetus.audience.InterfaceAudience; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.security.KeyException; -import java.util.ArrayList; -import java.util.List; - -@InterfaceAudience.Public -public class PBEKeymetaAdminClient implements PBEKeymetaAdmin { - private static final Logger LOG = LoggerFactory.getLogger(PBEKeymetaAdminClient.class); - private PBEAdminProtos.PBEAdminService.BlockingInterface stub; - - public PBEKeymetaAdminClient(Connection conn) throws IOException { - this.stub = PBEAdminProtos.PBEAdminService.newBlockingStub(conn.getAdmin().coprocessorService()); - } - - @Override - public PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOException { - try { - PBEAdminResponse pbeAdminResponse = stub.enablePBE(null, - PBEAdminRequest.newBuilder().setPbePrefix(pbePrefix).setKeyNamespace(keyNamespace).build()); - LOG.info("Got response: " + pbeAdminResponse); - return PBEKeyStatus.forValue((byte) pbeAdminResponse.getPbeStatus().getNumber()); - } catch (ServiceException e) { - throw ProtobufUtil.handleRemoteException(e); - } - } - - @Override public List getPBEKeyStatuses(String pbePrefix, String keyNamespace) - throws IOException, KeyException { - List keyStatuses = new ArrayList<>(); - try { - PBEAdminProtos.PBEGetStatusResponse statusResponse = stub.getPBEStatuses(null, - PBEAdminRequest.newBuilder().setPbePrefix(pbePrefix).setKeyNamespace(keyNamespace).build()); - for (PBEAdminResponse status: statusResponse.getStatusList()) { - keyStatuses.add(new PBEKeyData( - status.getPbePrefixBytes().toByteArray(), - status.getKeyNamespace(), null, - PBEKeyStatus.forValue((byte) status.getPbeStatus().getNumber()), - status.getKeyMetadata(), - status.getRefreshTimestamp(), status.getReadOpCount(), status.getWriteOpCount())); - } - } catch (ServiceException e) { - throw ProtobufUtil.handleRemoteException(e); - } - return keyStatuses; - } -} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java index c0abe8ccc59f..175df69011f0 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java @@ -577,8 +577,8 @@ public static KeyProvider getKeyProvider(Configuration conf) { provider = (KeyProvider) ReflectionUtils .newInstance(getClassLoaderForClass(KeyProvider.class).loadClass(providerClassName), conf); provider.init(providerParameters); - if (provider instanceof PBEKeyProvider) { - ((PBEKeyProvider) provider).initConfig(conf); + if (provider instanceof ManagedKeyProvider) { + ((ManagedKeyProvider) provider).initConfig(conf); } if (LOG.isDebugEnabled()) { LOG.debug("Installed " + providerClassName + " into key provider cache"); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java similarity index 80% rename from hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java rename to hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java index b49b74659233..7579b6d391ae 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java @@ -32,14 +32,14 @@ /** * This class represents an encryption key data which includes the key itself, its status, metadata * and a prefix. The metadata encodes enough information on the key such that it can be used to - * retrieve the exact same key again in the future. If the key status is {@link PBEKeyStatus#FAILED} + * retrieve the exact same key again in the future. If the key status is {@link ManagedKeyStatus#FAILED} * expect the key to be {@code null}. * * The key data is represented by the following fields: *
    - *
  • pbe_prefix: The prefix for which this key belongs to
  • + *
  • cust_spec: The prefix for which this key belongs to
  • *
  • theKey: The key capturing the bytes and encoding
  • - *
  • keyStatus: The status of the key (see {@link PBEKeyStatus})
  • + *
  • keyStatus: The status of the key (see {@link ManagedKeyStatus})
  • *
  • keyMetadata: Metadata that identifies the key
  • *
* @@ -50,13 +50,13 @@ * for validation and identification. */ @InterfaceAudience.Public -public class PBEKeyData { +public class ManagedKeyData { public static final String KEY_NAMESPACE_GLOBAL = "*"; - private final byte[] pbePrefix; + private final byte[] custSpec; private final String keyNamespace; private final Key theKey; - private final PBEKeyStatus keyStatus; + private final ManagedKeyStatus keyStatus; private final String keyMetadata; private final long refreshTimestamp; private final long readOpCount; @@ -67,33 +67,33 @@ public class PBEKeyData { /** * Constructs a new instance with the given parameters. * - * @param pbe_prefix The PBE prefix associated with the key. + * @param cust_spec The Custodian specification associated with the key. * @param theKey The actual key, can be {@code null}. * @param keyStatus The status of the key. * @param keyMetadata The metadata associated with the key. - * @throws NullPointerException if any of pbe_prefix, keyStatus or keyMetadata is null. + * @throws NullPointerException if any of cust_spec, keyStatus or keyMetadata is null. */ - public PBEKeyData(byte[] pbe_prefix, String key_namespace, Key theKey, PBEKeyStatus keyStatus, - String keyMetadata) { - this(pbe_prefix, key_namespace, theKey, keyStatus, keyMetadata, + public ManagedKeyData(byte[] cust_spec, String key_namespace, Key theKey, ManagedKeyStatus keyStatus, + String keyMetadata) { + this(cust_spec, key_namespace, theKey, keyStatus, keyMetadata, EnvironmentEdgeManager.currentTime(), 0, 0); } /** * Constructs a new instance with the given parameters. * - * @param pbe_prefix The PBE prefix associated with the key. + * @param cust_spec The Custodian specification associated with the key. * @param theKey The actual key, can be {@code null}. * @param keyStatus The status of the key. * @param keyMetadata The metadata associated with the key. * @param refreshTimestamp The timestamp when this key was last refreshed. * @param readOpCount The current number of read operations for this key. * @param writeOpCount The current number of write operations for this key. - * @throws NullPointerException if any of pbe_prefix, keyStatus or keyMetadata is null. + * @throws NullPointerException if any of cust_spec, keyStatus or keyMetadata is null. */ - public PBEKeyData(byte[] pbe_prefix, String key_namespace, Key theKey, PBEKeyStatus keyStatus, - String keyMetadata, long refreshTimestamp, long readOpCount, long writeOpCount) { - Preconditions.checkNotNull(pbe_prefix, "pbe_prefix should not be null"); + public ManagedKeyData(byte[] cust_spec, String key_namespace, Key theKey, ManagedKeyStatus keyStatus, + String keyMetadata, long refreshTimestamp, long readOpCount, long writeOpCount) { + Preconditions.checkNotNull(cust_spec, "cust_spec should not be null"); Preconditions.checkNotNull(key_namespace, "key_namespace should not be null"); Preconditions.checkNotNull(keyStatus, "keyStatus should not be null"); Preconditions.checkNotNull(keyMetadata, "keyMetadata should not be null"); @@ -102,7 +102,7 @@ public PBEKeyData(byte[] pbe_prefix, String key_namespace, Key theKey, PBEKeySta Preconditions.checkArgument(writeOpCount >= 0, "writeOpCount: " + writeOpCount + " should be >= 0"); - this.pbePrefix = pbe_prefix; + this.custSpec = cust_spec; this.keyNamespace = key_namespace; this.theKey = theKey; this.keyStatus = keyStatus; @@ -113,20 +113,20 @@ public PBEKeyData(byte[] pbe_prefix, String key_namespace, Key theKey, PBEKeySta } /** - * Returns the PBE prefix associated with the key. + * Returns the Custodian specification associated with the key. * - * @return The PBE prefix as a byte array. + * @return The Custodian specification as a byte array. */ - public byte[] getPBEPrefix() { - return pbePrefix; + public byte[] getCustodianSpec() { + return custSpec; } /** - * Return the PBE prefix in Base64 encoded form. - * @return the encoded PBE prefix. + * Return the Custodian specification in Base64 encoded form. + * @return the encoded Custodian specification. */ - public String getPBEPrefixEncoded() { - return Base64.getEncoder().encodeToString(pbePrefix); + public String getCustodianSpecEncoded() { + return Base64.getEncoder().encodeToString(custSpec); } @@ -151,9 +151,9 @@ public Key getTheKey() { /** * Returns the status of the key. * - * @return The key status as a {@code PBEKeyStatus} enum value. + * @return The key status as a {@code ManagedKeyStatus} enum value. */ - public PBEKeyStatus getKeyStatus() { + public ManagedKeyStatus getKeyStatus() { return keyStatus; } @@ -167,7 +167,7 @@ public String getKeyMetadata() { } @Override public String toString() { - return "PBEKeyData{" + "pbePrefix=" + Arrays.toString(pbePrefix) + ", keyNamespace='" + return "ManagedKeyData{" + "custSpecix=" + Arrays.toString(custSpec) + ", keyNamespace='" + keyNamespace + '\'' + ", keyStatus=" + keyStatus + ", keyMetadata='" + keyMetadata + '\'' + ", refreshTimestamp=" + refreshTimestamp + '}'; } @@ -252,10 +252,10 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; - PBEKeyData that = (PBEKeyData) o; + ManagedKeyData that = (ManagedKeyData) o; return new EqualsBuilder() - .append(pbePrefix, that.pbePrefix) + .append(custSpec, that.custSpec) .append(keyNamespace, that.keyNamespace) .append(theKey, that.theKey) .append(keyStatus, that.keyStatus) @@ -266,7 +266,7 @@ public boolean equals(Object o) { @Override public int hashCode() { return new HashCodeBuilder(17, 37) - .append(pbePrefix) + .append(custSpec) .append(keyNamespace) .append(theKey) .append(keyStatus) diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java similarity index 58% rename from hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java rename to hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java index 94080f17da88..97a6c91e55f8 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java @@ -23,29 +23,14 @@ import java.util.Base64; /** - * Interface for PBE-based key providers. Defines methods for generating and managing - * PBE-derived keys, as well as handling key storage and retrieval. + * Interface for key providers of managed keys. Defines methods for generating and managing + * managed keys, as well as handling key storage and retrieval. * * The interface extends the basic {@link KeyProvider} interface with additional - * methods for working with PBE keys. + * methods for working with managed keys. */ @InterfaceAudience.Public -public interface PBEKeyProvider extends KeyProvider { - static byte[] decodeToPrefixBytes(String pbePrefix) throws IOException { - byte[] pbe_prefix; - try { - pbe_prefix = Base64.getDecoder().decode(pbePrefix); - } - catch (IllegalArgumentException e) { - throw new IOException("Failed to decode specified prefix as Base64 string: " + pbePrefix, e); - } - return pbe_prefix; - } - - static String encodeToPrefixStr(byte[] pbe_prefix) { - return Base64.getEncoder().encodeToString(pbe_prefix); - } - +public interface ManagedKeyProvider extends KeyProvider { /** * Initialize the provider with the given configuration. * @@ -57,30 +42,47 @@ static String encodeToPrefixStr(byte[] pbe_prefix) { * Retrieve the system key using the given system identifier. * * @param systemId system identifier - * @return PBEKeyData for the system key and is guaranteed to be not {@code null} + * @return ManagedKeyData for the system key and is guaranteed to be not {@code null} * @throws IOException if an error occurs while retrieving the key */ - PBEKeyData getSystemKey(byte[] systemId) throws IOException; + ManagedKeyData getSystemKey(byte[] systemId) throws IOException; /** - * Retrieve a PBE-based key for the specified prefix. + * Retrieve a managed key for the specified prefix. * - * @param pbe_prefix Key prefix + * @param cust_spec Custodian specification. * @param key_namespace Key namespace - * @return PBEKeyData for the system key and is expected to be not {@code null} + * @return ManagedKeyData for the system key and is expected to be not {@code null} * @throws IOException if an error occurs while retrieving the key */ - PBEKeyData getPBEKey(byte[] pbe_prefix, String key_namespace) throws IOException; + ManagedKeyData getManagedKey(byte[] cust_spec, String key_namespace) throws IOException; /** * Retrieve a key identified by the key metadata. The key metadata is typically generated by the - * same key provider via the {@link #getSystemKey(byte[])} or {@link #getPBEKey(byte[], String)} - * methods. + * same key provider via the {@link #getSystemKey(byte[])} or + * {@link #getManagedKey(byte[], String)} methods. * * @param keyMetaData Key metadata - * @return PBEKeyData for the key represented by the metadata and is expected to be not + * @return ManagedKeyData for the key represented by the metadata and is expected to be not * {@code null} * @throws IOException if an error occurs while generating the key */ - PBEKeyData unwrapKey(String keyMetaData) throws IOException; + ManagedKeyData unwrapKey(String keyMetaData) throws IOException; + + static byte[] decodeToBytes(String custSpec) throws IOException { + byte[] cust_spec; + try { + cust_spec = Base64.getDecoder().decode(custSpec); + } + catch (IllegalArgumentException e) { + throw new IOException("Failed to decode specified custodian spec as Base64 string: " + + custSpec, e); + } + return cust_spec; + } + + static String encodeToStr(byte[] cust_spec) { + return Base64.getEncoder().encodeToString(cust_spec); + } + } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStatus.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStatus.java similarity index 77% rename from hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStatus.java rename to hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStatus.java index 2b7118120172..6ef9cfcfc77d 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStatus.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStatus.java @@ -25,7 +25,7 @@ * Enum of PBE key status. The status of a PBE key is used to indicate the state of the key. */ @InterfaceAudience.Public -public enum PBEKeyStatus { +public enum ManagedKeyStatus { /** Represents the active status of a PBE key. */ ACTIVE((byte) 1), /** Represents the inactive status of a PBE key. */ @@ -36,11 +36,11 @@ public enum PBEKeyStatus { DISABLED((byte) 4), ; - private static Map lookupByVal; + private static Map lookupByVal; private final byte val; - private PBEKeyStatus(byte val) { + private ManagedKeyStatus(byte val) { this.val = val; } @@ -53,14 +53,14 @@ public byte getVal() { } /** - * Returns the PBEKeyStatus for the given numeric value. - * @param val The numeric value of the desired PBEKeyStatus - * @return The PBEKeyStatus corresponding to the given value + * Returns the ManagedKeyStatus for the given numeric value. + * @param val The numeric value of the desired ManagedKeyStatus + * @return The ManagedKeyStatus corresponding to the given value */ - public static PBEKeyStatus forValue(byte val) { + public static ManagedKeyStatus forValue(byte val) { if (lookupByVal == null) { - Map tbl = new HashMap<>(); - for (PBEKeyStatus e: PBEKeyStatus.values()) { + Map tbl = new HashMap<>(); + for (ManagedKeyStatus e: ManagedKeyStatus.values()) { tbl.put(e.getVal(), e); } lookupByVal = tbl; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java similarity index 70% rename from hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java rename to hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java index 323c4d9d29e4..0104e3bc4461 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java @@ -10,7 +10,7 @@ import java.util.Map; @InterfaceAudience.Public -public class PBEKeyStoreKeyProvider extends KeyStoreKeyProvider implements PBEKeyProvider { +public class ManagedKeyStoreKeyProvider extends KeyStoreKeyProvider implements ManagedKeyProvider { public static final String KEY_METADATA_ALIAS = "KeyAlias"; public static final String KEY_METADATA_PREFIX = "PBE_PREFIX"; @@ -22,7 +22,7 @@ public void initConfig(Configuration conf) { } @Override - public PBEKeyData getSystemKey(byte[] clusterId) { + public ManagedKeyData getSystemKey(byte[] clusterId) { checkConfig(); String masterKeyAlias = conf.get(HConstants.CRYPTO_PBE_MASTERKEY_NAME_CONF_KEY, null); if (masterKeyAlias == null) { @@ -34,15 +34,15 @@ public PBEKeyData getSystemKey(byte[] clusterId) { } // Encode clusterId too for consistency with that of PBE prefixes. String keyMetadata = generateKeyMetadata(masterKeyAlias, - PBEKeyProvider.encodeToPrefixStr(clusterId)); - return new PBEKeyData(clusterId, PBEKeyData.KEY_NAMESPACE_GLOBAL, key, PBEKeyStatus.ACTIVE, + ManagedKeyProvider.encodeToStr(clusterId)); + return new ManagedKeyData(clusterId, ManagedKeyData.KEY_NAMESPACE_GLOBAL, key, ManagedKeyStatus.ACTIVE, keyMetadata); } @Override - public PBEKeyData getPBEKey(byte[] pbe_prefix, String key_namespace) throws IOException { + public ManagedKeyData getManagedKey(byte[] cust_spec, String key_namespace) throws IOException { checkConfig(); - String encodedPrefix = PBEKeyProvider.encodeToPrefixStr(pbe_prefix); + String encodedPrefix = ManagedKeyProvider.encodeToStr(cust_spec); String aliasConfKey = HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encodedPrefix + "." + "alias"; String keyMetadata = generateKeyMetadata(conf.get(aliasConfKey, null), encodedPrefix); @@ -50,22 +50,22 @@ public PBEKeyData getPBEKey(byte[] pbe_prefix, String key_namespace) throws IOEx } @Override - public PBEKeyData unwrapKey(String keyMetadataStr) throws IOException { + public ManagedKeyData unwrapKey(String keyMetadataStr) throws IOException { Map keyMetadata = GsonUtil.getDefaultInstance().fromJson(keyMetadataStr, HashMap.class); String encodedPrefix = keyMetadata.get(KEY_METADATA_PREFIX); String activeStatusConfKey = HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encodedPrefix + ".active"; boolean isActive = conf.getBoolean(activeStatusConfKey, true); - byte[] pbe_prefix = PBEKeyProvider.decodeToPrefixBytes(encodedPrefix); + byte[] cust_spec = ManagedKeyProvider.decodeToBytes(encodedPrefix); String alias = keyMetadata.get(KEY_METADATA_ALIAS); Key key = alias != null ? getKey(alias) : null; if (key != null) { - return new PBEKeyData(pbe_prefix, PBEKeyData.KEY_NAMESPACE_GLOBAL, key, - isActive ? PBEKeyStatus.ACTIVE : PBEKeyStatus.INACTIVE, keyMetadataStr); + return new ManagedKeyData(cust_spec, ManagedKeyData.KEY_NAMESPACE_GLOBAL, key, + isActive ? ManagedKeyStatus.ACTIVE : ManagedKeyStatus.INACTIVE, keyMetadataStr); } - return new PBEKeyData(pbe_prefix, PBEKeyData.KEY_NAMESPACE_GLOBAL, null, - isActive ? PBEKeyStatus.FAILED : PBEKeyStatus.DISABLED, keyMetadataStr); + return new ManagedKeyData(cust_spec, ManagedKeyData.KEY_NAMESPACE_GLOBAL, null, + isActive ? ManagedKeyStatus.FAILED : ManagedKeyStatus.DISABLED, keyMetadataStr); } private void checkConfig() { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdmin.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java similarity index 55% rename from hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdmin.java rename to hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java index 89d38c7a1b6b..56895cb9c44b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdmin.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java @@ -17,42 +17,38 @@ */ package org.apache.hadoop.hbase.keymeta; -import org.apache.hadoop.hbase.io.crypto.PBEKeyData; -import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus; import org.apache.yetus.audience.InterfaceAudience; import java.io.IOException; import java.security.KeyException; import java.util.List; /** - * PBEKeymetaAdmin is an interface for administrative functions related to PBE keys. + * KeymetaAdmin is an interface for administrative functions related to managed keys. * It handles the following methods: - * - *
    - *
  • enablePBE(): Enables PBE for a given pbe_prefix and namespace.
  • - *
*/ @InterfaceAudience.Public -public interface PBEKeymetaAdmin { +public interface KeymetaAdmin { /** - * Enables PBE for the specified key prefix and namespace. + * Enables key management for the specified custodian specification and namespace. * - * @param pbePrefix The prefix for the PBE key in base64 encoded format. - * @param keyNamespace The namespace for the PBE key. + * @param custSpec The custodian specification in base64 encoded format. + * @param keyNamespace The namespace for the key management. * - * @return The current status of the PBE key. - * @throws IOException if an error occurs while enabling PBE. + * @return The current status of the managed key. + * @throws IOException if an error occurs while enabling key management. */ - PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOException; + ManagedKeyStatus enableManagedKeys(String custSpec, String keyNamespace) throws IOException; /** - * Get the status of all the keys for the specified pbe_prefix. + * Get the status of all the keys for the specified custodian. * - * @param pbePrefix The prefix for the PBE key in base64 encoded format. - * @param keyNamespace The namespace for the PBE key. + * @param custSpec The custodian specification in base64 encoded format. + * @param keyNamespace The namespace for the key management. * @return The list of status objects each identifying the key and its current status. - * @throws IOException if an error occurs while enabling PBE. + * @throws IOException if an error occurs while enabling key management. */ - List getPBEKeyStatuses(String pbePrefix, String keyNamespace) + List getManagedKeys(String custSpec, String keyNamespace) throws IOException, KeyException; } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockPBEKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java similarity index 67% rename from hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockPBEKeyProvider.java rename to hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java index 9f7472f05460..281edb3ef1fd 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockPBEKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java @@ -19,7 +19,6 @@ import java.io.IOException; import java.security.Key; -import java.security.KeyStore; import java.security.NoSuchAlgorithmException; import java.util.HashMap; import java.util.Map; @@ -31,31 +30,31 @@ import org.slf4j.LoggerFactory; /** - * A simple implementation of PBEKeyProvider for testing. It generates a key on demand given a + * A simple implementation of ManagedKeyProvider for testing. It generates a key on demand given a * prefix. One can control the state of a key by calling setKeyStatus and can rotate a key by * calling setKey. */ -public class MockPBEKeyProvider extends MockAesKeyProvider implements PBEKeyProvider { - protected static final Logger LOG = LoggerFactory.getLogger(MockPBEKeyProvider.class); +public class MockManagedKeyProvider extends MockAesKeyProvider implements ManagedKeyProvider { + protected static final Logger LOG = LoggerFactory.getLogger(MockManagedKeyProvider.class); public Map keys = new HashMap<>(); - public Map keyStatus = new HashMap<>(); + public Map keyStatus = new HashMap<>(); private String systemKeyAlias = "default_system_key_alias"; @Override public void initConfig(Configuration conf) { // NO-OP } - @Override public PBEKeyData getSystemKey(byte[] systemId) throws IOException { + @Override public ManagedKeyData getSystemKey(byte[] systemId) throws IOException { return getKey(systemId, systemKeyAlias); } - @Override public PBEKeyData getPBEKey(byte[] pbe_prefix, String key_namespace) + @Override public ManagedKeyData getManagedKey(byte[] cust_spec, String key_namespace) throws IOException { - return getKey(pbe_prefix); + return getKey(cust_spec); } - @Override public PBEKeyData unwrapKey(String keyMetadata) throws IOException { + @Override public ManagedKeyData unwrapKey(String keyMetadata) throws IOException { String[] meta_toks = keyMetadata.split(":"); if (keys.containsKey(meta_toks[1])) { return getKey(meta_toks[0].getBytes(), meta_toks[1]); @@ -64,26 +63,27 @@ public class MockPBEKeyProvider extends MockAesKeyProvider implements PBEKeyProv } /** - * Lookup the key data for the given prefix from keys. If missing, initialize one using generateSecretKey(). + * Lookup the key data for the given cust_spec from keys. If missing, initialize one using + * generateSecretKey(). */ - public PBEKeyData getKey(byte[] prefix_bytes) { - String alias = Bytes.toString(prefix_bytes); - return getKey(prefix_bytes, alias); + public ManagedKeyData getKey(byte[] cust_spec) { + String alias = Bytes.toString(cust_spec); + return getKey(cust_spec, alias); } - public PBEKeyData getKey(byte[] prefix_bytes, String alias) { + public ManagedKeyData getKey(byte[] cust_spec, String alias) { Key key = keys.get(alias); if (key == null) { key = generateSecretKey(); keys.put(alias, key); } - PBEKeyStatus keyStatus = this.keyStatus.get(alias); - return new PBEKeyData(prefix_bytes, PBEKeyData.KEY_NAMESPACE_GLOBAL, key, - keyStatus == null ? PBEKeyStatus.ACTIVE : keyStatus, - Bytes.toString(prefix_bytes)+":"+alias); + ManagedKeyStatus keyStatus = this.keyStatus.get(alias); + return new ManagedKeyData(cust_spec, ManagedKeyData.KEY_NAMESPACE_GLOBAL, key, + keyStatus == null ? ManagedKeyStatus.ACTIVE : keyStatus, + Bytes.toString(cust_spec)+":"+alias); } - public void setKeyStatus(String alias, PBEKeyStatus status) { + public void setKeyStatus(String alias, ManagedKeyStatus status) { keyStatus.put(alias, status); } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyStoreKeyProvider.java similarity index 71% rename from hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java rename to hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyStoreKeyProvider.java index 4a52cbdcc352..85a44a00bdf8 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyStoreKeyProvider.java @@ -39,15 +39,15 @@ import java.util.Map; import java.util.Properties; import java.util.UUID; -import static org.apache.hadoop.hbase.io.crypto.PBEKeyStoreKeyProvider.KEY_METADATA_ALIAS; -import static org.apache.hadoop.hbase.io.crypto.PBEKeyStoreKeyProvider.KEY_METADATA_PREFIX; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStoreKeyProvider.KEY_METADATA_ALIAS; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStoreKeyProvider.KEY_METADATA_PREFIX; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; @Category({ MiscTests.class, SmallTests.class }) @RunWith(Parameterized.class) -public class TestPBEKeyStoreKeyProvider extends TestKeyStoreKeyProvider { +public class TestManagedKeyStoreKeyProvider extends TestKeyStoreKeyProvider { private static final String MASTER_KEY_ALIAS = "master-alias"; @@ -55,10 +55,10 @@ public class TestPBEKeyStoreKeyProvider extends TestKeyStoreKeyProvider { @ClassRule public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestPBEKeyStoreKeyProvider.class); + HBaseClassTestRule.forClass(TestManagedKeyStoreKeyProvider.class); private int nPrefixes = 2; - private PBEKeyProvider pbeProvider; + private ManagedKeyProvider managedKeyProvider; private Map prefix2key = new HashMap<>(); private Map prefix2alias = new HashMap<>(); @@ -68,12 +68,12 @@ public class TestPBEKeyStoreKeyProvider extends TestKeyStoreKeyProvider { @Before public void setUp() throws Exception { super.setUp();; - pbeProvider = (PBEKeyProvider) provider; - pbeProvider.initConfig(conf); + managedKeyProvider = (ManagedKeyProvider) provider; + managedKeyProvider.initConfig(conf); } protected KeyProvider createProvider() { - return new PBEKeyStoreKeyProvider(); + return new ManagedKeyStoreKeyProvider(); } protected void addCustomEntries(KeyStore store, Properties passwdProps) throws Exception { @@ -114,10 +114,10 @@ private void addEntry(String alias, String prefix) { } @Test - public void testGetPBEKey() throws Exception { + public void testGetManagedKey() throws Exception { for (Bytes prefix : prefix2key.keySet()) { - PBEKeyData keyData = pbeProvider.getPBEKey(prefix.get(), PBEKeyData.KEY_NAMESPACE_GLOBAL); - assertPBEKeyData(keyData, PBEKeyStatus.ACTIVE, prefix2key.get(prefix).get(), prefix.get(), + ManagedKeyData keyData = managedKeyProvider.getManagedKey(prefix.get(), ManagedKeyData.KEY_NAMESPACE_GLOBAL); + assertPBEKeyData(keyData, ManagedKeyStatus.ACTIVE, prefix2key.get(prefix).get(), prefix.get(), prefix2alias.get(prefix)); } } @@ -127,36 +127,36 @@ public void testGetInactiveKey() throws Exception { Bytes firstPrefix = prefix2key.keySet().iterator().next(); String encPrefix = Base64.getEncoder().encodeToString(firstPrefix.get()); conf.set(HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encPrefix + ".active", "false"); - PBEKeyData keyData = pbeProvider.getPBEKey(firstPrefix.get(), PBEKeyData.KEY_NAMESPACE_GLOBAL); + ManagedKeyData keyData = managedKeyProvider.getManagedKey(firstPrefix.get(), ManagedKeyData.KEY_NAMESPACE_GLOBAL); assertNotNull(keyData); - assertPBEKeyData(keyData, PBEKeyStatus.INACTIVE, prefix2key.get(firstPrefix).get(), + assertPBEKeyData(keyData, ManagedKeyStatus.INACTIVE, prefix2key.get(firstPrefix).get(), firstPrefix.get(), prefix2alias.get(firstPrefix)); } @Test public void testGetInvalidKey() throws Exception { byte[] invalidPrefixBytes = "invalid".getBytes(); - PBEKeyData keyData = pbeProvider.getPBEKey(invalidPrefixBytes, - PBEKeyData.KEY_NAMESPACE_GLOBAL); + ManagedKeyData keyData = managedKeyProvider.getManagedKey(invalidPrefixBytes, + ManagedKeyData.KEY_NAMESPACE_GLOBAL); assertNotNull(keyData); - assertPBEKeyData(keyData, PBEKeyStatus.FAILED, null, invalidPrefixBytes, null); + assertPBEKeyData(keyData, ManagedKeyStatus.FAILED, null, invalidPrefixBytes, null); } @Test public void testGetDisabledKey() throws Exception { byte[] invalidPrefix = new byte[] { 1, 2, 3 }; - String invalidPrefixEnc = PBEKeyProvider.encodeToPrefixStr(invalidPrefix); + String invalidPrefixEnc = ManagedKeyProvider.encodeToStr(invalidPrefix); conf.set(HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + invalidPrefixEnc + ".active", "false"); - PBEKeyData keyData = pbeProvider.getPBEKey(invalidPrefix, PBEKeyData.KEY_NAMESPACE_GLOBAL); + ManagedKeyData keyData = managedKeyProvider.getManagedKey(invalidPrefix, ManagedKeyData.KEY_NAMESPACE_GLOBAL); assertNotNull(keyData); - assertPBEKeyData(keyData, PBEKeyStatus.DISABLED, null, + assertPBEKeyData(keyData, ManagedKeyStatus.DISABLED, null, invalidPrefix, null); } @Test public void testGetSystemKey() throws Exception { - PBEKeyData clusterKeyData = pbeProvider.getSystemKey(clusterId.getBytes()); - assertPBEKeyData(clusterKeyData, PBEKeyStatus.ACTIVE, masterKey, clusterId.getBytes(), + ManagedKeyData clusterKeyData = managedKeyProvider.getSystemKey(clusterId.getBytes()); + assertPBEKeyData(clusterKeyData, ManagedKeyStatus.ACTIVE, masterKey, clusterId.getBytes(), MASTER_KEY_ALIAS); } @@ -164,12 +164,12 @@ public void testGetSystemKey() throws Exception { public void testUnwrapInvalidKey() throws Exception { String invalidAlias = "invalidAlias"; byte[] invalidPrefix = new byte[] { 1, 2, 3 }; - String invalidPrefixEnc = PBEKeyProvider.encodeToPrefixStr(invalidPrefix); - String invalidMetadata = PBEKeyStoreKeyProvider.generateKeyMetadata(invalidAlias, + String invalidPrefixEnc = ManagedKeyProvider.encodeToStr(invalidPrefix); + String invalidMetadata = ManagedKeyStoreKeyProvider.generateKeyMetadata(invalidAlias, invalidPrefixEnc); - PBEKeyData keyData = pbeProvider.unwrapKey(invalidMetadata); + ManagedKeyData keyData = managedKeyProvider.unwrapKey(invalidMetadata); assertNotNull(keyData); - assertPBEKeyData(keyData, PBEKeyStatus.FAILED, null, invalidPrefix, + assertPBEKeyData(keyData, ManagedKeyStatus.FAILED, null, invalidPrefix, invalidAlias); } @@ -177,16 +177,16 @@ public void testUnwrapInvalidKey() throws Exception { public void testUnwrapDisabledKey() throws Exception { String invalidAlias = "invalidAlias"; byte[] invalidPrefix = new byte[] { 1, 2, 3 }; - String invalidPrefixEnc = PBEKeyProvider.encodeToPrefixStr(invalidPrefix); + String invalidPrefixEnc = ManagedKeyProvider.encodeToStr(invalidPrefix); conf.set(HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + invalidPrefixEnc + ".active", "false"); - String invalidMetadata = PBEKeyStoreKeyProvider.generateKeyMetadata(invalidAlias, + String invalidMetadata = ManagedKeyStoreKeyProvider.generateKeyMetadata(invalidAlias, invalidPrefixEnc); - PBEKeyData keyData = pbeProvider.unwrapKey(invalidMetadata); + ManagedKeyData keyData = managedKeyProvider.unwrapKey(invalidMetadata); assertNotNull(keyData); - assertPBEKeyData(keyData, PBEKeyStatus.DISABLED, null, invalidPrefix, invalidAlias); + assertPBEKeyData(keyData, ManagedKeyStatus.DISABLED, null, invalidPrefix, invalidAlias); } - private void assertPBEKeyData(PBEKeyData keyData, PBEKeyStatus expKeyStatus, byte[] key, + private void assertPBEKeyData(ManagedKeyData keyData, ManagedKeyStatus expKeyStatus, byte[] key, byte[] prefixBytes, String alias) throws Exception { assertNotNull(keyData); assertEquals(expKeyStatus, keyData.getKeyStatus()); @@ -201,10 +201,10 @@ private void assertPBEKeyData(PBEKeyData keyData, PBEKeyStatus expKeyStatus, byt Map keyMetadata = GsonUtil.getDefaultInstance().fromJson(keyData.getKeyMetadata(), HashMap.class); assertNotNull(keyMetadata); - assertEquals(new Bytes(prefixBytes), keyData.getPBEPrefix()); + assertEquals(new Bytes(prefixBytes), keyData.getCustodianSpec()); assertEquals(alias, keyMetadata.get(KEY_METADATA_ALIAS)); assertEquals(Base64.getEncoder().encodeToString(prefixBytes), keyMetadata.get(KEY_METADATA_PREFIX)); - assertEquals(keyData, pbeProvider.unwrapKey(keyData.getKeyMetadata())); + assertEquals(keyData, managedKeyProvider.unwrapKey(keyData.getKeyMetadata())); } } diff --git a/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto b/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto similarity index 72% rename from hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto rename to hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto index 78c68d2a745d..48b68b4d4cbe 100644 --- a/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto +++ b/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto @@ -19,40 +19,40 @@ syntax = "proto2"; package hbase.pb; option java_package = "org.apache.hadoop.hbase.protobuf.generated"; -option java_outer_classname = "PBEAdminProtos"; +option java_outer_classname = "ManagedKeysProtos"; option java_generic_services = true; option java_generate_equals_and_hash = true; option optimize_for = SPEED; -message PBEAdminRequest { - required string pbe_prefix = 1; +message ManagedKeysRequest { + required string cust_spec = 1; required string key_namespace = 2; } -enum PBEKeyStatus { +enum ManagedKeyStatus { PBE_ACTIVE = 1; PBE_INACTIVE = 2; PBE_FAILED = 3; PBE_DISABLED = 4; } -message PBEAdminResponse { - required string pbe_prefix = 1; +message ManagedKeysResponse { + required string cust_spec = 1; required string key_namespace = 2; - required PBEKeyStatus pbe_status = 3; + required ManagedKeyStatus pbe_status = 3; optional string key_metadata = 4; optional int64 refresh_timestamp = 5; optional int64 read_op_count = 6; optional int64 write_op_count = 7; } -message PBEGetStatusResponse { - repeated PBEAdminResponse status = 1; +message GetManagedKeysResponse { + repeated ManagedKeysResponse status = 1; } -service PBEAdminService { - rpc EnablePBE(PBEAdminRequest) - returns (PBEAdminResponse); - rpc GetPBEStatuses(PBEAdminRequest) - returns (PBEGetStatusResponse); +service ManagedKeysService { + rpc EnableManagedKeys(ManagedKeysRequest) + returns (ManagedKeysResponse); + rpc GetManagedKeys(ManagedKeysRequest) + returns (GetManagedKeysResponse); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java index 850493455d81..f52afc418381 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java @@ -52,11 +52,11 @@ import org.apache.hadoop.hbase.http.InfoServer; import org.apache.hadoop.hbase.io.util.MemorySizeUtil; import org.apache.hadoop.hbase.ipc.RpcServerInterface; +import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; +import org.apache.hadoop.hbase.keymeta.KeymetaAdminImpl; import org.apache.hadoop.hbase.keymeta.SystemKeyAccessor; import org.apache.hadoop.hbase.keymeta.SystemKeyCache; -import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; -import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; -import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdminImpl; +import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.master.MasterCoprocessorHost; import org.apache.hadoop.hbase.namequeues.NamedQueueRecorder; @@ -193,8 +193,8 @@ public abstract class HBaseServerBase> extends protected final NettyEventLoopGroupConfig eventLoopGroupConfig; private SystemKeyCache systemKeyCache; - protected PBEKeymetaAdminImpl pbeKeymetaAdmin; - protected PBEKeyAccessor pbeKeyAccessor; + protected KeymetaAdminImpl keymetaAdmin; + protected ManagedKeyAccessor managedKeyAccessor; private void setupSignalHandlers() { if (!SystemUtils.IS_OS_WINDOWS) { @@ -292,7 +292,7 @@ public HBaseServerBase(Configuration conf, String name) throws IOException { initializeFileSystem(); - pbeKeymetaAdmin = new PBEKeymetaAdminImpl(this); + keymetaAdmin = new KeymetaAdminImpl(this); int choreServiceInitialSize = conf.getInt(CHORE_SERVICE_INITIAL_POOL_SIZE, DEFAULT_CHORE_SERVICE_INITIAL_POOL_SIZE); @@ -415,13 +415,13 @@ public ZKWatcher getZooKeeper() { } @Override - public PBEKeymetaAdmin getPBEKeymetaAdmin() { - return pbeKeymetaAdmin; + public KeymetaAdmin getPBEKeymetaAdmin() { + return keymetaAdmin; } @Override - public PBEKeyAccessor getPBEKeyAccessor() { - return pbeKeyAccessor; + public ManagedKeyAccessor getPBEKeyAccessor() { + return managedKeyAccessor; } @Override diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java index 24bd2f6f98bb..f4097eb2909f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java @@ -38,9 +38,9 @@ import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.hfile.BlockCache; import org.apache.hadoop.hbase.ipc.RpcServerInterface; +import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; import org.apache.hadoop.hbase.keymeta.SystemKeyCache; -import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; -import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; +import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; import org.apache.hadoop.hbase.mob.MobFileCache; import org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager; import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager; @@ -263,11 +263,11 @@ public ChoreService getChoreService() { return null; } - @Override public PBEKeyAccessor getPBEKeyAccessor() { + @Override public ManagedKeyAccessor getPBEKeyAccessor() { return null; } - @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() { + @Override public KeymetaAdmin getPBEKeymetaAdmin() { return null; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java index a4856ad33faa..bd0195d04222 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java @@ -23,9 +23,9 @@ import org.apache.hadoop.hbase.client.AsyncClusterConnection; import org.apache.hadoop.hbase.client.AsyncConnection; import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; import org.apache.hadoop.hbase.keymeta.SystemKeyCache; -import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; -import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; +import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.yetus.audience.InterfaceAudience; @@ -94,12 +94,12 @@ default AsyncConnection getAsyncConnection() { /** * @return the accessor for cluster keys. */ - public PBEKeyAccessor getPBEKeyAccessor(); + public ManagedKeyAccessor getPBEKeyAccessor(); /** * @return the admin for keymeta. */ - public PBEKeymetaAdmin getPBEKeymetaAdmin(); + public KeymetaAdmin getPBEKeymetaAdmin(); /** Returns Return the FileSystem object used (can return null!). */ // TODO: Distinguish between "dataFs" and "walFs". diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessorBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java similarity index 86% rename from hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessorBase.java rename to hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java index 2839669a4f3e..d95d98ab9146 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessorBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java @@ -20,7 +20,7 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.io.crypto.Encryption; import org.apache.hadoop.hbase.io.crypto.KeyProvider; -import org.apache.hadoop.hbase.io.crypto.PBEKeyProvider; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; import org.apache.hadoop.hbase.Server; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; @@ -31,15 +31,15 @@ * A base class for all keymeta accessor/manager implementations. */ @InterfaceAudience.Private -public abstract class PBEKeyAccessorBase { - protected static final Logger LOG = LoggerFactory.getLogger(PBEKeyAccessorBase.class); +public abstract class KeyManagementBase { + protected static final Logger LOG = LoggerFactory.getLogger(KeyManagementBase.class); protected final Server server; private Boolean pbeEnabled; private Integer perPrefixActiveKeyCount; - public PBEKeyAccessorBase(Server server) { + public KeyManagementBase(Server server) { this.server = server; } @@ -47,15 +47,15 @@ public PBEKeyAccessorBase(Server server) { * A utility method for getting the PBE key provider. * @return the key provider * @throws RuntimeException if no provider is configured or if the configured provider is not an - * instance of PBEKeyProvider + * instance of ManagedKeyProvider */ - protected PBEKeyProvider getKeyProvider() { + protected ManagedKeyProvider getKeyProvider() { KeyProvider provider = Encryption.getKeyProvider(server.getConfiguration()); - if (!(provider instanceof PBEKeyProvider)) { + if (!(provider instanceof ManagedKeyProvider)) { throw new RuntimeException( - "KeyProvider: " + provider.getClass().getName() + " expected to be of type PBEKeyProvider"); + "KeyProvider: " + provider.getClass().getName() + " expected to be of type ManagedKeyProvider"); } - return (PBEKeyProvider) provider; + return (ManagedKeyProvider) provider; } /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java similarity index 68% rename from hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java rename to hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java index 977a47c7e9f2..c5870eb5008c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java @@ -18,9 +18,9 @@ package org.apache.hadoop.hbase.keymeta; import org.apache.hadoop.hbase.Server; -import org.apache.hadoop.hbase.io.crypto.PBEKeyData; -import org.apache.hadoop.hbase.io.crypto.PBEKeyProvider; -import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -31,24 +31,24 @@ import java.util.Set; @InterfaceAudience.Private -public class PBEKeymetaAdminImpl extends PBEKeymetaTableAccessor implements PBEKeymetaAdmin { - private static final Logger LOG = LoggerFactory.getLogger(PBEKeymetaAdminImpl.class); +public class KeymetaAdminImpl extends KeymetaTableAccessor implements KeymetaAdmin { + private static final Logger LOG = LoggerFactory.getLogger(KeymetaAdminImpl.class); - public PBEKeymetaAdminImpl(Server server) { + public KeymetaAdminImpl(Server server) { super(server); } @Override - public PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOException { + public ManagedKeyStatus enableManagedKeys(String custSpec, String keyNamespace) throws IOException { checkPBEEnabled(); - LOG.info("Trying to enable PBE on key: {} under namespace: {}", pbePrefix, keyNamespace); - byte[] pbe_prefix = PBEKeyProvider.decodeToPrefixBytes(pbePrefix); - PBEKeyProvider provider = getKeyProvider(); + LOG.info("Trying to enable PBE on key: {} under namespace: {}", custSpec, keyNamespace); + byte[] cust_spec = ManagedKeyProvider.decodeToBytes(custSpec); + ManagedKeyProvider provider = getKeyProvider(); int perPrefixActiveKeyConfCount = getPerPrefixActiveKeyConfCount(); - Set retrievedKeys = new HashSet<>(perPrefixActiveKeyConfCount); - PBEKeyData pbeKey = null; + Set retrievedKeys = new HashSet<>(perPrefixActiveKeyConfCount); + ManagedKeyData pbeKey = null; for (int i = 0; i < perPrefixActiveKeyConfCount; ++i) { - pbeKey = provider.getPBEKey(pbe_prefix, keyNamespace); + pbeKey = provider.getManagedKey(cust_spec, keyNamespace); if (pbeKey == null) { throw new IOException("Invalid null PBE key received from key provider"); } @@ -60,7 +60,7 @@ public PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOEx } retrievedKeys.add(pbeKey); LOG.info("enablePBE: got key data with status: {} and metadata: {} for prefix: {}", - pbeKey.getKeyStatus(), pbeKey.getKeyMetadata(), pbePrefix); + pbeKey.getKeyStatus(), pbeKey.getKeyMetadata(), custSpec); addKey(pbeKey); } // pbeKey can't be null at this point as perPrefixActiveKeyConfCount will always be > 0, @@ -69,12 +69,12 @@ public PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOEx } @Override - public List getPBEKeyStatuses(String pbePrefix, String keyNamespace) + public List getManagedKeys(String custSpec, String keyNamespace) throws IOException, KeyException { checkPBEEnabled(); - LOG.info("Getting key statuses for PBE on key: {} under namespace: {}", pbePrefix, + LOG.info("Getting key statuses for PBE on key: {} under namespace: {}", custSpec, keyNamespace); - byte[] pbe_prefix = PBEKeyProvider.decodeToPrefixBytes(pbePrefix); - return super.getAllKeys(pbe_prefix, keyNamespace); + byte[] cust_spec = ManagedKeyProvider.decodeToBytes(custSpec); + return super.getAllKeys(cust_spec, keyNamespace); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaMasterService.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaMasterService.java similarity index 80% rename from hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaMasterService.java rename to hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaMasterService.java index 4c4980aee364..0a1d85c9dfeb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaMasterService.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaMasterService.java @@ -27,22 +27,22 @@ import java.io.IOException; @InterfaceAudience.Private -public class PBEKeymetaMasterService { - private static final Logger LOG = LoggerFactory.getLogger(PBEKeymetaMasterService.class); +public class KeymetaMasterService { + private static final Logger LOG = LoggerFactory.getLogger(KeymetaMasterService.class); private final MasterServices master; Boolean pbeEnabled; private static final TableDescriptorBuilder TABLE_DESCRIPTOR_BUILDER = TableDescriptorBuilder - .newBuilder(PBEKeymetaTableAccessor.KEY_META_TABLE_NAME).setRegionReplication(1) + .newBuilder(KeymetaTableAccessor.KEY_META_TABLE_NAME).setRegionReplication(1) .setPriority(HConstants.SYSTEMTABLE_QOS) .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder( - PBEKeymetaTableAccessor.KEY_META_INFO_FAMILY) + KeymetaTableAccessor.KEY_META_INFO_FAMILY) .setScope(HConstants.REPLICATION_SCOPE_LOCAL).setMaxVersions(1) .setInMemory(true) .build()); - public PBEKeymetaMasterService(MasterServices masterServices) { + public KeymetaMasterService(MasterServices masterServices) { this.master = masterServices; } @@ -50,9 +50,9 @@ public void init() throws IOException { if (!isPBEEnabled()) { return; } - if (!master.getTableDescriptors().exists(PBEKeymetaTableAccessor.KEY_META_TABLE_NAME)) { + if (!master.getTableDescriptors().exists(KeymetaTableAccessor.KEY_META_TABLE_NAME)) { LOG.info("{} table not found. Creating.", - PBEKeymetaTableAccessor.KEY_META_TABLE_NAME.getNameWithNamespaceInclAsString()); + KeymetaTableAccessor.KEY_META_TABLE_NAME.getNameWithNamespaceInclAsString()); this.master.createSystemTable(TABLE_DESCRIPTOR_BUILDER.build()); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaServiceEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java similarity index 57% rename from hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaServiceEndpoint.java rename to hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java index 40b614dc1a7f..2fb4042f99a6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaServiceEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java @@ -21,15 +21,15 @@ import org.apache.hadoop.hbase.coprocessor.CoreCoprocessor; import org.apache.hadoop.hbase.coprocessor.HasMasterServices; import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor; -import org.apache.hadoop.hbase.io.crypto.PBEKeyData; -import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus; import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils; import org.apache.hadoop.hbase.master.MasterServices; -import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos; -import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminRequest; -import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminResponse; -import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEGetStatusResponse; -import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminService; +import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos; +import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysRequest; +import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysResponse; +import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.GetManagedKeysResponse; +import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysService; import org.apache.hbase.thirdparty.com.google.protobuf.ByteString; import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback; import org.apache.hbase.thirdparty.com.google.protobuf.RpcController; @@ -49,20 +49,20 @@ * methods: * *
    - *
  • enablePBE(): Enables PBE for a given pbe_prefix and namespace.
  • + *
  • enablePBE(): Enables PBE for a given cust_spec and namespace.
  • *
* - * This endpoint is designed to work in conjunction with the {@link org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin} + * This endpoint is designed to work in conjunction with the {@link KeymetaAdmin} * interface, which provides the actual implementation of the key metadata operations. *

*/ @CoreCoprocessor @InterfaceAudience.Private -public class PBEKeymetaServiceEndpoint implements MasterCoprocessor { - private static final Logger LOG = LoggerFactory.getLogger(PBEKeymetaServiceEndpoint.class); +public class KeymetaServiceEndpoint implements MasterCoprocessor { + private static final Logger LOG = LoggerFactory.getLogger(KeymetaServiceEndpoint.class); private MasterServices master = null; - private final PBEAdminService pbeAdminService = new KeyMetaAdminServiceImpl(); + private final ManagedKeysService managedKeysService = new KeyMetaAdminServiceImpl(); /** * Starts the coprocessor by initializing the reference to the {@link org.apache.hadoop.hbase.master.MasterServices} @@ -82,21 +82,21 @@ public void start(CoprocessorEnvironment env) throws IOException { /** * Returns an iterable of the available coprocessor services, which includes the - * {@link PBEAdminService} implemented by - * {@link org.apache.hadoop.hbase.keymeta.PBEKeymetaServiceEndpoint.KeyMetaAdminServiceImpl}. + * {@link ManagedKeysService} implemented by + * {@link KeymetaServiceEndpoint.KeyMetaAdminServiceImpl}. * * @return An iterable of the available coprocessor services. */ @Override public Iterable getServices() { - return Collections.singleton(pbeAdminService); + return Collections.singleton(managedKeysService); } /** - * The implementation of the {@link PBEAdminProtos.PBEAdminService} + * The implementation of the {@link ManagedKeysProtos.ManagedKeysService} * interface, which provides the actual method implementations for enabling PBE. */ - private class KeyMetaAdminServiceImpl extends PBEAdminService { + private class KeyMetaAdminServiceImpl extends ManagedKeysService { /** * Enables PBE for a given tenant and namespace, as specified in the provided request. * @@ -105,36 +105,37 @@ private class KeyMetaAdminServiceImpl extends PBEAdminService { * @param done The callback to be invoked with the response. */ @Override - public void enablePBE(RpcController controller, PBEAdminRequest request, - RpcCallback done) { - PBEAdminResponse.Builder builder = getResponseBuilder(controller, request); - if (builder.getPbePrefix() != null) { + public void enableManagedKeys(RpcController controller, ManagedKeysRequest request, + RpcCallback done) { + ManagedKeysResponse.Builder builder = getResponseBuilder(controller, request); + if (builder.getCustSpec() != null) { try { - PBEKeyStatus pbeKeyStatus = master.getPBEKeymetaAdmin() - .enablePBE(request.getPbePrefix(), request.getKeyNamespace()); - builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.valueOf(pbeKeyStatus.getVal())); + ManagedKeyStatus managedKeyStatus = master.getPBEKeymetaAdmin() + .enableManagedKeys(request.getCustSpec(), request.getKeyNamespace()); + builder.setPbeStatus(ManagedKeysProtos.ManagedKeyStatus.valueOf( + managedKeyStatus.getVal())); } catch (IOException e) { CoprocessorRpcUtils.setControllerException(controller, e); - builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.PBE_FAILED); + builder.setPbeStatus(ManagedKeysProtos.ManagedKeyStatus.PBE_FAILED); } } done.run(builder.build()); } @Override - public void getPBEStatuses(RpcController controller, PBEAdminRequest request, - RpcCallback done) { - PBEGetStatusResponse.Builder responseBuilder = - PBEGetStatusResponse.newBuilder(); - PBEAdminResponse.Builder builder = getResponseBuilder(controller, request); - if (builder.getPbePrefix() != null) { + public void getManagedKeys(RpcController controller, ManagedKeysRequest request, + RpcCallback done) { + GetManagedKeysResponse.Builder responseBuilder = + GetManagedKeysResponse.newBuilder(); + ManagedKeysResponse.Builder builder = getResponseBuilder(controller, request); + if (builder.getCustSpec() != null) { try { - List pbeKeyStatuses = master.getPBEKeymetaAdmin() - .getPBEKeyStatuses(request.getPbePrefix(), request.getKeyNamespace()); - for (PBEKeyData keyData: pbeKeyStatuses) { + List managedKeyStatuses = master.getPBEKeymetaAdmin() + .getManagedKeys(request.getCustSpec(), request.getKeyNamespace()); + for (ManagedKeyData keyData: managedKeyStatuses) { builder.setPbeStatus( - PBEAdminProtos.PBEKeyStatus.valueOf(keyData.getKeyStatus().getVal())); - builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.valueOf( + ManagedKeysProtos.ManagedKeyStatus.valueOf(keyData.getKeyStatus().getVal())); + builder.setPbeStatus(ManagedKeysProtos.ManagedKeyStatus.valueOf( keyData.getKeyStatus().getVal())) .setKeyMetadata(keyData.getKeyMetadata()) .setRefreshTimestamp(keyData.getRefreshTimestamp()) @@ -145,40 +146,40 @@ public void getPBEStatuses(RpcController controller, PBEAdminRequest request, } } catch (IOException e) { CoprocessorRpcUtils.setControllerException(controller, e); - builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.PBE_FAILED); + builder.setPbeStatus(ManagedKeysProtos.ManagedKeyStatus.PBE_FAILED); } catch (KeyException e) { CoprocessorRpcUtils.setControllerException(controller, new IOException(e)); - builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.PBE_FAILED); + builder.setPbeStatus(ManagedKeysProtos.ManagedKeyStatus.PBE_FAILED); } } done.run(responseBuilder.build()); } - private byte[] convertToPBEBytes(RpcController controller, PBEAdminRequest request, - PBEAdminResponse.Builder builder) { - byte[] pbe_prefix = null; + private byte[] convertToPBEBytes(RpcController controller, ManagedKeysRequest request, + ManagedKeysResponse.Builder builder) { + byte[] cust_spec = null; try { - pbe_prefix = Base64.getDecoder().decode(request.getPbePrefix()); + cust_spec = Base64.getDecoder().decode(request.getCustSpec()); } catch (IllegalArgumentException e) { - builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.PBE_FAILED); + builder.setPbeStatus(ManagedKeysProtos.ManagedKeyStatus.PBE_FAILED); CoprocessorRpcUtils.setControllerException(controller, new IOException( - "Failed to decode specified prefix as Base64 string: " + request.getPbePrefix(), e)); + "Failed to decode specified prefix as Base64 string: " + request.getCustSpec(), e)); } - return pbe_prefix; + return cust_spec; } - private PBEAdminResponse.Builder getResponseBuilder(RpcController controller, - PBEAdminRequest request) { - PBEAdminResponse.Builder builder = PBEAdminResponse.newBuilder() + private ManagedKeysResponse.Builder getResponseBuilder(RpcController controller, + ManagedKeysRequest request) { + ManagedKeysResponse.Builder builder = ManagedKeysResponse.newBuilder() .setKeyNamespace(request.getKeyNamespace()); - byte[] pbe_prefix = null; + byte[] cust_spec = null; try { - pbe_prefix = Base64.getDecoder().decode(request.getPbePrefix()); - builder.setPbePrefixBytes(ByteString.copyFrom(pbe_prefix)); + cust_spec = Base64.getDecoder().decode(request.getCustSpec()); + builder.setCustSpecBytes(ByteString.copyFrom(cust_spec)); } catch (IllegalArgumentException e) { - builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.PBE_FAILED); + builder.setPbeStatus(ManagedKeysProtos.ManagedKeyStatus.PBE_FAILED); CoprocessorRpcUtils.setControllerException(controller, new IOException( - "Failed to decode specified prefix as Base64 string: " + request.getPbePrefix(), e)); + "Failed to decode specified prefix as Base64 string: " + request.getCustSpec(), e)); } return builder; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java similarity index 77% rename from hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java rename to hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java index 5f6a441106c5..c64de26f90a6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java @@ -31,8 +31,8 @@ import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.filter.PrefixFilter; -import org.apache.hadoop.hbase.io.crypto.PBEKeyData; -import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus; import org.apache.hadoop.hbase.security.EncryptionUtil; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; @@ -46,7 +46,7 @@ * Accessor for PBE keymeta table. */ @InterfaceAudience.Private -public class PBEKeymetaTableAccessor extends PBEKeyAccessorBase { +public class KeymetaTableAccessor extends KeyManagementBase { private static final String KEY_META_INFO_FAMILY_STR = "info"; public static final byte[] KEY_META_INFO_FAMILY = Bytes.toBytes(KEY_META_INFO_FAMILY_STR); @@ -78,7 +78,7 @@ public class PBEKeymetaTableAccessor extends PBEKeyAccessorBase { public static final String WRITE_OP_COUNT_QUAL_NAME = "write_op_count"; public static final byte[] WRITE_OP_COUNT_QUAL_BYTES = Bytes.toBytes(WRITE_OP_COUNT_QUAL_NAME); - public PBEKeymetaTableAccessor(Server server) { + public KeymetaTableAccessor(Server server) { super(server); } @@ -87,7 +87,7 @@ public PBEKeymetaTableAccessor(Server server) { * @param keyData The key data. * @throws IOException when there is an underlying IOException. */ - public void addKey(PBEKeyData keyData) throws IOException { + public void addKey(ManagedKeyData keyData) throws IOException { checkPBEEnabled(); final Put putForMetadata = addMutationColumns(new Put(constructRowKeyForMetadata(keyData)), keyData); @@ -98,19 +98,19 @@ public void addKey(PBEKeyData keyData) throws IOException { } /** - * Get all the keys for the specified pbe_prefix and key_namespace. + * Get all the keys for the specified cust_spec and key_namespace. * - * @param pbe_prefix The prefix + * @param cust_spec The custodian specification * @param keyNamespace The namespace * @return a list of key data, one for each key, can be empty when none were found. * @throws IOException when there is an underlying IOException. * @throws KeyException when there is an underlying KeyException. */ - protected List getAllKeys(byte[] pbe_prefix, String keyNamespace) + protected List getAllKeys(byte[] cust_spec, String keyNamespace) throws IOException, KeyException { checkPBEEnabled(); Connection connection = server.getConnection(); - byte[] prefixForScan = Bytes.add(Bytes.toBytes(pbe_prefix.length), pbe_prefix, + byte[] prefixForScan = Bytes.add(Bytes.toBytes(cust_spec.length), cust_spec, Bytes.toBytes(keyNamespace)); try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { PrefixFilter prefixFilter = new PrefixFilter(prefixForScan); @@ -119,9 +119,9 @@ protected List getAllKeys(byte[] pbe_prefix, String keyNamespace) scan.addFamily(KEY_META_INFO_FAMILY); ResultScanner scanner = table.getScanner(scan); - List allKeys = new ArrayList<>(); + List allKeys = new ArrayList<>(); for (Result result : scanner) { - PBEKeyData keyData = parseFromResult(pbe_prefix, keyNamespace, result); + ManagedKeyData keyData = parseFromResult(cust_spec, keyNamespace, result); if (keyData != null) { allKeys.add(keyData); } @@ -131,20 +131,20 @@ protected List getAllKeys(byte[] pbe_prefix, String keyNamespace) } /** - * Get all the active keys for the specified pbe_prefix and key_namespace. + * Get all the active keys for the specified cust_spec and key_namespace. * - * @param pbe_prefix The prefix + * @param cust_spec The prefix * @param keyNamespace The namespace * @return a list of key data, one for each active key, can be empty when none were found. * @throws IOException when there is an underlying IOException. * @throws KeyException when there is an underlying KeyException. */ - public List getActiveKeys(byte[] pbe_prefix, String keyNamespace) + public List getActiveKeys(byte[] cust_spec, String keyNamespace) throws IOException, KeyException { checkPBEEnabled(); - List activeKeys = new ArrayList<>(); - for (PBEKeyData keyData : getAllKeys(pbe_prefix, keyNamespace)) { - if (keyData.getKeyStatus() == PBEKeyStatus.ACTIVE) { + List activeKeys = new ArrayList<>(); + for (ManagedKeyData keyData : getAllKeys(cust_spec, keyNamespace)) { + if (keyData.getKeyStatus() == ManagedKeyStatus.ACTIVE) { activeKeys.add(keyData); } } @@ -152,43 +152,43 @@ public List getActiveKeys(byte[] pbe_prefix, String keyNamespace) } /** - * Get the specific key identified by pbe_prefix, keyNamespace and keyMetadata. + * Get the specific key identified by cust_spec, keyNamespace and keyMetadata. * - * @param pbe_prefix The prefix. + * @param cust_spec The prefix. * @param keyNamespace The namespace. * @param keyMetadata The metadata. * @return the key or {@code null} * @throws IOException when there is an underlying IOException. * @throws KeyException when there is an underlying KeyException. */ - public PBEKeyData getKey(byte[] pbe_prefix, String keyNamespace, String keyMetadata) + public ManagedKeyData getKey(byte[] cust_spec, String keyNamespace, String keyMetadata) throws IOException, KeyException { checkPBEEnabled(); Connection connection = server.getConnection(); try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { - byte[] rowKey = constructRowKeyForMetadata(pbe_prefix, keyNamespace, - PBEKeyData.constructMetadataHash(keyMetadata)); + byte[] rowKey = constructRowKeyForMetadata(cust_spec, keyNamespace, + ManagedKeyData.constructMetadataHash(keyMetadata)); Result result = table.get(new Get(rowKey)); - return parseFromResult(pbe_prefix, keyNamespace, result); + return parseFromResult(cust_spec, keyNamespace, result); } } /** - * Report read or write operation count on the specific key identified by pbe_prefix, keyNamespace + * Report read or write operation count on the specific key identified by cust_spec, keyNamespace * and keyMetadata. The reported value is added to the existing operation count using the * Increment mutation. - * @param pbe_prefix The prefix. + * @param cust_spec The prefix. * @param keyNamespace The namespace. * @param keyMetadata The metadata. * @throws IOException when there is an underlying IOException. */ - public void reportOperation(byte[] pbe_prefix, String keyNamespace, String keyMetadata, long count, + public void reportOperation(byte[] cust_spec, String keyNamespace, String keyMetadata, long count, boolean isReadOperation) throws IOException { checkPBEEnabled(); Connection connection = server.getConnection(); try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { - byte[] rowKey = constructRowKeyForMetadata(pbe_prefix, keyNamespace, - PBEKeyData.constructMetadataHash(keyMetadata)); + byte[] rowKey = constructRowKeyForMetadata(cust_spec, keyNamespace, + ManagedKeyData.constructMetadataHash(keyMetadata)); Increment incr = new Increment(rowKey) .addColumn(KEY_META_INFO_FAMILY, isReadOperation ? READ_OP_COUNT_QUAL_BYTES : WRITE_OP_COUNT_QUAL_BYTES, @@ -200,8 +200,8 @@ public void reportOperation(byte[] pbe_prefix, String keyNamespace, String keyMe /** * Add the mutation columns to the given Put that are derived from the keyData. */ - private Put addMutationColumns(Put put, PBEKeyData keyData) throws IOException { - PBEKeyData latestSystemKey = server.getSystemKeyCache().getLatestSystemKey(); + private Put addMutationColumns(Put put, ManagedKeyData keyData) throws IOException { + ManagedKeyData latestSystemKey = server.getSystemKeyCache().getLatestSystemKey(); if (keyData.getTheKey() != null) { byte[] dekWrappedBySTK = EncryptionUtil.wrapKey(server.getConfiguration(), null, keyData.getTheKey(), latestSystemKey.getTheKey()); @@ -222,19 +222,19 @@ private Put addMutationColumns(Put put, PBEKeyData keyData) throws IOException { ; } - private byte[] constructRowKeyForMetadata(PBEKeyData keyData) { - return constructRowKeyForMetadata(keyData.getPBEPrefix(), keyData.getKeyNamespace(), + private byte[] constructRowKeyForMetadata(ManagedKeyData keyData) { + return constructRowKeyForMetadata(keyData.getCustodianSpec(), keyData.getKeyNamespace(), keyData.getKeyMetadataHash()); } - private static byte[] constructRowKeyForMetadata(byte[] pbe_prefix, String keyNamespace, + private static byte[] constructRowKeyForMetadata(byte[] cust_spec, String keyNamespace, byte[] keyMetadataHash) { - int prefixLength = pbe_prefix.length; - return Bytes.add(Bytes.toBytes(prefixLength), pbe_prefix, Bytes.toBytesBinary(keyNamespace), + int prefixLength = cust_spec.length; + return Bytes.add(Bytes.toBytes(prefixLength), cust_spec, Bytes.toBytesBinary(keyNamespace), keyMetadataHash); } - private PBEKeyData parseFromResult(byte[] pbe_prefix, String keyNamespace, Result result) + private ManagedKeyData parseFromResult(byte[] cust_spec, String keyNamespace, Result result) throws IOException, KeyException { if (result == null || result.isEmpty()) { return null; @@ -246,7 +246,7 @@ private PBEKeyData parseFromResult(byte[] pbe_prefix, String keyNamespace, Resul if (dekWrappedByStk != null) { long stkChecksum = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY, STK_CHECKSUM_QUAL_BYTES)); - PBEKeyData clusterKey = server.getSystemKeyCache().getSystemKeyByChecksum(stkChecksum); + ManagedKeyData clusterKey = server.getSystemKeyCache().getSystemKeyByChecksum(stkChecksum); if (clusterKey == null) { LOG.error("Dropping key with metadata: {} as STK with checksum: {} is unavailable", dekMetadata, stkChecksum); @@ -255,7 +255,7 @@ private PBEKeyData parseFromResult(byte[] pbe_prefix, String keyNamespace, Resul dek = EncryptionUtil.unwrapKey(server.getConfiguration(), null, dekWrappedByStk, clusterKey.getTheKey()); } - PBEKeyStatus keyStatus = PBEKeyStatus.forValue( + ManagedKeyStatus keyStatus = ManagedKeyStatus.forValue( result.getValue(KEY_META_INFO_FAMILY, KEY_STATUS_QUAL_BYTES)[0]); long refreshedTimestamp = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY, REFRESHED_TIMESTAMP_QUAL_BYTES)); @@ -263,7 +263,8 @@ private PBEKeyData parseFromResult(byte[] pbe_prefix, String keyNamespace, Resul long readOpCount = readOpValue != null ? Bytes.toLong(readOpValue) : 0; byte[] writeOpValue = result.getValue(KEY_META_INFO_FAMILY, WRITE_OP_COUNT_QUAL_BYTES); long writeOpCount = writeOpValue != null ? Bytes.toLong(writeOpValue) : 0; - PBEKeyData dekKeyData = new PBEKeyData(pbe_prefix, keyNamespace, dek, keyStatus, dekMetadata, + ManagedKeyData + dekKeyData = new ManagedKeyData(cust_spec, keyNamespace, dek, keyStatus, dekMetadata, refreshedTimestamp, readOpCount, writeOpCount); if (dek != null) { long dekChecksum = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java similarity index 65% rename from hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java rename to hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java index 1c9922c62ab1..24d496f10bfc 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.keymeta; -import org.apache.hadoop.hbase.io.crypto.PBEKeyData; -import org.apache.hadoop.hbase.io.crypto.PBEKeyProvider; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; import org.apache.yetus.audience.InterfaceAudience; import java.io.IOException; import java.security.KeyException; @@ -26,44 +26,44 @@ /** * This class provides unified access on top of both {@code PBEKeyDataCache} (L1) and - * {@code PBEKeymetaTableAccessor} (L2) to access PBE keys. When the getter is called, it first + * {@code KeymetaTableAccessor} (L2) to access PBE keys. When the getter is called, it first * checks if L1 cache has the key, if not, it tries to get the key from L2. */ @InterfaceAudience.Private -public class PBEKeyAccessor extends PBEKeyAccessorBase { - private final PBEKeyDataCache keyDataCache; - private final PBEKeymetaTableAccessor keymetaAccessor; +public class ManagedKeyAccessor extends KeyManagementBase { + private final ManagedKeyDataCache keyDataCache; + private final KeymetaTableAccessor keymetaAccessor; - public PBEKeyAccessor(PBEKeymetaTableAccessor keymetaAccessor) { + public ManagedKeyAccessor(KeymetaTableAccessor keymetaAccessor) { super(keymetaAccessor.server); this.keymetaAccessor = keymetaAccessor; - keyDataCache = new PBEKeyDataCache(); + keyDataCache = new ManagedKeyDataCache(); } /** * Get key data by key metadata. * - * @param pbe_prefix The prefix of the key + * @param cust_spec The custodian spec. * @param keyNamespace The namespace of the key * @param keyMetadata The metadata of the key * @return The key data or {@code null} * @throws IOException if an error occurs while retrieving the key */ - public PBEKeyData getKey(byte[] pbe_prefix, String keyNamespace, String keyMetadata) + public ManagedKeyData getKey(byte[] cust_spec, String keyNamespace, String keyMetadata) throws IOException, KeyException { checkPBEEnabled(); // 1. Check L1 cache. - PBEKeyData keyData = keyDataCache.getEntry(keyMetadata); + ManagedKeyData keyData = keyDataCache.getEntry(keyMetadata); if (keyData == null) { // 2. Check L2 cache. - keyData = keymetaAccessor.getKey(pbe_prefix, keyNamespace, keyMetadata); + keyData = keymetaAccessor.getKey(cust_spec, keyNamespace, keyMetadata); if (keyData == null) { // 3. Check with Key Provider. - PBEKeyProvider provider = getKeyProvider(); + ManagedKeyProvider provider = getKeyProvider(); keyData = provider.unwrapKey(keyMetadata); LOG.info("Got key data with status: {} and metadata: {} for prefix: {}", keyData.getKeyStatus(), keyData.getKeyMetadata(), - PBEKeyProvider.encodeToPrefixStr(pbe_prefix)); + ManagedKeyProvider.encodeToStr(cust_spec)); keymetaAccessor.addKey(keyData); } if (keyData != null) { @@ -76,21 +76,21 @@ public PBEKeyData getKey(byte[] pbe_prefix, String keyNamespace, String keyMetad /** * Get an active key for the given prefix suitable for use in encryption. * - * @param pbePrefix The prefix of the key + * @param cust_spec The custodian specification * @param keyNamespace The namespace of the key * @return The key data * @throws IOException if an error occurs while retrieving the key */ - public PBEKeyData getAnActiveKey(byte[] pbePrefix, String keyNamespace) + public ManagedKeyData getAnActiveKey(byte[] cust_spec, String keyNamespace) throws IOException, KeyException { checkPBEEnabled(); - PBEKeyData keyData = keyDataCache.getRandomEntryForPrefix(pbePrefix, keyNamespace); + ManagedKeyData keyData = keyDataCache.getRandomEntryForPrefix(cust_spec, keyNamespace); if (keyData == null) { - List activeKeys = keymetaAccessor.getActiveKeys(pbePrefix, keyNamespace); - for (PBEKeyData kd: activeKeys) { + List activeKeys = keymetaAccessor.getActiveKeys(cust_spec, keyNamespace); + for (ManagedKeyData kd: activeKeys) { keyDataCache.addEntry(kd); } - keyData = keyDataCache.getRandomEntryForPrefix(pbePrefix, keyNamespace); + keyData = keyDataCache.getRandomEntryForPrefix(cust_spec, keyNamespace); } return keyData; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyDataCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java similarity index 57% rename from hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyDataCache.java rename to hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java index 51bba86a3c4b..2406559decb4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyDataCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.keymeta; -import org.apache.hadoop.hbase.io.crypto.PBEKeyData; -import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; @@ -29,15 +29,15 @@ import java.util.concurrent.locks.ReentrantLock; /** - * In-memory cache for PBEKeyData entries, using key metadata as the cache key. + * In-memory cache for ManagedKeyData entries, using key metadata as the cache key. */ @InterfaceAudience.Private -public class PBEKeyDataCache { - private final Map cache; - private final Map>> prefixCache; +public class ManagedKeyDataCache { + private final Map cache; + private final Map>> prefixCache; private final ReentrantLock lock; - public PBEKeyDataCache() { + public ManagedKeyDataCache() { this.prefixCache = new HashMap<>(); this.cache = new HashMap<>(); this.lock = new ReentrantLock(); @@ -46,29 +46,29 @@ public PBEKeyDataCache() { /** * Adds a new entry to the cache. * - * @param pbeKeyData the PBEKeyData entry to be added + * @param keyData the ManagedKeyData entry to be added */ - public void addEntry(PBEKeyData pbeKeyData) { + public void addEntry(ManagedKeyData keyData) { lock.lock(); try { - Bytes pbePrefix = new Bytes(pbeKeyData.getPBEPrefix()); - String keyNamespace = pbeKeyData.getKeyNamespace(); + Bytes custSpec = new Bytes(keyData.getCustodianSpec()); + String keyNamespace = keyData.getKeyNamespace(); - cache.put(pbeKeyData.getKeyMetadata(), pbeKeyData); + cache.put(keyData.getKeyMetadata(), keyData); - Map> nsCache = prefixCache.get(keyNamespace); + Map> nsCache = prefixCache.get(keyNamespace); if (nsCache == null) { nsCache = new HashMap<>(); prefixCache.put(keyNamespace, nsCache); } - List keyList = nsCache.get(pbePrefix); + List keyList = nsCache.get(custSpec); if (keyList == null) { keyList = new ArrayList<>(); - prefixCache.get(keyNamespace).put(pbePrefix, keyList); + prefixCache.get(keyNamespace).put(custSpec, keyList); } - keyList.add(pbeKeyData); + keyList.add(keyData); } finally { lock.unlock(); } @@ -78,9 +78,9 @@ public void addEntry(PBEKeyData pbeKeyData) { * Retrieves an entry from the cache based on its key metadata. * * @param keyMetadata the key metadata of the entry to be retrieved - * @return the corresponding PBEKeyData entry, or null if not found + * @return the corresponding ManagedKeyData entry, or null if not found */ - public PBEKeyData getEntry(String keyMetadata) { + public ManagedKeyData getEntry(String keyMetadata) { lock.lock(); try { return cache.get(keyMetadata); @@ -93,21 +93,21 @@ public PBEKeyData getEntry(String keyMetadata) { * Removes an entry from the cache based on its key metadata. * * @param keyMetadata the key metadata of the entry to be removed - * @return the removed PBEKeyData entry, or null if not found + * @return the removed ManagedKeyData entry, or null if not found */ - public PBEKeyData removeEntry(String keyMetadata) { + public ManagedKeyData removeEntry(String keyMetadata) { lock.lock(); try { - PBEKeyData removedEntry = cache.remove(keyMetadata); + ManagedKeyData removedEntry = cache.remove(keyMetadata); if (removedEntry != null) { - Bytes pbePrefix = new Bytes(removedEntry.getPBEPrefix()); + Bytes custSpec = new Bytes(removedEntry.getCustodianSpec()); String keyNamespace = removedEntry.getKeyNamespace(); - Map> nsCache = prefixCache.get(keyNamespace); - List keyList = nsCache != null ? nsCache.get(pbePrefix) : null; + Map> nsCache = prefixCache.get(keyNamespace); + List keyList = nsCache != null ? nsCache.get(custSpec) : null; if (keyList != null) { keyList.remove(removedEntry); if (keyList.isEmpty()) { - prefixCache.get(keyNamespace).remove(pbePrefix); + prefixCache.get(keyNamespace).remove(custSpec); } } } @@ -121,21 +121,21 @@ public PBEKeyData removeEntry(String keyMetadata) { * Retrieves a random entry from the cache based on its PBE prefix, key namespace, and filters out entries with * a status other than ACTIVE. * - * @param pbe_prefix the PBE prefix to search for + * @param cust_spec the custodian specification. * @param keyNamespace the key namespace to search for - * @return a random PBEKeyData entry with the given PBE prefix and ACTIVE status, or null if not found + * @return a random ManagedKeyData entry with the given PBE prefix and ACTIVE status, or null if not found */ - public PBEKeyData getRandomEntryForPrefix(byte[] pbe_prefix, String keyNamespace) { + public ManagedKeyData getRandomEntryForPrefix(byte[] cust_spec, String keyNamespace) { lock.lock(); try { - List activeEntries = new ArrayList<>(); + List activeEntries = new ArrayList<>(); - Bytes pbePrefix = new Bytes(pbe_prefix); - Map> nsCache = prefixCache.get(keyNamespace); - List keyList = nsCache != null ? nsCache.get(pbePrefix) : null; + Bytes custSpec = new Bytes(cust_spec); + Map> nsCache = prefixCache.get(keyNamespace); + List keyList = nsCache != null ? nsCache.get(custSpec) : null; if (keyList != null) { - for (PBEKeyData entry : keyList) { - if (entry.getKeyStatus() == PBEKeyStatus.ACTIVE) { + for (ManagedKeyData entry : keyList) { + if (entry.getKeyStatus() == ManagedKeyStatus.ACTIVE) { activeEntries.add(entry); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java index 5b0eff824e33..1c73d5a3059b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java @@ -22,8 +22,8 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Server; -import org.apache.hadoop.hbase.io.crypto.PBEKeyData; -import org.apache.hadoop.hbase.io.crypto.PBEKeyProvider; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.yetus.audience.InterfaceAudience; import java.io.IOException; @@ -35,7 +35,7 @@ import static org.apache.hadoop.hbase.HConstants.SYSTEM_KEY_FILE_PREFIX; @InterfaceAudience.Private -public class SystemKeyAccessor extends PBEKeyAccessorBase { +public class SystemKeyAccessor extends KeyManagementBase { protected final Path systemKeyDir; public SystemKeyAccessor(Server server) throws IOException { @@ -78,8 +78,8 @@ public List getAllSystemKeyFiles() throws IOException { return new ArrayList<>(clusterKeys.values()); } - public PBEKeyData loadSystemKey(Path keyPath) throws IOException { - PBEKeyProvider provider = getKeyProvider(); + public ManagedKeyData loadSystemKey(Path keyPath) throws IOException { + ManagedKeyProvider provider = getKeyProvider(); return provider.unwrapKey(loadKeyMetadata(keyPath)); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java index f61e20b1ae29..e0e4a5a84795 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.keymeta; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.io.crypto.PBEKeyData; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -31,8 +31,8 @@ public class SystemKeyCache { private static final Logger LOG = LoggerFactory.getLogger(SystemKeyCache.class); - private final PBEKeyData latestSystemKey; - private final Map systemKeys; + private final ManagedKeyData latestSystemKey; + private final Map systemKeys; /** * Construct the System Key cache from the specified accessor. @@ -46,11 +46,11 @@ public static SystemKeyCache createCache(SystemKeyAccessor accessor) throws IOEx LOG.warn("No system key files found, skipping cache creation"); return null; } - PBEKeyData latestSystemKey = null; - Map systemKeys = new TreeMap<>(); + ManagedKeyData latestSystemKey = null; + Map systemKeys = new TreeMap<>(); for (Path keyPath: allSystemKeyFiles) { LOG.info("Loading system key from: {}", keyPath); - PBEKeyData keyData = accessor.loadSystemKey(keyPath); + ManagedKeyData keyData = accessor.loadSystemKey(keyPath); if (latestSystemKey == null) { latestSystemKey = keyData; } @@ -59,16 +59,16 @@ public static SystemKeyCache createCache(SystemKeyAccessor accessor) throws IOEx return new SystemKeyCache(systemKeys, latestSystemKey); } - private SystemKeyCache(Map systemKeys, PBEKeyData latestSystemKey) { + private SystemKeyCache(Map systemKeys, ManagedKeyData latestSystemKey) { this.systemKeys = systemKeys; this.latestSystemKey = latestSystemKey; } - public PBEKeyData getLatestSystemKey() { + public ManagedKeyData getLatestSystemKey() { return latestSystemKey; } - public PBEKeyData getSystemKeyByChecksum(long checksum) { + public ManagedKeyData getSystemKeyByChecksum(long checksum) { return systemKeys.get(checksum); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java index d97c7ed3f5eb..e4bc0f2ea768 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java @@ -122,7 +122,7 @@ import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils; import org.apache.hadoop.hbase.ipc.RpcServer; import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException; -import org.apache.hadoop.hbase.keymeta.PBEKeymetaMasterService; +import org.apache.hadoop.hbase.keymeta.KeymetaMasterService; import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.master.MasterRpcServices.BalanceSwitchMode; import org.apache.hadoop.hbase.master.assignment.AssignmentManager; @@ -357,7 +357,7 @@ public class HMaster extends HBaseServerBase implements Maste private MasterFileSystem fileSystemManager; private MasterWalManager walManager; private SystemKeyManager systemKeyManager; - private PBEKeymetaMasterService pbeKeymetaMasterService; + private KeymetaMasterService keymetaMasterService; // manager to manage procedure-based WAL splitting, can be null if current // is zk-based WAL splitting. SplitWALManager will replace SplitLogManager @@ -1038,8 +1038,8 @@ private void finishActiveMasterInitialization() throws IOException, InterruptedE Map, List>> procsByType = procedureExecutor .getActiveProceduresNoCopy().stream().collect(Collectors.groupingBy(p -> p.getClass())); - pbeKeymetaMasterService = new PBEKeymetaMasterService(this); - pbeKeymetaMasterService.init(); + keymetaMasterService = new KeymetaMasterService(this); + keymetaMasterService.init(); // Create Assignment Manager this.assignmentManager = createAssignmentManager(this, masterRegion); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java index e4ece78a4852..ba40906c955b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java @@ -22,9 +22,9 @@ import java.util.UUID; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.io.crypto.PBEKeyData; -import org.apache.hadoop.hbase.io.crypto.PBEKeyProvider; -import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus; import org.apache.hadoop.hbase.keymeta.SystemKeyAccessor; import org.apache.yetus.audience.InterfaceAudience; import static org.apache.hadoop.hbase.HConstants.SYSTEM_KEY_FILE_PREFIX; @@ -58,7 +58,7 @@ else if (rotateSystemKeyIfChanged() != null) { } } - public PBEKeyData rotateSystemKeyIfChanged() throws IOException { + public ManagedKeyData rotateSystemKeyIfChanged() throws IOException { if (! isPBEEnabled()) { return null; } @@ -67,14 +67,14 @@ public PBEKeyData rotateSystemKeyIfChanged() throws IOException { return rotateSystemKey(latestKeyMetadata); } - private PBEKeyData rotateSystemKey(String currentKeyMetadata) throws IOException { + private ManagedKeyData rotateSystemKey(String currentKeyMetadata) throws IOException { if (! isPBEEnabled()) { return null; } - PBEKeyProvider provider = getKeyProvider(); - PBEKeyData clusterKey = provider.getSystemKey( + ManagedKeyProvider provider = getKeyProvider(); + ManagedKeyData clusterKey = provider.getSystemKey( master.getMasterFileSystem().getClusterId().toString().getBytes()); - if (clusterKey.getKeyStatus() != PBEKeyStatus.ACTIVE) { + if (clusterKey.getKeyStatus() != ManagedKeyStatus.ACTIVE) { throw new IOException("System key is expected to be ACTIVE but it is: " + clusterKey.getKeyStatus() + " for metadata: " + clusterKey.getKeyMetadata()); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index 4641ceb29152..0df4d1db890f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -120,7 +120,7 @@ import org.apache.hadoop.hbase.ipc.RpcServer; import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException; import org.apache.hadoop.hbase.ipc.ServerRpcController; -import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; +import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.mob.MobFileCache; import org.apache.hadoop.hbase.mob.RSMobFileCleanerChore; @@ -1450,7 +1450,7 @@ protected void handleReportForDutyResponse(final RegionServerStartupResponse c) } buildSystemKeyCache(); - pbeKeyAccessor = new PBEKeyAccessor(pbeKeymetaAdmin); + managedKeyAccessor = new ManagedKeyAccessor(keymetaAdmin); // hack! Maps DFSClient => RegionServer for logs. HDFS made this // config param for task trackers, but we can piggyback off of it. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java index d6690b4bff16..ea376369d542 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java @@ -41,9 +41,9 @@ import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.AsyncClusterConnection; import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; import org.apache.hadoop.hbase.keymeta.SystemKeyCache; -import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; -import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; +import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; import org.apache.hadoop.hbase.master.replication.OfflineTableReplicationQueueStorage; import org.apache.hadoop.hbase.replication.ReplicationException; import org.apache.hadoop.hbase.replication.ReplicationGroupOffset; @@ -373,11 +373,11 @@ public ChoreService getChoreService() { return null; } - @Override public PBEKeyAccessor getPBEKeyAccessor() { + @Override public ManagedKeyAccessor getPBEKeyAccessor() { return null; } - @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() { + @Override public KeymetaAdmin getPBEKeymetaAdmin() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/PBETestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyTestBase.java similarity index 82% rename from hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/PBETestBase.java rename to hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyTestBase.java index 7d257dcf6ec0..f649aedc38f1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/PBETestBase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyTestBase.java @@ -2,16 +2,16 @@ import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.io.crypto.MockPBEKeyProvider; +import org.apache.hadoop.hbase.io.crypto.MockManagedKeyProvider; import org.junit.After; import org.junit.Before; -public class PBETestBase { +public class ManagedKeyTestBase { protected HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); @Before public void setUp() throws Exception { - TEST_UTIL.getConfiguration().set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockPBEKeyProvider.class.getName()); + TEST_UTIL.getConfiguration().set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockManagedKeyProvider.class.getName()); TEST_UTIL.getConfiguration().set(HConstants.CRYPTO_PBE_ENABLED_CONF_KEY, "true"); // Start the minicluster diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java index c8958fae761c..f35e2e7fb23f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java @@ -38,9 +38,9 @@ import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.executor.ExecutorService; import org.apache.hadoop.hbase.favored.FavoredNodesManager; +import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; import org.apache.hadoop.hbase.keymeta.SystemKeyCache; -import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; -import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; +import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; import org.apache.hadoop.hbase.master.assignment.AssignmentManager; import org.apache.hadoop.hbase.master.hbck.HbckChore; import org.apache.hadoop.hbase.master.janitor.CatalogJanitor; @@ -123,11 +123,11 @@ public ChoreService getChoreService() { return null; } - @Override public PBEKeyAccessor getPBEKeyAccessor() { + @Override public ManagedKeyAccessor getPBEKeyAccessor() { return null; } - @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() { + @Override public KeymetaAdmin getPBEKeymetaAdmin() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java index ac19adfaf0b0..6008e6884e13 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java @@ -52,9 +52,9 @@ import org.apache.hadoop.hbase.io.hfile.BlockCache; import org.apache.hadoop.hbase.ipc.HBaseRpcController; import org.apache.hadoop.hbase.ipc.RpcServerInterface; +import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; import org.apache.hadoop.hbase.keymeta.SystemKeyCache; -import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; -import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; +import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; import org.apache.hadoop.hbase.mob.MobFileCache; import org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager; import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager; @@ -563,11 +563,11 @@ public ChoreService getChoreService() { return null; } - @Override public PBEKeyAccessor getPBEKeyAccessor() { + @Override public ManagedKeyAccessor getPBEKeyAccessor() { return null; } - @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() { + @Override public KeymetaAdmin getPBEKeymetaAdmin() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java index e09ce4ded4c9..1255971b458d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java @@ -34,8 +34,8 @@ import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.keymeta.SystemKeyCache; -import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; -import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; +import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; +import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; import org.apache.hadoop.hbase.monitoring.MonitoredTask; import org.apache.hadoop.hbase.monitoring.TaskGroup; import org.apache.hadoop.hbase.testclassification.MasterTests; @@ -335,11 +335,11 @@ public ActiveMasterManager getActiveMasterManager() { return null; } - @Override public PBEKeyAccessor getPBEKeyAccessor() { + @Override public ManagedKeyAccessor getPBEKeyAccessor() { return null; } - @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() { + @Override public KeymetaAdmin getPBEKeymetaAdmin() { return null; } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java index 3ab1aa140d34..acc1094da62d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java @@ -20,13 +20,13 @@ import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.io.crypto.Encryption; import org.apache.hadoop.hbase.io.crypto.KeyProvider; -import org.apache.hadoop.hbase.io.crypto.PBEKeyData; -import org.apache.hadoop.hbase.io.crypto.PBEKeyProvider; -import org.apache.hadoop.hbase.io.crypto.MockPBEKeyProvider; -import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; +import org.apache.hadoop.hbase.io.crypto.MockManagedKeyProvider; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus; import org.apache.hadoop.hbase.keymeta.SystemKeyAccessor; import org.apache.hadoop.hbase.keymeta.SystemKeyCache; -import org.apache.hadoop.hbase.keymeta.PBETestBase; +import org.apache.hadoop.hbase.keymeta.ManagedKeyTestBase; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; @@ -41,7 +41,7 @@ import static org.junit.Assert.assertTrue; @Category({ MasterTests.class, MediumTests.class }) -public class TestSystemKey extends PBETestBase { +public class TestSystemKey extends ManagedKeyTestBase { @ClassRule public static final HBaseClassTestRule CLASS_RULE = @@ -52,10 +52,10 @@ public void testSystemKeyInitializationAndRotation() throws Exception { HMaster master = TEST_UTIL.getHBaseCluster().getMaster(); KeyProvider keyProvider = Encryption.getKeyProvider(master.getConfiguration()); assertNotNull(keyProvider); - assertTrue(keyProvider instanceof PBEKeyProvider); - assertTrue(keyProvider instanceof MockPBEKeyProvider); - MockPBEKeyProvider pbeKeyProvider = (MockPBEKeyProvider) keyProvider; - PBEKeyData initialSystemKey = validateInitialState(master, pbeKeyProvider); + assertTrue(keyProvider instanceof ManagedKeyProvider); + assertTrue(keyProvider instanceof MockManagedKeyProvider); + MockManagedKeyProvider pbeKeyProvider = (MockManagedKeyProvider) keyProvider; + ManagedKeyData initialSystemKey = validateInitialState(master, pbeKeyProvider); restartSystem(); master = TEST_UTIL.getHBaseCluster().getMaster(); @@ -64,7 +64,7 @@ public void testSystemKeyInitializationAndRotation() throws Exception { // Test rotation of cluster key by changing the key that the key provider provides and restart master. String newAlias = "new_cluster_key"; pbeKeyProvider.setCluterKeyAlias(newAlias); - Key newCluterKey = MockPBEKeyProvider.generateSecretKey(); + Key newCluterKey = MockManagedKeyProvider.generateSecretKey(); pbeKeyProvider.setKey(newAlias, newCluterKey); restartSystem(); master = TEST_UTIL.getHBaseCluster().getMaster(); @@ -83,22 +83,22 @@ public void testSystemKeyInitializationAndRotation() throws Exception { public void testWithInvalidSystemKey() throws Exception { HMaster master = TEST_UTIL.getHBaseCluster().getMaster(); KeyProvider keyProvider = Encryption.getKeyProvider(master.getConfiguration()); - MockPBEKeyProvider pbeKeyProvider = (MockPBEKeyProvider) keyProvider; + MockManagedKeyProvider pbeKeyProvider = (MockManagedKeyProvider) keyProvider; // Test startup failure when the cluster key is INACTIVE SystemKeyManager tmpCKM = new SystemKeyManager(master); tmpCKM.ensureSystemKeyInitialized(); - pbeKeyProvider.setKeyStatus(pbeKeyProvider.getSystemKeyAlias(), PBEKeyStatus.INACTIVE); + pbeKeyProvider.setKeyStatus(pbeKeyProvider.getSystemKeyAlias(), ManagedKeyStatus.INACTIVE); assertThrows(IOException.class, tmpCKM::ensureSystemKeyInitialized); } - private PBEKeyData validateInitialState(HMaster master, MockPBEKeyProvider pbeKeyProvider ) + private ManagedKeyData validateInitialState(HMaster master, MockManagedKeyProvider pbeKeyProvider ) throws IOException { SystemKeyAccessor systemKeyAccessor = new SystemKeyAccessor(master); assertEquals(1, systemKeyAccessor.getAllSystemKeyFiles().size()); SystemKeyCache systemKeyCache = master.getSystemKeyCache(); assertNotNull(systemKeyCache); - PBEKeyData clusterKey = systemKeyCache.getLatestSystemKey(); + ManagedKeyData clusterKey = systemKeyCache.getLatestSystemKey(); assertEquals(pbeKeyProvider.getSystemKey(master.getClusterId().getBytes()), clusterKey); assertEquals(clusterKey, systemKeyCache.getSystemKeyByChecksum(clusterKey.getKeyChecksum())); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java index 2f85ee3e92e4..e80ee85f1c16 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java @@ -38,9 +38,9 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.TableDescriptor; +import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; import org.apache.hadoop.hbase.keymeta.SystemKeyCache; -import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; -import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; +import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.replication.ReplicationException; import org.apache.hadoop.hbase.replication.ReplicationFactory; @@ -222,11 +222,11 @@ public Connection getConnection() { return null; } - @Override public PBEKeyAccessor getPBEKeyAccessor() { + @Override public ManagedKeyAccessor getPBEKeyAccessor() { return null; } - @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() { + @Override public KeymetaAdmin getPBEKeymetaAdmin() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStorePerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStorePerformanceEvaluation.java index 783a04168eca..b3adc8ace813 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStorePerformanceEvaluation.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStorePerformanceEvaluation.java @@ -24,9 +24,9 @@ import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.io.util.MemorySizeUtil; +import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; import org.apache.hadoop.hbase.keymeta.SystemKeyCache; -import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; -import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; +import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; import org.apache.hadoop.hbase.master.region.MasterRegion; import org.apache.hadoop.hbase.master.region.MasterRegionFactory; import org.apache.hadoop.hbase.procedure2.store.ProcedureStorePerformanceEvaluation; @@ -65,11 +65,11 @@ public ServerName getServerName() { return null; } - @Override public PBEKeyAccessor getPBEKeyAccessor() { + @Override public ManagedKeyAccessor getPBEKeyAccessor() { return null; } - @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() { + @Override public KeymetaAdmin getPBEKeymetaAdmin() { return null; } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java index 7069cd6380ff..2ddd6739b1ab 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java @@ -46,9 +46,9 @@ import org.apache.hadoop.hbase.io.hfile.CachedBlock; import org.apache.hadoop.hbase.io.hfile.ResizableBlockCache; import org.apache.hadoop.hbase.io.util.MemorySizeUtil; +import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; import org.apache.hadoop.hbase.keymeta.SystemKeyCache; -import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; -import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; +import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; import org.apache.hadoop.hbase.regionserver.HeapMemoryManager.TunerContext; import org.apache.hadoop.hbase.regionserver.HeapMemoryManager.TunerResult; import org.apache.hadoop.hbase.testclassification.MediumTests; @@ -844,11 +844,11 @@ public ChoreService getChoreService() { return null; } - @Override public PBEKeyAccessor getPBEKeyAccessor() { + @Override public ManagedKeyAccessor getPBEKeyAccessor() { return null; } - @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() { + @Override public KeymetaAdmin getPBEKeymetaAdmin() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java index d2f92b0a13f5..863dfc7fcfdd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java @@ -56,8 +56,8 @@ import org.apache.hadoop.hbase.ipc.ServerRpcController; import org.apache.hadoop.hbase.ipc.SimpleRpcServer; import org.apache.hadoop.hbase.keymeta.SystemKeyCache; -import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; -import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; +import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; +import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.regionserver.RegionServerServices; import org.apache.hadoop.hbase.security.SecurityInfo; @@ -357,11 +357,11 @@ public ChoreService getChoreService() { return null; } - @Override public PBEKeyAccessor getPBEKeyAccessor() { + @Override public ManagedKeyAccessor getPBEKeyAccessor() { return null; } - @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() { + @Override public KeymetaAdmin getPBEKeymetaAdmin() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java index 280322c6e424..8e9e4e56866c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java @@ -27,8 +27,8 @@ import org.apache.hadoop.hbase.client.AsyncClusterConnection; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.keymeta.SystemKeyCache; -import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor; -import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin; +import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; +import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.slf4j.Logger; @@ -107,11 +107,11 @@ public ChoreService getChoreService() { return null; } - @Override public PBEKeyAccessor getPBEKeyAccessor() { + @Override public ManagedKeyAccessor getPBEKeyAccessor() { return null; } - @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() { + @Override public KeymetaAdmin getPBEKeymetaAdmin() { return null; } diff --git a/hbase-shell/src/main/ruby/hbase/pbe_admin.rb b/hbase-shell/src/main/ruby/hbase/pbe_admin.rb index 656999be8071..710edabfdc64 100644 --- a/hbase-shell/src/main/ruby/hbase/pbe_admin.rb +++ b/hbase-shell/src/main/ruby/hbase/pbe_admin.rb @@ -16,8 +16,8 @@ # include Java -java_import org.apache.hadoop.hbase.io.crypto.PBEKeyData -java_import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdminClient +java_import org.apache.hadoop.hbase.io.crypto.ManagedKeyData +java_import org.apache.hadoop.hbase.keymeta.KeymetaAdminClient module Hbase class PBEAdmin diff --git a/hbase-testing-util/src/main/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-testing-util/src/main/java/org/apache/hadoop/hbase/HBaseTestingUtility.java index a15b8db5e701..6fe8428b27f7 100644 --- a/hbase-testing-util/src/main/java/org/apache/hadoop/hbase/HBaseTestingUtility.java +++ b/hbase-testing-util/src/main/java/org/apache/hadoop/hbase/HBaseTestingUtility.java @@ -89,7 +89,7 @@ import org.apache.hadoop.hbase.io.hfile.ChecksumUtil; import org.apache.hadoop.hbase.io.hfile.HFile; import org.apache.hadoop.hbase.ipc.RpcServerInterface; -import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdminClient; +import org.apache.hadoop.hbase.keymeta.KeymetaAdminClient; import org.apache.hadoop.hbase.logging.Log4jUtils; import org.apache.hadoop.hbase.mapreduce.MapreduceTestingShim; import org.apache.hadoop.hbase.master.HMaster; @@ -2945,8 +2945,8 @@ public Admin getAdmin() throws IOException { return hbaseAdmin; } - public PBEKeymetaAdminClient getPBEAdmin() throws IOException { - return new PBEKeymetaAdminClient(getConnection()); + public KeymetaAdminClient getPBEAdmin() throws IOException { + return new KeymetaAdminClient(getConnection()); } /** From ca262b5f19b392309a2c48ab535c19833002b790 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Thu, 3 Apr 2025 09:59:32 +0530 Subject: [PATCH 15/70] Renamed "cust spec" to "key cust" --- .../hbase/keymeta/KeymetaAdminClient.java | 10 ++-- .../hbase/io/crypto/ManagedKeyData.java | 44 ++++++++--------- .../hbase/io/crypto/ManagedKeyProvider.java | 20 ++++---- .../io/crypto/ManagedKeyStoreKeyProvider.java | 10 ++-- .../hadoop/hbase/keymeta/KeymetaAdmin.java | 10 ++-- .../io/crypto/MockManagedKeyProvider.java | 18 +++---- .../TestManagedKeyStoreKeyProvider.java | 2 +- .../main/protobuf/server/ManagedKeys.proto | 4 +- .../hbase/keymeta/KeymetaAdminImpl.java | 18 +++---- .../hbase/keymeta/KeymetaServiceEndpoint.java | 26 +++++----- .../hbase/keymeta/KeymetaTableAccessor.java | 48 +++++++++---------- .../hbase/keymeta/ManagedKeyAccessor.java | 18 +++---- .../hbase/keymeta/ManagedKeyDataCache.java | 20 ++++---- 13 files changed, 124 insertions(+), 124 deletions(-) diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java index 07f5a57440b6..78e36779de0f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java @@ -26,10 +26,10 @@ public KeymetaAdminClient(Connection conn) throws IOException { } @Override - public ManagedKeyStatus enableManagedKeys(String custSpec, String keyNamespace) throws IOException { + public ManagedKeyStatus enableManagedKeys(String keyCust, String keyNamespace) throws IOException { try { ManagedKeysResponse response = stub.enableManagedKeys(null, - ManagedKeysRequest.newBuilder().setCustSpec(custSpec).setKeyNamespace(keyNamespace).build()); + ManagedKeysRequest.newBuilder().setKeyCust(keyCust).setKeyNamespace(keyNamespace).build()); LOG.info("Got response: " + response); return ManagedKeyStatus.forValue((byte) response.getPbeStatus().getNumber()); } catch (ServiceException e) { @@ -38,15 +38,15 @@ public ManagedKeyStatus enableManagedKeys(String custSpec, String keyNamespace) } @Override - public List getManagedKeys(String custSpec, String keyNamespace) + public List getManagedKeys(String keyCust, String keyNamespace) throws IOException, KeyException { List keyStatuses = new ArrayList<>(); try { ManagedKeysProtos.GetManagedKeysResponse statusResponse = stub.getManagedKeys(null, - ManagedKeysRequest.newBuilder().setCustSpec(custSpec).setKeyNamespace(keyNamespace).build()); + ManagedKeysRequest.newBuilder().setKeyCust(keyCust).setKeyNamespace(keyNamespace).build()); for (ManagedKeysResponse status: statusResponse.getStatusList()) { keyStatuses.add(new ManagedKeyData( - status.getCustSpecBytes().toByteArray(), + status.getKeyCustBytes().toByteArray(), status.getKeyNamespace(), null, ManagedKeyStatus.forValue((byte) status.getPbeStatus().getNumber()), status.getKeyMetadata(), diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java index 7579b6d391ae..95777be89942 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java @@ -37,7 +37,7 @@ * * The key data is represented by the following fields: *
    - *
  • cust_spec: The prefix for which this key belongs to
  • + *
  • key_cust: The prefix for which this key belongs to
  • *
  • theKey: The key capturing the bytes and encoding
  • *
  • keyStatus: The status of the key (see {@link ManagedKeyStatus})
  • *
  • keyMetadata: Metadata that identifies the key
  • @@ -53,7 +53,7 @@ public class ManagedKeyData { public static final String KEY_NAMESPACE_GLOBAL = "*"; - private final byte[] custSpec; + private final byte[] keyCust; private final String keyNamespace; private final Key theKey; private final ManagedKeyStatus keyStatus; @@ -67,33 +67,33 @@ public class ManagedKeyData { /** * Constructs a new instance with the given parameters. * - * @param cust_spec The Custodian specification associated with the key. + * @param key_cust The key custodian. * @param theKey The actual key, can be {@code null}. * @param keyStatus The status of the key. * @param keyMetadata The metadata associated with the key. - * @throws NullPointerException if any of cust_spec, keyStatus or keyMetadata is null. + * @throws NullPointerException if any of key_cust, keyStatus or keyMetadata is null. */ - public ManagedKeyData(byte[] cust_spec, String key_namespace, Key theKey, ManagedKeyStatus keyStatus, + public ManagedKeyData(byte[] key_cust, String key_namespace, Key theKey, ManagedKeyStatus keyStatus, String keyMetadata) { - this(cust_spec, key_namespace, theKey, keyStatus, keyMetadata, + this(key_cust, key_namespace, theKey, keyStatus, keyMetadata, EnvironmentEdgeManager.currentTime(), 0, 0); } /** * Constructs a new instance with the given parameters. * - * @param cust_spec The Custodian specification associated with the key. + * @param key_cust The key custodian. * @param theKey The actual key, can be {@code null}. * @param keyStatus The status of the key. * @param keyMetadata The metadata associated with the key. * @param refreshTimestamp The timestamp when this key was last refreshed. * @param readOpCount The current number of read operations for this key. * @param writeOpCount The current number of write operations for this key. - * @throws NullPointerException if any of cust_spec, keyStatus or keyMetadata is null. + * @throws NullPointerException if any of key_cust, keyStatus or keyMetadata is null. */ - public ManagedKeyData(byte[] cust_spec, String key_namespace, Key theKey, ManagedKeyStatus keyStatus, + public ManagedKeyData(byte[] key_cust, String key_namespace, Key theKey, ManagedKeyStatus keyStatus, String keyMetadata, long refreshTimestamp, long readOpCount, long writeOpCount) { - Preconditions.checkNotNull(cust_spec, "cust_spec should not be null"); + Preconditions.checkNotNull(key_cust, "key_cust should not be null"); Preconditions.checkNotNull(key_namespace, "key_namespace should not be null"); Preconditions.checkNotNull(keyStatus, "keyStatus should not be null"); Preconditions.checkNotNull(keyMetadata, "keyMetadata should not be null"); @@ -102,7 +102,7 @@ public ManagedKeyData(byte[] cust_spec, String key_namespace, Key theKey, Manage Preconditions.checkArgument(writeOpCount >= 0, "writeOpCount: " + writeOpCount + " should be >= 0"); - this.custSpec = cust_spec; + this.keyCust = key_cust; this.keyNamespace = key_namespace; this.theKey = theKey; this.keyStatus = keyStatus; @@ -113,20 +113,20 @@ public ManagedKeyData(byte[] cust_spec, String key_namespace, Key theKey, Manage } /** - * Returns the Custodian specification associated with the key. + * Returns the custodian associated with the key. * - * @return The Custodian specification as a byte array. + * @return The key custodian as a byte array. */ - public byte[] getCustodianSpec() { - return custSpec; + public byte[] getKeyCustodian() { + return keyCust; } /** - * Return the Custodian specification in Base64 encoded form. - * @return the encoded Custodian specification. + * Return the key Custodian in Base64 encoded form. + * @return the encoded key custodian */ - public String getCustodianSpecEncoded() { - return Base64.getEncoder().encodeToString(custSpec); + public String getKeyCustodianEncoded() { + return Base64.getEncoder().encodeToString(keyCust); } @@ -167,7 +167,7 @@ public String getKeyMetadata() { } @Override public String toString() { - return "ManagedKeyData{" + "custSpecix=" + Arrays.toString(custSpec) + ", keyNamespace='" + return "ManagedKeyData{" + "custSpecix=" + Arrays.toString(keyCust) + ", keyNamespace='" + keyNamespace + '\'' + ", keyStatus=" + keyStatus + ", keyMetadata='" + keyMetadata + '\'' + ", refreshTimestamp=" + refreshTimestamp + '}'; } @@ -255,7 +255,7 @@ public boolean equals(Object o) { ManagedKeyData that = (ManagedKeyData) o; return new EqualsBuilder() - .append(custSpec, that.custSpec) + .append(keyCust, that.keyCust) .append(keyNamespace, that.keyNamespace) .append(theKey, that.theKey) .append(keyStatus, that.keyStatus) @@ -266,7 +266,7 @@ public boolean equals(Object o) { @Override public int hashCode() { return new HashCodeBuilder(17, 37) - .append(custSpec) + .append(keyCust) .append(keyNamespace) .append(theKey) .append(keyStatus) diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java index 97a6c91e55f8..faceaf9212ee 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java @@ -50,12 +50,12 @@ public interface ManagedKeyProvider extends KeyProvider { /** * Retrieve a managed key for the specified prefix. * - * @param cust_spec Custodian specification. + * @param key_cust The key custodian. * @param key_namespace Key namespace * @return ManagedKeyData for the system key and is expected to be not {@code null} * @throws IOException if an error occurs while retrieving the key */ - ManagedKeyData getManagedKey(byte[] cust_spec, String key_namespace) throws IOException; + ManagedKeyData getManagedKey(byte[] key_cust, String key_namespace) throws IOException; /** * Retrieve a key identified by the key metadata. The key metadata is typically generated by the @@ -69,20 +69,20 @@ public interface ManagedKeyProvider extends KeyProvider { */ ManagedKeyData unwrapKey(String keyMetaData) throws IOException; - static byte[] decodeToBytes(String custSpec) throws IOException { - byte[] cust_spec; + static byte[] decodeToBytes(String keyCust) throws IOException { + byte[] key_cust; try { - cust_spec = Base64.getDecoder().decode(custSpec); + key_cust = Base64.getDecoder().decode(keyCust); } catch (IllegalArgumentException e) { - throw new IOException("Failed to decode specified custodian spec as Base64 string: " + - custSpec, e); + throw new IOException("Failed to decode specified key custodian as Base64 string: " + + keyCust, e); } - return cust_spec; + return key_cust; } - static String encodeToStr(byte[] cust_spec) { - return Base64.getEncoder().encodeToString(cust_spec); + static String encodeToStr(byte[] key_cust) { + return Base64.getEncoder().encodeToString(key_cust); } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java index 0104e3bc4461..5e3b1043c850 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java @@ -40,9 +40,9 @@ public ManagedKeyData getSystemKey(byte[] clusterId) { } @Override - public ManagedKeyData getManagedKey(byte[] cust_spec, String key_namespace) throws IOException { + public ManagedKeyData getManagedKey(byte[] key_cust, String key_namespace) throws IOException { checkConfig(); - String encodedPrefix = ManagedKeyProvider.encodeToStr(cust_spec); + String encodedPrefix = ManagedKeyProvider.encodeToStr(key_cust); String aliasConfKey = HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encodedPrefix + "." + "alias"; String keyMetadata = generateKeyMetadata(conf.get(aliasConfKey, null), encodedPrefix); @@ -57,14 +57,14 @@ public ManagedKeyData unwrapKey(String keyMetadataStr) throws IOException { String activeStatusConfKey = HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encodedPrefix + ".active"; boolean isActive = conf.getBoolean(activeStatusConfKey, true); - byte[] cust_spec = ManagedKeyProvider.decodeToBytes(encodedPrefix); + byte[] key_cust = ManagedKeyProvider.decodeToBytes(encodedPrefix); String alias = keyMetadata.get(KEY_METADATA_ALIAS); Key key = alias != null ? getKey(alias) : null; if (key != null) { - return new ManagedKeyData(cust_spec, ManagedKeyData.KEY_NAMESPACE_GLOBAL, key, + return new ManagedKeyData(key_cust, ManagedKeyData.KEY_NAMESPACE_GLOBAL, key, isActive ? ManagedKeyStatus.ACTIVE : ManagedKeyStatus.INACTIVE, keyMetadataStr); } - return new ManagedKeyData(cust_spec, ManagedKeyData.KEY_NAMESPACE_GLOBAL, null, + return new ManagedKeyData(key_cust, ManagedKeyData.KEY_NAMESPACE_GLOBAL, null, isActive ? ManagedKeyStatus.FAILED : ManagedKeyStatus.DISABLED, keyMetadataStr); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java index 56895cb9c44b..4edc90f2ac20 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java @@ -31,24 +31,24 @@ @InterfaceAudience.Public public interface KeymetaAdmin { /** - * Enables key management for the specified custodian specification and namespace. + * Enables key management for the specified custodian and namespace. * - * @param custSpec The custodian specification in base64 encoded format. + * @param keyCust The key custodian in base64 encoded format. * @param keyNamespace The namespace for the key management. * * @return The current status of the managed key. * @throws IOException if an error occurs while enabling key management. */ - ManagedKeyStatus enableManagedKeys(String custSpec, String keyNamespace) throws IOException; + ManagedKeyStatus enableManagedKeys(String keyCust, String keyNamespace) throws IOException; /** * Get the status of all the keys for the specified custodian. * - * @param custSpec The custodian specification in base64 encoded format. + * @param keyCust The key custodian in base64 encoded format. * @param keyNamespace The namespace for the key management. * @return The list of status objects each identifying the key and its current status. * @throws IOException if an error occurs while enabling key management. */ - List getManagedKeys(String custSpec, String keyNamespace) + List getManagedKeys(String keyCust, String keyNamespace) throws IOException, KeyException; } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java index 281edb3ef1fd..444e8695c1a1 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java @@ -49,9 +49,9 @@ public class MockManagedKeyProvider extends MockAesKeyProvider implements Manage return getKey(systemId, systemKeyAlias); } - @Override public ManagedKeyData getManagedKey(byte[] cust_spec, String key_namespace) + @Override public ManagedKeyData getManagedKey(byte[] key_cust, String key_namespace) throws IOException { - return getKey(cust_spec); + return getKey(key_cust); } @Override public ManagedKeyData unwrapKey(String keyMetadata) throws IOException { @@ -63,24 +63,24 @@ public class MockManagedKeyProvider extends MockAesKeyProvider implements Manage } /** - * Lookup the key data for the given cust_spec from keys. If missing, initialize one using + * Lookup the key data for the given key_cust from keys. If missing, initialize one using * generateSecretKey(). */ - public ManagedKeyData getKey(byte[] cust_spec) { - String alias = Bytes.toString(cust_spec); - return getKey(cust_spec, alias); + public ManagedKeyData getKey(byte[] key_cust) { + String alias = Bytes.toString(key_cust); + return getKey(key_cust, alias); } - public ManagedKeyData getKey(byte[] cust_spec, String alias) { + public ManagedKeyData getKey(byte[] key_cust, String alias) { Key key = keys.get(alias); if (key == null) { key = generateSecretKey(); keys.put(alias, key); } ManagedKeyStatus keyStatus = this.keyStatus.get(alias); - return new ManagedKeyData(cust_spec, ManagedKeyData.KEY_NAMESPACE_GLOBAL, key, + return new ManagedKeyData(key_cust, ManagedKeyData.KEY_NAMESPACE_GLOBAL, key, keyStatus == null ? ManagedKeyStatus.ACTIVE : keyStatus, - Bytes.toString(cust_spec)+":"+alias); + Bytes.toString(key_cust)+":"+alias); } public void setKeyStatus(String alias, ManagedKeyStatus status) { diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyStoreKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyStoreKeyProvider.java index 85a44a00bdf8..3733bee5bb3c 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyStoreKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyStoreKeyProvider.java @@ -201,7 +201,7 @@ private void assertPBEKeyData(ManagedKeyData keyData, ManagedKeyStatus expKeySta Map keyMetadata = GsonUtil.getDefaultInstance().fromJson(keyData.getKeyMetadata(), HashMap.class); assertNotNull(keyMetadata); - assertEquals(new Bytes(prefixBytes), keyData.getCustodianSpec()); + assertEquals(new Bytes(prefixBytes), keyData.getKeyCustodian()); assertEquals(alias, keyMetadata.get(KEY_METADATA_ALIAS)); assertEquals(Base64.getEncoder().encodeToString(prefixBytes), keyMetadata.get(KEY_METADATA_PREFIX)); diff --git a/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto b/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto index 48b68b4d4cbe..94fed871900b 100644 --- a/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto +++ b/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto @@ -25,7 +25,7 @@ option java_generate_equals_and_hash = true; option optimize_for = SPEED; message ManagedKeysRequest { - required string cust_spec = 1; + required string key_cust = 1; required string key_namespace = 2; } @@ -37,7 +37,7 @@ enum ManagedKeyStatus { } message ManagedKeysResponse { - required string cust_spec = 1; + required string key_cust = 1; required string key_namespace = 2; required ManagedKeyStatus pbe_status = 3; optional string key_metadata = 4; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java index c5870eb5008c..0cc25ef1afc3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java @@ -39,16 +39,16 @@ public KeymetaAdminImpl(Server server) { } @Override - public ManagedKeyStatus enableManagedKeys(String custSpec, String keyNamespace) throws IOException { + public ManagedKeyStatus enableManagedKeys(String keyCust, String keyNamespace) throws IOException { checkPBEEnabled(); - LOG.info("Trying to enable PBE on key: {} under namespace: {}", custSpec, keyNamespace); - byte[] cust_spec = ManagedKeyProvider.decodeToBytes(custSpec); + LOG.info("Trying to enable PBE on key: {} under namespace: {}", keyCust, keyNamespace); + byte[] key_cust = ManagedKeyProvider.decodeToBytes(keyCust); ManagedKeyProvider provider = getKeyProvider(); int perPrefixActiveKeyConfCount = getPerPrefixActiveKeyConfCount(); Set retrievedKeys = new HashSet<>(perPrefixActiveKeyConfCount); ManagedKeyData pbeKey = null; for (int i = 0; i < perPrefixActiveKeyConfCount; ++i) { - pbeKey = provider.getManagedKey(cust_spec, keyNamespace); + pbeKey = provider.getManagedKey(key_cust, keyNamespace); if (pbeKey == null) { throw new IOException("Invalid null PBE key received from key provider"); } @@ -60,7 +60,7 @@ public ManagedKeyStatus enableManagedKeys(String custSpec, String keyNamespace) } retrievedKeys.add(pbeKey); LOG.info("enablePBE: got key data with status: {} and metadata: {} for prefix: {}", - pbeKey.getKeyStatus(), pbeKey.getKeyMetadata(), custSpec); + pbeKey.getKeyStatus(), pbeKey.getKeyMetadata(), keyCust); addKey(pbeKey); } // pbeKey can't be null at this point as perPrefixActiveKeyConfCount will always be > 0, @@ -69,12 +69,12 @@ public ManagedKeyStatus enableManagedKeys(String custSpec, String keyNamespace) } @Override - public List getManagedKeys(String custSpec, String keyNamespace) + public List getManagedKeys(String keyCust, String keyNamespace) throws IOException, KeyException { checkPBEEnabled(); - LOG.info("Getting key statuses for PBE on key: {} under namespace: {}", custSpec, + LOG.info("Getting key statuses for PBE on key: {} under namespace: {}", keyCust, keyNamespace); - byte[] cust_spec = ManagedKeyProvider.decodeToBytes(custSpec); - return super.getAllKeys(cust_spec, keyNamespace); + byte[] key_cust = ManagedKeyProvider.decodeToBytes(keyCust); + return super.getAllKeys(key_cust, keyNamespace); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java index 2fb4042f99a6..11240bfcb5eb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java @@ -49,7 +49,7 @@ * methods: * *
      - *
    • enablePBE(): Enables PBE for a given cust_spec and namespace.
    • + *
    • enablePBE(): Enables PBE for a given key_cust and namespace.
    • *
    * * This endpoint is designed to work in conjunction with the {@link KeymetaAdmin} @@ -108,10 +108,10 @@ private class KeyMetaAdminServiceImpl extends ManagedKeysService { public void enableManagedKeys(RpcController controller, ManagedKeysRequest request, RpcCallback done) { ManagedKeysResponse.Builder builder = getResponseBuilder(controller, request); - if (builder.getCustSpec() != null) { + if (builder.getKeyCust() != null) { try { ManagedKeyStatus managedKeyStatus = master.getPBEKeymetaAdmin() - .enableManagedKeys(request.getCustSpec(), request.getKeyNamespace()); + .enableManagedKeys(request.getKeyCust(), request.getKeyNamespace()); builder.setPbeStatus(ManagedKeysProtos.ManagedKeyStatus.valueOf( managedKeyStatus.getVal())); } catch (IOException e) { @@ -128,10 +128,10 @@ public void getManagedKeys(RpcController controller, ManagedKeysRequest request, GetManagedKeysResponse.Builder responseBuilder = GetManagedKeysResponse.newBuilder(); ManagedKeysResponse.Builder builder = getResponseBuilder(controller, request); - if (builder.getCustSpec() != null) { + if (builder.getKeyCust() != null) { try { List managedKeyStatuses = master.getPBEKeymetaAdmin() - .getManagedKeys(request.getCustSpec(), request.getKeyNamespace()); + .getManagedKeys(request.getKeyCust(), request.getKeyNamespace()); for (ManagedKeyData keyData: managedKeyStatuses) { builder.setPbeStatus( ManagedKeysProtos.ManagedKeyStatus.valueOf(keyData.getKeyStatus().getVal())); @@ -157,29 +157,29 @@ public void getManagedKeys(RpcController controller, ManagedKeysRequest request, private byte[] convertToPBEBytes(RpcController controller, ManagedKeysRequest request, ManagedKeysResponse.Builder builder) { - byte[] cust_spec = null; + byte[] key_cust = null; try { - cust_spec = Base64.getDecoder().decode(request.getCustSpec()); + key_cust = Base64.getDecoder().decode(request.getKeyCust()); } catch (IllegalArgumentException e) { builder.setPbeStatus(ManagedKeysProtos.ManagedKeyStatus.PBE_FAILED); CoprocessorRpcUtils.setControllerException(controller, new IOException( - "Failed to decode specified prefix as Base64 string: " + request.getCustSpec(), e)); + "Failed to decode specified prefix as Base64 string: " + request.getKeyCust(), e)); } - return cust_spec; + return key_cust; } private ManagedKeysResponse.Builder getResponseBuilder(RpcController controller, ManagedKeysRequest request) { ManagedKeysResponse.Builder builder = ManagedKeysResponse.newBuilder() .setKeyNamespace(request.getKeyNamespace()); - byte[] cust_spec = null; + byte[] key_cust = null; try { - cust_spec = Base64.getDecoder().decode(request.getCustSpec()); - builder.setCustSpecBytes(ByteString.copyFrom(cust_spec)); + key_cust = Base64.getDecoder().decode(request.getKeyCust()); + builder.setKeyCustBytes(ByteString.copyFrom(key_cust)); } catch (IllegalArgumentException e) { builder.setPbeStatus(ManagedKeysProtos.ManagedKeyStatus.PBE_FAILED); CoprocessorRpcUtils.setControllerException(controller, new IOException( - "Failed to decode specified prefix as Base64 string: " + request.getCustSpec(), e)); + "Failed to decode specified prefix as Base64 string: " + request.getKeyCust(), e)); } return builder; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java index c64de26f90a6..2b976a2c2540 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java @@ -98,19 +98,19 @@ public void addKey(ManagedKeyData keyData) throws IOException { } /** - * Get all the keys for the specified cust_spec and key_namespace. + * Get all the keys for the specified key_cust and key_namespace. * - * @param cust_spec The custodian specification + * @param key_cust The key custodian. * @param keyNamespace The namespace * @return a list of key data, one for each key, can be empty when none were found. * @throws IOException when there is an underlying IOException. * @throws KeyException when there is an underlying KeyException. */ - protected List getAllKeys(byte[] cust_spec, String keyNamespace) + protected List getAllKeys(byte[] key_cust, String keyNamespace) throws IOException, KeyException { checkPBEEnabled(); Connection connection = server.getConnection(); - byte[] prefixForScan = Bytes.add(Bytes.toBytes(cust_spec.length), cust_spec, + byte[] prefixForScan = Bytes.add(Bytes.toBytes(key_cust.length), key_cust, Bytes.toBytes(keyNamespace)); try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { PrefixFilter prefixFilter = new PrefixFilter(prefixForScan); @@ -121,7 +121,7 @@ protected List getAllKeys(byte[] cust_spec, String keyNamespace) ResultScanner scanner = table.getScanner(scan); List allKeys = new ArrayList<>(); for (Result result : scanner) { - ManagedKeyData keyData = parseFromResult(cust_spec, keyNamespace, result); + ManagedKeyData keyData = parseFromResult(key_cust, keyNamespace, result); if (keyData != null) { allKeys.add(keyData); } @@ -131,19 +131,19 @@ protected List getAllKeys(byte[] cust_spec, String keyNamespace) } /** - * Get all the active keys for the specified cust_spec and key_namespace. + * Get all the active keys for the specified key_cust and key_namespace. * - * @param cust_spec The prefix + * @param key_cust The prefix * @param keyNamespace The namespace * @return a list of key data, one for each active key, can be empty when none were found. * @throws IOException when there is an underlying IOException. * @throws KeyException when there is an underlying KeyException. */ - public List getActiveKeys(byte[] cust_spec, String keyNamespace) + public List getActiveKeys(byte[] key_cust, String keyNamespace) throws IOException, KeyException { checkPBEEnabled(); List activeKeys = new ArrayList<>(); - for (ManagedKeyData keyData : getAllKeys(cust_spec, keyNamespace)) { + for (ManagedKeyData keyData : getAllKeys(key_cust, keyNamespace)) { if (keyData.getKeyStatus() == ManagedKeyStatus.ACTIVE) { activeKeys.add(keyData); } @@ -152,42 +152,42 @@ public List getActiveKeys(byte[] cust_spec, String keyNamespace) } /** - * Get the specific key identified by cust_spec, keyNamespace and keyMetadata. + * Get the specific key identified by key_cust, keyNamespace and keyMetadata. * - * @param cust_spec The prefix. + * @param key_cust The prefix. * @param keyNamespace The namespace. * @param keyMetadata The metadata. * @return the key or {@code null} * @throws IOException when there is an underlying IOException. * @throws KeyException when there is an underlying KeyException. */ - public ManagedKeyData getKey(byte[] cust_spec, String keyNamespace, String keyMetadata) + public ManagedKeyData getKey(byte[] key_cust, String keyNamespace, String keyMetadata) throws IOException, KeyException { checkPBEEnabled(); Connection connection = server.getConnection(); try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { - byte[] rowKey = constructRowKeyForMetadata(cust_spec, keyNamespace, + byte[] rowKey = constructRowKeyForMetadata(key_cust, keyNamespace, ManagedKeyData.constructMetadataHash(keyMetadata)); Result result = table.get(new Get(rowKey)); - return parseFromResult(cust_spec, keyNamespace, result); + return parseFromResult(key_cust, keyNamespace, result); } } /** - * Report read or write operation count on the specific key identified by cust_spec, keyNamespace + * Report read or write operation count on the specific key identified by key_cust, keyNamespace * and keyMetadata. The reported value is added to the existing operation count using the * Increment mutation. - * @param cust_spec The prefix. + * @param key_cust The prefix. * @param keyNamespace The namespace. * @param keyMetadata The metadata. * @throws IOException when there is an underlying IOException. */ - public void reportOperation(byte[] cust_spec, String keyNamespace, String keyMetadata, long count, + public void reportOperation(byte[] key_cust, String keyNamespace, String keyMetadata, long count, boolean isReadOperation) throws IOException { checkPBEEnabled(); Connection connection = server.getConnection(); try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { - byte[] rowKey = constructRowKeyForMetadata(cust_spec, keyNamespace, + byte[] rowKey = constructRowKeyForMetadata(key_cust, keyNamespace, ManagedKeyData.constructMetadataHash(keyMetadata)); Increment incr = new Increment(rowKey) .addColumn(KEY_META_INFO_FAMILY, @@ -223,18 +223,18 @@ private Put addMutationColumns(Put put, ManagedKeyData keyData) throws IOExcepti } private byte[] constructRowKeyForMetadata(ManagedKeyData keyData) { - return constructRowKeyForMetadata(keyData.getCustodianSpec(), keyData.getKeyNamespace(), + return constructRowKeyForMetadata(keyData.getKeyCustodian(), keyData.getKeyNamespace(), keyData.getKeyMetadataHash()); } - private static byte[] constructRowKeyForMetadata(byte[] cust_spec, String keyNamespace, + private static byte[] constructRowKeyForMetadata(byte[] key_cust, String keyNamespace, byte[] keyMetadataHash) { - int prefixLength = cust_spec.length; - return Bytes.add(Bytes.toBytes(prefixLength), cust_spec, Bytes.toBytesBinary(keyNamespace), + int prefixLength = key_cust.length; + return Bytes.add(Bytes.toBytes(prefixLength), key_cust, Bytes.toBytesBinary(keyNamespace), keyMetadataHash); } - private ManagedKeyData parseFromResult(byte[] cust_spec, String keyNamespace, Result result) + private ManagedKeyData parseFromResult(byte[] key_cust, String keyNamespace, Result result) throws IOException, KeyException { if (result == null || result.isEmpty()) { return null; @@ -264,7 +264,7 @@ private ManagedKeyData parseFromResult(byte[] cust_spec, String keyNamespace, Re byte[] writeOpValue = result.getValue(KEY_META_INFO_FAMILY, WRITE_OP_COUNT_QUAL_BYTES); long writeOpCount = writeOpValue != null ? Bytes.toLong(writeOpValue) : 0; ManagedKeyData - dekKeyData = new ManagedKeyData(cust_spec, keyNamespace, dek, keyStatus, dekMetadata, + dekKeyData = new ManagedKeyData(key_cust, keyNamespace, dek, keyStatus, dekMetadata, refreshedTimestamp, readOpCount, writeOpCount); if (dek != null) { long dekChecksum = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java index 24d496f10bfc..c5608510830d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java @@ -43,27 +43,27 @@ public ManagedKeyAccessor(KeymetaTableAccessor keymetaAccessor) { /** * Get key data by key metadata. * - * @param cust_spec The custodian spec. + * @param key_cust The key custodian. * @param keyNamespace The namespace of the key * @param keyMetadata The metadata of the key * @return The key data or {@code null} * @throws IOException if an error occurs while retrieving the key */ - public ManagedKeyData getKey(byte[] cust_spec, String keyNamespace, String keyMetadata) + public ManagedKeyData getKey(byte[] key_cust, String keyNamespace, String keyMetadata) throws IOException, KeyException { checkPBEEnabled(); // 1. Check L1 cache. ManagedKeyData keyData = keyDataCache.getEntry(keyMetadata); if (keyData == null) { // 2. Check L2 cache. - keyData = keymetaAccessor.getKey(cust_spec, keyNamespace, keyMetadata); + keyData = keymetaAccessor.getKey(key_cust, keyNamespace, keyMetadata); if (keyData == null) { // 3. Check with Key Provider. ManagedKeyProvider provider = getKeyProvider(); keyData = provider.unwrapKey(keyMetadata); LOG.info("Got key data with status: {} and metadata: {} for prefix: {}", keyData.getKeyStatus(), keyData.getKeyMetadata(), - ManagedKeyProvider.encodeToStr(cust_spec)); + ManagedKeyProvider.encodeToStr(key_cust)); keymetaAccessor.addKey(keyData); } if (keyData != null) { @@ -76,21 +76,21 @@ public ManagedKeyData getKey(byte[] cust_spec, String keyNamespace, String keyMe /** * Get an active key for the given prefix suitable for use in encryption. * - * @param cust_spec The custodian specification + * @param key_cust The key custodian. * @param keyNamespace The namespace of the key * @return The key data * @throws IOException if an error occurs while retrieving the key */ - public ManagedKeyData getAnActiveKey(byte[] cust_spec, String keyNamespace) + public ManagedKeyData getAnActiveKey(byte[] key_cust, String keyNamespace) throws IOException, KeyException { checkPBEEnabled(); - ManagedKeyData keyData = keyDataCache.getRandomEntryForPrefix(cust_spec, keyNamespace); + ManagedKeyData keyData = keyDataCache.getRandomEntryForPrefix(key_cust, keyNamespace); if (keyData == null) { - List activeKeys = keymetaAccessor.getActiveKeys(cust_spec, keyNamespace); + List activeKeys = keymetaAccessor.getActiveKeys(key_cust, keyNamespace); for (ManagedKeyData kd: activeKeys) { keyDataCache.addEntry(kd); } - keyData = keyDataCache.getRandomEntryForPrefix(cust_spec, keyNamespace); + keyData = keyDataCache.getRandomEntryForPrefix(key_cust, keyNamespace); } return keyData; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java index 2406559decb4..6e8e977e9a69 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java @@ -51,7 +51,7 @@ public ManagedKeyDataCache() { public void addEntry(ManagedKeyData keyData) { lock.lock(); try { - Bytes custSpec = new Bytes(keyData.getCustodianSpec()); + Bytes keyCust = new Bytes(keyData.getKeyCustodian()); String keyNamespace = keyData.getKeyNamespace(); cache.put(keyData.getKeyMetadata(), keyData); @@ -62,10 +62,10 @@ public void addEntry(ManagedKeyData keyData) { prefixCache.put(keyNamespace, nsCache); } - List keyList = nsCache.get(custSpec); + List keyList = nsCache.get(keyCust); if (keyList == null) { keyList = new ArrayList<>(); - prefixCache.get(keyNamespace).put(custSpec, keyList); + prefixCache.get(keyNamespace).put(keyCust, keyList); } keyList.add(keyData); @@ -100,14 +100,14 @@ public ManagedKeyData removeEntry(String keyMetadata) { try { ManagedKeyData removedEntry = cache.remove(keyMetadata); if (removedEntry != null) { - Bytes custSpec = new Bytes(removedEntry.getCustodianSpec()); + Bytes keyCust = new Bytes(removedEntry.getKeyCustodian()); String keyNamespace = removedEntry.getKeyNamespace(); Map> nsCache = prefixCache.get(keyNamespace); - List keyList = nsCache != null ? nsCache.get(custSpec) : null; + List keyList = nsCache != null ? nsCache.get(keyCust) : null; if (keyList != null) { keyList.remove(removedEntry); if (keyList.isEmpty()) { - prefixCache.get(keyNamespace).remove(custSpec); + prefixCache.get(keyNamespace).remove(keyCust); } } } @@ -121,18 +121,18 @@ public ManagedKeyData removeEntry(String keyMetadata) { * Retrieves a random entry from the cache based on its PBE prefix, key namespace, and filters out entries with * a status other than ACTIVE. * - * @param cust_spec the custodian specification. + * @param key_cust The key custodian. * @param keyNamespace the key namespace to search for * @return a random ManagedKeyData entry with the given PBE prefix and ACTIVE status, or null if not found */ - public ManagedKeyData getRandomEntryForPrefix(byte[] cust_spec, String keyNamespace) { + public ManagedKeyData getRandomEntryForPrefix(byte[] key_cust, String keyNamespace) { lock.lock(); try { List activeEntries = new ArrayList<>(); - Bytes custSpec = new Bytes(cust_spec); + Bytes keyCust = new Bytes(key_cust); Map> nsCache = prefixCache.get(keyNamespace); - List keyList = nsCache != null ? nsCache.get(custSpec) : null; + List keyList = nsCache != null ? nsCache.get(keyCust) : null; if (keyList != null) { for (ManagedKeyData entry : keyList) { if (entry.getKeyStatus() == ManagedKeyStatus.ACTIVE) { From e5b82b920c7dec68b697c88075727a00c1f0d0b0 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Thu, 3 Apr 2025 14:48:02 +0530 Subject: [PATCH 16/70] Replaced left over references to PBE --- .../org/apache/hadoop/hbase/HConstants.java | 25 ++++++++------- .../hbase/io/crypto/ManagedKeyStatus.java | 12 +++---- .../io/crypto/ManagedKeyStoreKeyProvider.java | 14 ++++---- .../TestManagedKeyStoreKeyProvider.java | 32 +++++++++---------- .../main/protobuf/server/ManagedKeys.proto | 8 ++--- .../apache/hadoop/hbase/HBaseServerBase.java | 6 ++-- .../hbase/MockRegionServerServices.java | 4 +-- .../java/org/apache/hadoop/hbase/Server.java | 13 ++++---- .../hbase/keymeta/KeyManagementBase.java | 32 +++++++++---------- .../hbase/keymeta/KeymetaAdminImpl.java | 18 ++++++----- .../hbase/keymeta/KeymetaMasterService.java | 19 ++++------- .../hbase/keymeta/KeymetaServiceEndpoint.java | 31 ++++++++---------- .../hbase/keymeta/KeymetaTableAccessor.java | 12 +++---- .../hbase/keymeta/ManagedKeyAccessor.java | 8 ++--- .../hbase/keymeta/ManagedKeyDataCache.java | 7 ++-- .../hbase/keymeta/SystemKeyAccessor.java | 11 ++++--- .../hadoop/hbase/master/SystemKeyManager.java | 6 ++-- .../regionserver/ReplicationSyncUp.java | 4 +-- .../hbase/keymeta/ManagedKeyTestBase.java | 2 +- .../hbase/master/MockNoopMasterServices.java | 4 +-- .../hadoop/hbase/master/MockRegionServer.java | 4 +-- .../hbase/master/TestActiveMasterManager.java | 4 +-- .../cleaner/TestReplicationHFileCleaner.java | 4 +-- ...onProcedureStorePerformanceEvaluation.java | 4 +-- .../regionserver/TestHeapMemoryManager.java | 4 +-- .../token/TestTokenAuthentication.java | 4 +-- .../apache/hadoop/hbase/util/MockServer.java | 4 +-- .../hadoop/hbase/HBaseTestingUtility.java | 2 +- 28 files changed, 147 insertions(+), 151 deletions(-) diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java index 84b6659ee4ac..8cdd6bf03645 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java @@ -1289,18 +1289,6 @@ public enum OperationStatusCode { /** Configuration key for the name of the master key for the cluster, a string */ public static final String CRYPTO_MASTERKEY_NAME_CONF_KEY = "hbase.crypto.master.key.name"; - public static final String CRYPTO_PBE_MASTERKEY_NAME_CONF_KEY = - "hbase.crypto.pbe.master.key.name"; - - public static final String CRYPTO_PBE_ENABLED_CONF_KEY = "hbase.crypto.pbe.enabled"; - public static final boolean CRYPTO_PBE_DEFAULT_ENABLED = false; - - public static final String CRYPTO_PBE_PER_PREFIX_ACTIVE_KEY_COUNT = - "hbase.crypto.pbe.per_prefix.active_count"; - public static final int CRYPTO_PBE_PER_PREFIX_ACTIVE_KEY_DEFAULT_COUNT = 1; - - public static final String CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX = "hbase.crypto.pbe.prefix."; - /** Configuration key for the name of the alternate master key for the cluster, a string */ public static final String CRYPTO_MASTERKEY_ALTERNATE_NAME_CONF_KEY = "hbase.crypto.master.alternate.key.name"; @@ -1321,6 +1309,19 @@ public enum OperationStatusCode { /** Configuration key for enabling WAL encryption, a boolean */ public static final String ENABLE_WAL_ENCRYPTION = "hbase.regionserver.wal.encryption"; + public static final String CRYPTO_MANAGED_KEY_STORE_SYSTEM_KEY_NAME_CONF_KEY = + "hbase.crypto.managed_key_store.system.key.name"; + public static final String CRYPTO_MANAGED_KEY_STORE_CONF_KEY_PREFIX = + "hbase.crypto.managed_key_store.cust."; + + public static final String CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY = + "hbase.crypto.managed_keys.enabled"; + public static final boolean CRYPTO_MANAGED_KEYS_DEFAULT_ENABLED = false; + + public static final String CRYPTO_MANAGED_KEYS_PER_CUST_ACTIVE_KEY_COUNT = + "hbase.crypto.managed_keys.per_cust.active_count"; + public static final int CRYPTO_MANAGED_KEYS_PER_CUST_ACTIVE_KEY_DEFAULT_COUNT = 1; + /** Configuration key for setting RPC codec class name */ public static final String RPC_CODEC_CONF_KEY = "hbase.client.rpc.codec"; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStatus.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStatus.java index 6ef9cfcfc77d..768927ba037b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStatus.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStatus.java @@ -22,17 +22,17 @@ import java.util.Map; /** - * Enum of PBE key status. The status of a PBE key is used to indicate the state of the key. + * Enum of Managed key status. It is used to indicate the status of managed custodian keys. */ @InterfaceAudience.Public public enum ManagedKeyStatus { - /** Represents the active status of a PBE key. */ + /** Represents the active status of a managed key. */ ACTIVE((byte) 1), - /** Represents the inactive status of a PBE key. */ + /** Represents the inactive status of a managed key. */ INACTIVE((byte) 2), - /** Represents the retrieval failure status of a PBE key. */ + /** Represents the retrieval failure status of a managed key. */ FAILED((byte) 3), - /** Represents the disabled status of a PBE key. */ + /** Represents the disabled status of a managed key. */ DISABLED((byte) 4), ; @@ -45,7 +45,7 @@ private ManagedKeyStatus(byte val) { } /** - * Returns the numeric value of the PBE key status. + * Returns the numeric value of the managed key status. * @return byte value */ public byte getVal() { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java index 5e3b1043c850..21f23da4cd34 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java @@ -12,7 +12,7 @@ @InterfaceAudience.Public public class ManagedKeyStoreKeyProvider extends KeyStoreKeyProvider implements ManagedKeyProvider { public static final String KEY_METADATA_ALIAS = "KeyAlias"; - public static final String KEY_METADATA_PREFIX = "PBE_PREFIX"; + public static final String KEY_METADATA_CUST = "KEY_CUST"; private Configuration conf; @@ -24,7 +24,7 @@ public void initConfig(Configuration conf) { @Override public ManagedKeyData getSystemKey(byte[] clusterId) { checkConfig(); - String masterKeyAlias = conf.get(HConstants.CRYPTO_PBE_MASTERKEY_NAME_CONF_KEY, null); + String masterKeyAlias = conf.get(HConstants.CRYPTO_MANAGED_KEY_STORE_SYSTEM_KEY_NAME_CONF_KEY, null); if (masterKeyAlias == null) { throw new RuntimeException("No alias configured for master key"); } @@ -32,7 +32,7 @@ public ManagedKeyData getSystemKey(byte[] clusterId) { if (key == null) { throw new RuntimeException("Unable to find cluster key with alias: " + masterKeyAlias); } - // Encode clusterId too for consistency with that of PBE prefixes. + // Encode clusterId too for consistency with that of key custodian. String keyMetadata = generateKeyMetadata(masterKeyAlias, ManagedKeyProvider.encodeToStr(clusterId)); return new ManagedKeyData(clusterId, ManagedKeyData.KEY_NAMESPACE_GLOBAL, key, ManagedKeyStatus.ACTIVE, @@ -43,7 +43,7 @@ public ManagedKeyData getSystemKey(byte[] clusterId) { public ManagedKeyData getManagedKey(byte[] key_cust, String key_namespace) throws IOException { checkConfig(); String encodedPrefix = ManagedKeyProvider.encodeToStr(key_cust); - String aliasConfKey = HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encodedPrefix + "." + + String aliasConfKey = HConstants.CRYPTO_MANAGED_KEY_STORE_CONF_KEY_PREFIX + encodedPrefix + "." + "alias"; String keyMetadata = generateKeyMetadata(conf.get(aliasConfKey, null), encodedPrefix); return unwrapKey(keyMetadata); @@ -53,8 +53,8 @@ public ManagedKeyData getManagedKey(byte[] key_cust, String key_namespace) throw public ManagedKeyData unwrapKey(String keyMetadataStr) throws IOException { Map keyMetadata = GsonUtil.getDefaultInstance().fromJson(keyMetadataStr, HashMap.class); - String encodedPrefix = keyMetadata.get(KEY_METADATA_PREFIX); - String activeStatusConfKey = HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encodedPrefix + + String encodedPrefix = keyMetadata.get(KEY_METADATA_CUST); + String activeStatusConfKey = HConstants.CRYPTO_MANAGED_KEY_STORE_CONF_KEY_PREFIX + encodedPrefix + ".active"; boolean isActive = conf.getBoolean(activeStatusConfKey, true); byte[] key_cust = ManagedKeyProvider.decodeToBytes(encodedPrefix); @@ -77,7 +77,7 @@ private void checkConfig() { public static String generateKeyMetadata(String aliasName, String encodedPrefix) { return GsonUtil.getDefaultInstance().toJson(new HashMap() {{ put(KEY_METADATA_ALIAS, aliasName); - put(KEY_METADATA_PREFIX, encodedPrefix); + put(KEY_METADATA_CUST, encodedPrefix); }}, HashMap.class); } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyStoreKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyStoreKeyProvider.java index 3733bee5bb3c..d09f4540e1c9 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyStoreKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyStoreKeyProvider.java @@ -40,7 +40,7 @@ import java.util.Properties; import java.util.UUID; import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStoreKeyProvider.KEY_METADATA_ALIAS; -import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStoreKeyProvider.KEY_METADATA_PREFIX; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStoreKeyProvider.KEY_METADATA_CUST; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; @@ -88,7 +88,7 @@ protected void addCustomEntries(KeyStore store, Properties passwdProps) throws E new KeyStore.PasswordProtection(withPasswordOnAlias ? PASSWORD.toCharArray() : new char[0])); String encPrefix = Base64.getEncoder().encodeToString(prefix.getBytes()); - String confKey = HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encPrefix + "." + "alias"; + String confKey = HConstants.CRYPTO_MANAGED_KEY_STORE_CONF_KEY_PREFIX + encPrefix + "." + "alias"; conf.set(confKey, alias); passwdProps.setProperty(alias, PASSWORD); @@ -101,7 +101,7 @@ protected void addCustomEntries(KeyStore store, Properties passwdProps) throws E new KeyStore.PasswordProtection(withPasswordOnAlias ? PASSWORD.toCharArray() : new char[0])); - conf.set(HConstants.CRYPTO_PBE_MASTERKEY_NAME_CONF_KEY, MASTER_KEY_ALIAS); + conf.set(HConstants.CRYPTO_MANAGED_KEY_STORE_SYSTEM_KEY_NAME_CONF_KEY, MASTER_KEY_ALIAS); passwdProps.setProperty(MASTER_KEY_ALIAS, PASSWORD); } @@ -109,7 +109,7 @@ protected void addCustomEntries(KeyStore store, Properties passwdProps) throws E private void addEntry(String alias, String prefix) { String encPrefix = Base64.getEncoder().encodeToString(prefix.getBytes()); - String confKey = HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encPrefix + "." + "alias"; + String confKey = HConstants.CRYPTO_MANAGED_KEY_STORE_CONF_KEY_PREFIX + encPrefix + "." + "alias"; conf.set(confKey, alias); } @@ -117,7 +117,7 @@ private void addEntry(String alias, String prefix) { public void testGetManagedKey() throws Exception { for (Bytes prefix : prefix2key.keySet()) { ManagedKeyData keyData = managedKeyProvider.getManagedKey(prefix.get(), ManagedKeyData.KEY_NAMESPACE_GLOBAL); - assertPBEKeyData(keyData, ManagedKeyStatus.ACTIVE, prefix2key.get(prefix).get(), prefix.get(), + assertKeyData(keyData, ManagedKeyStatus.ACTIVE, prefix2key.get(prefix).get(), prefix.get(), prefix2alias.get(prefix)); } } @@ -126,10 +126,10 @@ public void testGetManagedKey() throws Exception { public void testGetInactiveKey() throws Exception { Bytes firstPrefix = prefix2key.keySet().iterator().next(); String encPrefix = Base64.getEncoder().encodeToString(firstPrefix.get()); - conf.set(HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encPrefix + ".active", "false"); + conf.set(HConstants.CRYPTO_MANAGED_KEY_STORE_CONF_KEY_PREFIX + encPrefix + ".active", "false"); ManagedKeyData keyData = managedKeyProvider.getManagedKey(firstPrefix.get(), ManagedKeyData.KEY_NAMESPACE_GLOBAL); assertNotNull(keyData); - assertPBEKeyData(keyData, ManagedKeyStatus.INACTIVE, prefix2key.get(firstPrefix).get(), + assertKeyData(keyData, ManagedKeyStatus.INACTIVE, prefix2key.get(firstPrefix).get(), firstPrefix.get(), prefix2alias.get(firstPrefix)); } @@ -139,24 +139,24 @@ public void testGetInvalidKey() throws Exception { ManagedKeyData keyData = managedKeyProvider.getManagedKey(invalidPrefixBytes, ManagedKeyData.KEY_NAMESPACE_GLOBAL); assertNotNull(keyData); - assertPBEKeyData(keyData, ManagedKeyStatus.FAILED, null, invalidPrefixBytes, null); + assertKeyData(keyData, ManagedKeyStatus.FAILED, null, invalidPrefixBytes, null); } @Test public void testGetDisabledKey() throws Exception { byte[] invalidPrefix = new byte[] { 1, 2, 3 }; String invalidPrefixEnc = ManagedKeyProvider.encodeToStr(invalidPrefix); - conf.set(HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + invalidPrefixEnc + ".active", "false"); + conf.set(HConstants.CRYPTO_MANAGED_KEY_STORE_CONF_KEY_PREFIX + invalidPrefixEnc + ".active", "false"); ManagedKeyData keyData = managedKeyProvider.getManagedKey(invalidPrefix, ManagedKeyData.KEY_NAMESPACE_GLOBAL); assertNotNull(keyData); - assertPBEKeyData(keyData, ManagedKeyStatus.DISABLED, null, + assertKeyData(keyData, ManagedKeyStatus.DISABLED, null, invalidPrefix, null); } @Test public void testGetSystemKey() throws Exception { ManagedKeyData clusterKeyData = managedKeyProvider.getSystemKey(clusterId.getBytes()); - assertPBEKeyData(clusterKeyData, ManagedKeyStatus.ACTIVE, masterKey, clusterId.getBytes(), + assertKeyData(clusterKeyData, ManagedKeyStatus.ACTIVE, masterKey, clusterId.getBytes(), MASTER_KEY_ALIAS); } @@ -169,7 +169,7 @@ public void testUnwrapInvalidKey() throws Exception { invalidPrefixEnc); ManagedKeyData keyData = managedKeyProvider.unwrapKey(invalidMetadata); assertNotNull(keyData); - assertPBEKeyData(keyData, ManagedKeyStatus.FAILED, null, invalidPrefix, + assertKeyData(keyData, ManagedKeyStatus.FAILED, null, invalidPrefix, invalidAlias); } @@ -178,15 +178,15 @@ public void testUnwrapDisabledKey() throws Exception { String invalidAlias = "invalidAlias"; byte[] invalidPrefix = new byte[] { 1, 2, 3 }; String invalidPrefixEnc = ManagedKeyProvider.encodeToStr(invalidPrefix); - conf.set(HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + invalidPrefixEnc + ".active", "false"); + conf.set(HConstants.CRYPTO_MANAGED_KEY_STORE_CONF_KEY_PREFIX + invalidPrefixEnc + ".active", "false"); String invalidMetadata = ManagedKeyStoreKeyProvider.generateKeyMetadata(invalidAlias, invalidPrefixEnc); ManagedKeyData keyData = managedKeyProvider.unwrapKey(invalidMetadata); assertNotNull(keyData); - assertPBEKeyData(keyData, ManagedKeyStatus.DISABLED, null, invalidPrefix, invalidAlias); + assertKeyData(keyData, ManagedKeyStatus.DISABLED, null, invalidPrefix, invalidAlias); } - private void assertPBEKeyData(ManagedKeyData keyData, ManagedKeyStatus expKeyStatus, byte[] key, + private void assertKeyData(ManagedKeyData keyData, ManagedKeyStatus expKeyStatus, byte[] key, byte[] prefixBytes, String alias) throws Exception { assertNotNull(keyData); assertEquals(expKeyStatus, keyData.getKeyStatus()); @@ -204,7 +204,7 @@ private void assertPBEKeyData(ManagedKeyData keyData, ManagedKeyStatus expKeySta assertEquals(new Bytes(prefixBytes), keyData.getKeyCustodian()); assertEquals(alias, keyMetadata.get(KEY_METADATA_ALIAS)); assertEquals(Base64.getEncoder().encodeToString(prefixBytes), - keyMetadata.get(KEY_METADATA_PREFIX)); + keyMetadata.get(KEY_METADATA_CUST)); assertEquals(keyData, managedKeyProvider.unwrapKey(keyData.getKeyMetadata())); } } diff --git a/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto b/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto index 94fed871900b..acbc71b41dbf 100644 --- a/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto +++ b/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto @@ -30,10 +30,10 @@ message ManagedKeysRequest { } enum ManagedKeyStatus { - PBE_ACTIVE = 1; - PBE_INACTIVE = 2; - PBE_FAILED = 3; - PBE_DISABLED = 4; + KEY_ACTIVE = 1; + KEY_INACTIVE = 2; + KEY_FAILED = 3; + KEY_DISABLED = 4; } message ManagedKeysResponse { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java index f52afc418381..8c18ad2349a4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java @@ -415,12 +415,12 @@ public ZKWatcher getZooKeeper() { } @Override - public KeymetaAdmin getPBEKeymetaAdmin() { + public KeymetaAdmin getKeymetaAdmin() { return keymetaAdmin; } @Override - public ManagedKeyAccessor getPBEKeyAccessor() { + public ManagedKeyAccessor getManagedKeyAccessor() { return managedKeyAccessor; } @@ -430,7 +430,7 @@ public SystemKeyCache getSystemKeyCache() { } protected void buildSystemKeyCache() throws IOException { - if (systemKeyCache == null && Server.isPBEEnabled(this)) { + if (systemKeyCache == null && Server.isKeyManagementEnabled(this)) { systemKeyCache = SystemKeyCache.createCache(new SystemKeyAccessor(this)); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java index f4097eb2909f..821a7a2a7dda 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java @@ -263,11 +263,11 @@ public ChoreService getChoreService() { return null; } - @Override public ManagedKeyAccessor getPBEKeyAccessor() { + @Override public ManagedKeyAccessor getManagedKeyAccessor() { return null; } - @Override public KeymetaAdmin getPBEKeymetaAdmin() { + @Override public KeymetaAdmin getKeymetaAdmin() { return null; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java index bd0195d04222..120430b5c637 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java @@ -94,12 +94,12 @@ default AsyncConnection getAsyncConnection() { /** * @return the accessor for cluster keys. */ - public ManagedKeyAccessor getPBEKeyAccessor(); + public ManagedKeyAccessor getManagedKeyAccessor(); /** * @return the admin for keymeta. */ - public KeymetaAdmin getPBEKeymetaAdmin(); + public KeymetaAdmin getKeymetaAdmin(); /** Returns Return the FileSystem object used (can return null!). */ // TODO: Distinguish between "dataFs" and "walFs". @@ -124,12 +124,13 @@ default boolean isStopping() { } /** - * From the given server, determine if PBE is enabbled. - * @return true if PBE is enabled + * From the given server, determine if key management is enabbled. + * @return true if key management is enabled */ - static boolean isPBEEnabled(Server server) { + static boolean isKeyManagementEnabled(Server server) { return server.getConfiguration() - .getBoolean(HConstants.CRYPTO_PBE_ENABLED_CONF_KEY, HConstants.CRYPTO_PBE_DEFAULT_ENABLED); + .getBoolean(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, + HConstants.CRYPTO_MANAGED_KEYS_DEFAULT_ENABLED); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java index d95d98ab9146..b304490f71e7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java @@ -36,7 +36,7 @@ public abstract class KeyManagementBase { protected final Server server; - private Boolean pbeEnabled; + private Boolean keyManagementEnabled; private Integer perPrefixActiveKeyCount; public KeyManagementBase(Server server) { @@ -44,7 +44,7 @@ public KeyManagementBase(Server server) { } /** - * A utility method for getting the PBE key provider. + * A utility method for getting the managed key provider. * @return the key provider * @throws RuntimeException if no provider is configured or if the configured provider is not an * instance of ManagedKeyProvider @@ -59,35 +59,35 @@ protected ManagedKeyProvider getKeyProvider() { } /** - * A utility method for checking if PBE is enabled. - * @return true if PBE is enabled + * A utility method for checking if key management is enabled. + * @return true if key management is enabled */ - protected boolean isPBEEnabled() { - if (pbeEnabled == null) { - pbeEnabled = Server.isPBEEnabled(server); + protected boolean isKeyManagementEnabled() { + if (keyManagementEnabled == null) { + keyManagementEnabled = Server.isKeyManagementEnabled(server); } - return pbeEnabled; + return keyManagementEnabled; } /** - * Check if PBE is enabled, otherwise throw exception. - * @throws IOException if PBE is not enabled. + * Check if key management is enabled, otherwise throw exception. + * @throws IOException if key management is not enabled. */ - protected void checkPBEEnabled() throws IOException { - if (! isPBEEnabled()) { - throw new IOException("PBE is currently not enabled in HBase configuration"); + protected void assertKeyManagementEnabled() throws IOException { + if (! isKeyManagementEnabled()) { + throw new IOException("Key manage is currently not enabled in HBase configuration"); } } protected int getPerPrefixActiveKeyConfCount() throws IOException { if (perPrefixActiveKeyCount == null) { perPrefixActiveKeyCount = server.getConfiguration().getInt( - HConstants.CRYPTO_PBE_PER_PREFIX_ACTIVE_KEY_COUNT, - HConstants.CRYPTO_PBE_PER_PREFIX_ACTIVE_KEY_DEFAULT_COUNT); + HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_ACTIVE_KEY_COUNT, + HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_ACTIVE_KEY_DEFAULT_COUNT); } if (perPrefixActiveKeyCount <= 0) { throw new IOException("Invalid value: " + perPrefixActiveKeyCount + " configured for: " + - HConstants.CRYPTO_PBE_PER_PREFIX_ACTIVE_KEY_COUNT); + HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_ACTIVE_KEY_COUNT); } return perPrefixActiveKeyCount; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java index 0cc25ef1afc3..36404a5f4056 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java @@ -40,8 +40,9 @@ public KeymetaAdminImpl(Server server) { @Override public ManagedKeyStatus enableManagedKeys(String keyCust, String keyNamespace) throws IOException { - checkPBEEnabled(); - LOG.info("Trying to enable PBE on key: {} under namespace: {}", keyCust, keyNamespace); + assertKeyManagementEnabled(); + LOG.info("Trying to enable key management on custodian: {} under namespace: {}", keyCust, + keyNamespace); byte[] key_cust = ManagedKeyProvider.decodeToBytes(keyCust); ManagedKeyProvider provider = getKeyProvider(); int perPrefixActiveKeyConfCount = getPerPrefixActiveKeyConfCount(); @@ -50,16 +51,17 @@ public ManagedKeyStatus enableManagedKeys(String keyCust, String keyNamespace) t for (int i = 0; i < perPrefixActiveKeyConfCount; ++i) { pbeKey = provider.getManagedKey(key_cust, keyNamespace); if (pbeKey == null) { - throw new IOException("Invalid null PBE key received from key provider"); + throw new IOException("Invalid null managed key received from key provider"); } if (retrievedKeys.contains(pbeKey)) { // This typically means, the key provider is not capable of producing multiple active keys. - LOG.info("enablePBE: configured key count per prefix: " + perPrefixActiveKeyConfCount + - " but received only: " + retrievedKeys.size() + " unique keys."); + LOG.info("enableManagedKeys: configured key count per prefix: " + + perPrefixActiveKeyConfCount + " but received only: " + retrievedKeys.size() + + " unique keys."); break; } retrievedKeys.add(pbeKey); - LOG.info("enablePBE: got key data with status: {} and metadata: {} for prefix: {}", + LOG.info("enableManagedKeys: got key data with status: {} and metadata: {} for custodian: {}", pbeKey.getKeyStatus(), pbeKey.getKeyMetadata(), keyCust); addKey(pbeKey); } @@ -71,8 +73,8 @@ public ManagedKeyStatus enableManagedKeys(String keyCust, String keyNamespace) t @Override public List getManagedKeys(String keyCust, String keyNamespace) throws IOException, KeyException { - checkPBEEnabled(); - LOG.info("Getting key statuses for PBE on key: {} under namespace: {}", keyCust, + assertKeyManagementEnabled(); + LOG.info("Getting key statuses for custodian: {} under namespace: {}", keyCust, keyNamespace); byte[] key_cust = ManagedKeyProvider.decodeToBytes(keyCust); return super.getAllKeys(key_cust, keyNamespace); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaMasterService.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaMasterService.java index 0a1d85c9dfeb..497ed0a7d93f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaMasterService.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaMasterService.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hbase.keymeta; import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; import org.apache.hadoop.hbase.client.TableDescriptorBuilder; import org.apache.hadoop.hbase.master.MasterServices; @@ -27,11 +28,10 @@ import java.io.IOException; @InterfaceAudience.Private -public class KeymetaMasterService { +public class KeymetaMasterService extends KeyManagementBase { private static final Logger LOG = LoggerFactory.getLogger(KeymetaMasterService.class); private final MasterServices master; - Boolean pbeEnabled; private static final TableDescriptorBuilder TABLE_DESCRIPTOR_BUILDER = TableDescriptorBuilder .newBuilder(KeymetaTableAccessor.KEY_META_TABLE_NAME).setRegionReplication(1) @@ -43,25 +43,18 @@ public class KeymetaMasterService { .build()); public KeymetaMasterService(MasterServices masterServices) { - this.master = masterServices; + super(masterServices); + master = masterServices; } public void init() throws IOException { - if (!isPBEEnabled()) { + if (!isKeyManagementEnabled()) { return; } if (!master.getTableDescriptors().exists(KeymetaTableAccessor.KEY_META_TABLE_NAME)) { LOG.info("{} table not found. Creating.", KeymetaTableAccessor.KEY_META_TABLE_NAME.getNameWithNamespaceInclAsString()); - this.master.createSystemTable(TABLE_DESCRIPTOR_BUILDER.build()); + master.createSystemTable(TABLE_DESCRIPTOR_BUILDER.build()); } } - - private boolean isPBEEnabled() { - if (pbeEnabled == null) { - pbeEnabled = master.getConfiguration().getBoolean(HConstants.CRYPTO_PBE_ENABLED_CONF_KEY, - false); - } - return pbeEnabled; - } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java index 11240bfcb5eb..0e65a7d01a30 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java @@ -44,13 +44,8 @@ import java.util.List; /** - * This class implements a coprocessor service endpoint for the Phoenix Query Server's - * PBE (Prefix Based Encryption) key metadata operations. It handles the following - * methods: - * - *
      - *
    • enablePBE(): Enables PBE for a given key_cust and namespace.
    • - *
    + * This class implements a coprocessor service endpoint for the key management metadata operations. + * It handles the following methods: * * This endpoint is designed to work in conjunction with the {@link KeymetaAdmin} * interface, which provides the actual implementation of the key metadata operations. @@ -94,11 +89,13 @@ public Iterable getServices() { /** * The implementation of the {@link ManagedKeysProtos.ManagedKeysService} - * interface, which provides the actual method implementations for enabling PBE. + * interface, which provides the actual method implementations for enabling key management. */ private class KeyMetaAdminServiceImpl extends ManagedKeysService { + /** - * Enables PBE for a given tenant and namespace, as specified in the provided request. + * Enables key management for a given tenant and namespace, as specified in the provided + * request. * * @param controller The RPC controller. * @param request The request containing the tenant and table specifications. @@ -110,13 +107,13 @@ public void enableManagedKeys(RpcController controller, ManagedKeysRequest reque ManagedKeysResponse.Builder builder = getResponseBuilder(controller, request); if (builder.getKeyCust() != null) { try { - ManagedKeyStatus managedKeyStatus = master.getPBEKeymetaAdmin() + ManagedKeyStatus managedKeyStatus = master.getKeymetaAdmin() .enableManagedKeys(request.getKeyCust(), request.getKeyNamespace()); builder.setPbeStatus(ManagedKeysProtos.ManagedKeyStatus.valueOf( managedKeyStatus.getVal())); } catch (IOException e) { CoprocessorRpcUtils.setControllerException(controller, e); - builder.setPbeStatus(ManagedKeysProtos.ManagedKeyStatus.PBE_FAILED); + builder.setPbeStatus(ManagedKeysProtos.ManagedKeyStatus.KEY_FAILED); } } done.run(builder.build()); @@ -130,7 +127,7 @@ public void getManagedKeys(RpcController controller, ManagedKeysRequest request, ManagedKeysResponse.Builder builder = getResponseBuilder(controller, request); if (builder.getKeyCust() != null) { try { - List managedKeyStatuses = master.getPBEKeymetaAdmin() + List managedKeyStatuses = master.getKeymetaAdmin() .getManagedKeys(request.getKeyCust(), request.getKeyNamespace()); for (ManagedKeyData keyData: managedKeyStatuses) { builder.setPbeStatus( @@ -146,22 +143,22 @@ public void getManagedKeys(RpcController controller, ManagedKeysRequest request, } } catch (IOException e) { CoprocessorRpcUtils.setControllerException(controller, e); - builder.setPbeStatus(ManagedKeysProtos.ManagedKeyStatus.PBE_FAILED); + builder.setPbeStatus(ManagedKeysProtos.ManagedKeyStatus.KEY_FAILED); } catch (KeyException e) { CoprocessorRpcUtils.setControllerException(controller, new IOException(e)); - builder.setPbeStatus(ManagedKeysProtos.ManagedKeyStatus.PBE_FAILED); + builder.setPbeStatus(ManagedKeysProtos.ManagedKeyStatus.KEY_FAILED); } } done.run(responseBuilder.build()); } - private byte[] convertToPBEBytes(RpcController controller, ManagedKeysRequest request, + private byte[] convertToKeyCustBytes(RpcController controller, ManagedKeysRequest request, ManagedKeysResponse.Builder builder) { byte[] key_cust = null; try { key_cust = Base64.getDecoder().decode(request.getKeyCust()); } catch (IllegalArgumentException e) { - builder.setPbeStatus(ManagedKeysProtos.ManagedKeyStatus.PBE_FAILED); + builder.setPbeStatus(ManagedKeysProtos.ManagedKeyStatus.KEY_FAILED); CoprocessorRpcUtils.setControllerException(controller, new IOException( "Failed to decode specified prefix as Base64 string: " + request.getKeyCust(), e)); } @@ -177,7 +174,7 @@ private ManagedKeysResponse.Builder getResponseBuilder(RpcController controller, key_cust = Base64.getDecoder().decode(request.getKeyCust()); builder.setKeyCustBytes(ByteString.copyFrom(key_cust)); } catch (IllegalArgumentException e) { - builder.setPbeStatus(ManagedKeysProtos.ManagedKeyStatus.PBE_FAILED); + builder.setPbeStatus(ManagedKeysProtos.ManagedKeyStatus.KEY_FAILED); CoprocessorRpcUtils.setControllerException(controller, new IOException( "Failed to decode specified prefix as Base64 string: " + request.getKeyCust(), e)); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java index 2b976a2c2540..ce22eaeaa447 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java @@ -43,7 +43,7 @@ import java.util.List; /** - * Accessor for PBE keymeta table. + * Accessor for keymeta table as part of key management. */ @InterfaceAudience.Private public class KeymetaTableAccessor extends KeyManagementBase { @@ -88,7 +88,7 @@ public KeymetaTableAccessor(Server server) { * @throws IOException when there is an underlying IOException. */ public void addKey(ManagedKeyData keyData) throws IOException { - checkPBEEnabled(); + assertKeyManagementEnabled(); final Put putForMetadata = addMutationColumns(new Put(constructRowKeyForMetadata(keyData)), keyData); Connection connection = server.getConnection(); @@ -108,7 +108,7 @@ public void addKey(ManagedKeyData keyData) throws IOException { */ protected List getAllKeys(byte[] key_cust, String keyNamespace) throws IOException, KeyException { - checkPBEEnabled(); + assertKeyManagementEnabled(); Connection connection = server.getConnection(); byte[] prefixForScan = Bytes.add(Bytes.toBytes(key_cust.length), key_cust, Bytes.toBytes(keyNamespace)); @@ -141,7 +141,7 @@ protected List getAllKeys(byte[] key_cust, String keyNamespace) */ public List getActiveKeys(byte[] key_cust, String keyNamespace) throws IOException, KeyException { - checkPBEEnabled(); + assertKeyManagementEnabled(); List activeKeys = new ArrayList<>(); for (ManagedKeyData keyData : getAllKeys(key_cust, keyNamespace)) { if (keyData.getKeyStatus() == ManagedKeyStatus.ACTIVE) { @@ -163,7 +163,7 @@ public List getActiveKeys(byte[] key_cust, String keyNamespace) */ public ManagedKeyData getKey(byte[] key_cust, String keyNamespace, String keyMetadata) throws IOException, KeyException { - checkPBEEnabled(); + assertKeyManagementEnabled(); Connection connection = server.getConnection(); try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { byte[] rowKey = constructRowKeyForMetadata(key_cust, keyNamespace, @@ -184,7 +184,7 @@ public ManagedKeyData getKey(byte[] key_cust, String keyNamespace, String keyMet */ public void reportOperation(byte[] key_cust, String keyNamespace, String keyMetadata, long count, boolean isReadOperation) throws IOException { - checkPBEEnabled(); + assertKeyManagementEnabled(); Connection connection = server.getConnection(); try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { byte[] rowKey = constructRowKeyForMetadata(key_cust, keyNamespace, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java index c5608510830d..923bd7b4436d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java @@ -25,8 +25,8 @@ import java.util.List; /** - * This class provides unified access on top of both {@code PBEKeyDataCache} (L1) and - * {@code KeymetaTableAccessor} (L2) to access PBE keys. When the getter is called, it first + * This class provides unified access on top of both {@code ManagedKeyDataCache} (L1) and + * {@code KeymetaTableAccessor} (L2) to access managed keys. When the getter is called, it first * checks if L1 cache has the key, if not, it tries to get the key from L2. */ @InterfaceAudience.Private @@ -51,7 +51,7 @@ public ManagedKeyAccessor(KeymetaTableAccessor keymetaAccessor) { */ public ManagedKeyData getKey(byte[] key_cust, String keyNamespace, String keyMetadata) throws IOException, KeyException { - checkPBEEnabled(); + assertKeyManagementEnabled(); // 1. Check L1 cache. ManagedKeyData keyData = keyDataCache.getEntry(keyMetadata); if (keyData == null) { @@ -83,7 +83,7 @@ public ManagedKeyData getKey(byte[] key_cust, String keyNamespace, String keyMet */ public ManagedKeyData getAnActiveKey(byte[] key_cust, String keyNamespace) throws IOException, KeyException { - checkPBEEnabled(); + assertKeyManagementEnabled(); ManagedKeyData keyData = keyDataCache.getRandomEntryForPrefix(key_cust, keyNamespace); if (keyData == null) { List activeKeys = keymetaAccessor.getActiveKeys(key_cust, keyNamespace); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java index 6e8e977e9a69..a204244cfa12 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java @@ -118,12 +118,13 @@ public ManagedKeyData removeEntry(String keyMetadata) { } /** - * Retrieves a random entry from the cache based on its PBE prefix, key namespace, and filters out entries with - * a status other than ACTIVE. + * Retrieves a random entry from the cache based on its key custodian, key namespace, and filters + * out entries with a status other than ACTIVE. * * @param key_cust The key custodian. * @param keyNamespace the key namespace to search for - * @return a random ManagedKeyData entry with the given PBE prefix and ACTIVE status, or null if not found + * @return a random ManagedKeyData entry with the given custodian and ACTIVE status, or null if + * not found */ public ManagedKeyData getRandomEntryForPrefix(byte[] key_cust, String keyNamespace) { lock.lock(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java index 1c73d5a3059b..0cc61c6ba103 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java @@ -44,7 +44,7 @@ public SystemKeyAccessor(Server server) throws IOException { } public Path getLatestSystemKeyFile() throws IOException { - if (! isPBEEnabled()) { + if (! isKeyManagementEnabled()) { return null; } List allClusterKeyFiles = getAllSystemKeyFiles(); @@ -57,19 +57,20 @@ public Path getLatestSystemKeyFile() throws IOException { /** * Return all available cluster key files and return them in the order of latest to oldest. - * If no cluster key files are available, then return an empty list. If PBE is not enabled, - * then return null. + * If no cluster key files are available, then return an empty list. If key management is not + * enabled, then return null. * * @return a list of all available cluster key files * @throws IOException */ public List getAllSystemKeyFiles() throws IOException { - if (!isPBEEnabled()) { + if (!isKeyManagementEnabled()) { return null; } FileSystem fs = server.getFileSystem(); Map clusterKeys = new TreeMap<>(Comparator.reverseOrder()); - for (FileStatus st : fs.globStatus(new Path(systemKeyDir, SYSTEM_KEY_FILE_PREFIX + "*"))) { + for (FileStatus st : fs.globStatus(new Path(systemKeyDir, + SYSTEM_KEY_FILE_PREFIX + "*"))) { Path keyPath = st.getPath(); int seqNum = extractSystemKeySeqNum(keyPath); clusterKeys.put(seqNum, keyPath); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java index ba40906c955b..f91fb22c49d4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java @@ -39,7 +39,7 @@ public SystemKeyManager(MasterServices master) throws IOException { } public void ensureSystemKeyInitialized() throws IOException { - if (! isPBEEnabled()) { + if (! isKeyManagementEnabled()) { return; } List clusterKeys = getAllSystemKeyFiles(); @@ -59,7 +59,7 @@ else if (rotateSystemKeyIfChanged() != null) { } public ManagedKeyData rotateSystemKeyIfChanged() throws IOException { - if (! isPBEEnabled()) { + if (! isKeyManagementEnabled()) { return null; } Path latestFile = getLatestSystemKeyFile(); @@ -68,7 +68,7 @@ public ManagedKeyData rotateSystemKeyIfChanged() throws IOException { } private ManagedKeyData rotateSystemKey(String currentKeyMetadata) throws IOException { - if (! isPBEEnabled()) { + if (! isKeyManagementEnabled()) { return null; } ManagedKeyProvider provider = getKeyProvider(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java index ea376369d542..ba2c9bbf2a0e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java @@ -373,11 +373,11 @@ public ChoreService getChoreService() { return null; } - @Override public ManagedKeyAccessor getPBEKeyAccessor() { + @Override public ManagedKeyAccessor getManagedKeyAccessor() { return null; } - @Override public KeymetaAdmin getPBEKeymetaAdmin() { + @Override public KeymetaAdmin getKeymetaAdmin() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyTestBase.java index f649aedc38f1..17efa663bf99 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyTestBase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyTestBase.java @@ -12,7 +12,7 @@ public class ManagedKeyTestBase { @Before public void setUp() throws Exception { TEST_UTIL.getConfiguration().set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockManagedKeyProvider.class.getName()); - TEST_UTIL.getConfiguration().set(HConstants.CRYPTO_PBE_ENABLED_CONF_KEY, "true"); + TEST_UTIL.getConfiguration().set(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, "true"); // Start the minicluster TEST_UTIL.startMiniCluster(1); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java index f35e2e7fb23f..3cf11d108f08 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java @@ -123,11 +123,11 @@ public ChoreService getChoreService() { return null; } - @Override public ManagedKeyAccessor getPBEKeyAccessor() { + @Override public ManagedKeyAccessor getManagedKeyAccessor() { return null; } - @Override public KeymetaAdmin getPBEKeymetaAdmin() { + @Override public KeymetaAdmin getKeymetaAdmin() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java index 6008e6884e13..806bf3f61b66 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java @@ -563,11 +563,11 @@ public ChoreService getChoreService() { return null; } - @Override public ManagedKeyAccessor getPBEKeyAccessor() { + @Override public ManagedKeyAccessor getManagedKeyAccessor() { return null; } - @Override public KeymetaAdmin getPBEKeymetaAdmin() { + @Override public KeymetaAdmin getKeymetaAdmin() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java index 1255971b458d..3ef6f9c4cd66 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java @@ -335,11 +335,11 @@ public ActiveMasterManager getActiveMasterManager() { return null; } - @Override public ManagedKeyAccessor getPBEKeyAccessor() { + @Override public ManagedKeyAccessor getManagedKeyAccessor() { return null; } - @Override public KeymetaAdmin getPBEKeymetaAdmin() { + @Override public KeymetaAdmin getKeymetaAdmin() { return null; } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java index e80ee85f1c16..dbaeb4b6d950 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java @@ -222,11 +222,11 @@ public Connection getConnection() { return null; } - @Override public ManagedKeyAccessor getPBEKeyAccessor() { + @Override public ManagedKeyAccessor getManagedKeyAccessor() { return null; } - @Override public KeymetaAdmin getPBEKeymetaAdmin() { + @Override public KeymetaAdmin getKeymetaAdmin() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStorePerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStorePerformanceEvaluation.java index b3adc8ace813..ba8e910a6f9c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStorePerformanceEvaluation.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStorePerformanceEvaluation.java @@ -65,11 +65,11 @@ public ServerName getServerName() { return null; } - @Override public ManagedKeyAccessor getPBEKeyAccessor() { + @Override public ManagedKeyAccessor getManagedKeyAccessor() { return null; } - @Override public KeymetaAdmin getPBEKeymetaAdmin() { + @Override public KeymetaAdmin getKeymetaAdmin() { return null; } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java index 2ddd6739b1ab..9aeaf36c7a84 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java @@ -844,11 +844,11 @@ public ChoreService getChoreService() { return null; } - @Override public ManagedKeyAccessor getPBEKeyAccessor() { + @Override public ManagedKeyAccessor getManagedKeyAccessor() { return null; } - @Override public KeymetaAdmin getPBEKeymetaAdmin() { + @Override public KeymetaAdmin getKeymetaAdmin() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java index 863dfc7fcfdd..ee5f42ea5524 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java @@ -357,11 +357,11 @@ public ChoreService getChoreService() { return null; } - @Override public ManagedKeyAccessor getPBEKeyAccessor() { + @Override public ManagedKeyAccessor getManagedKeyAccessor() { return null; } - @Override public KeymetaAdmin getPBEKeymetaAdmin() { + @Override public KeymetaAdmin getKeymetaAdmin() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java index 8e9e4e56866c..d1d0f89b7915 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java @@ -107,11 +107,11 @@ public ChoreService getChoreService() { return null; } - @Override public ManagedKeyAccessor getPBEKeyAccessor() { + @Override public ManagedKeyAccessor getManagedKeyAccessor() { return null; } - @Override public KeymetaAdmin getPBEKeymetaAdmin() { + @Override public KeymetaAdmin getKeymetaAdmin() { return null; } diff --git a/hbase-testing-util/src/main/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-testing-util/src/main/java/org/apache/hadoop/hbase/HBaseTestingUtility.java index 6fe8428b27f7..659bfe34067a 100644 --- a/hbase-testing-util/src/main/java/org/apache/hadoop/hbase/HBaseTestingUtility.java +++ b/hbase-testing-util/src/main/java/org/apache/hadoop/hbase/HBaseTestingUtility.java @@ -2945,7 +2945,7 @@ public Admin getAdmin() throws IOException { return hbaseAdmin; } - public KeymetaAdminClient getPBEAdmin() throws IOException { + public KeymetaAdminClient getKeymetaAdmin() throws IOException { return new KeymetaAdminClient(getConnection()); } From 4cb86926f759ca73ab5076efd85adfb654a408b5 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Thu, 3 Apr 2025 16:33:04 +0530 Subject: [PATCH 17/70] Update PBE references in hbase-shell --- .../hbase/keymeta/KeymetaAdminClient.java | 4 +-- .../io/crypto/ManagedKeyStoreKeyProvider.java | 18 ++++++------ .../main/protobuf/server/ManagedKeys.proto | 2 +- .../hbase/keymeta/KeymetaServiceEndpoint.java | 16 +++++------ hbase-shell/src/main/ruby/hbase/hbase.rb | 4 +-- .../hbase/{pbe_admin.rb => keymeta_admin.rb} | 28 +++++++++---------- hbase-shell/src/main/ruby/hbase_constants.rb | 2 +- hbase-shell/src/main/ruby/shell.rb | 16 +++++------ hbase-shell/src/main/ruby/shell/commands.rb | 4 +-- ...pbe_enable.rb => enable_key_management.rb} | 10 +++---- ...pbe_get_statuses.rb => show_key_status.rb} | 10 +++---- 11 files changed, 57 insertions(+), 57 deletions(-) rename hbase-shell/src/main/ruby/hbase/{pbe_admin.rb => keymeta_admin.rb} (62%) rename hbase-shell/src/main/ruby/shell/commands/{pbe_enable.rb => enable_key_management.rb} (79%) rename hbase-shell/src/main/ruby/shell/commands/{pbe_get_statuses.rb => show_key_status.rb} (86%) diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java index 78e36779de0f..3d82974862de 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java @@ -31,7 +31,7 @@ public ManagedKeyStatus enableManagedKeys(String keyCust, String keyNamespace) t ManagedKeysResponse response = stub.enableManagedKeys(null, ManagedKeysRequest.newBuilder().setKeyCust(keyCust).setKeyNamespace(keyNamespace).build()); LOG.info("Got response: " + response); - return ManagedKeyStatus.forValue((byte) response.getPbeStatus().getNumber()); + return ManagedKeyStatus.forValue((byte) response.getKeyStatus().getNumber()); } catch (ServiceException e) { throw ProtobufUtil.handleRemoteException(e); } @@ -48,7 +48,7 @@ public List getManagedKeys(String keyCust, String keyNamespace) keyStatuses.add(new ManagedKeyData( status.getKeyCustBytes().toByteArray(), status.getKeyNamespace(), null, - ManagedKeyStatus.forValue((byte) status.getPbeStatus().getNumber()), + ManagedKeyStatus.forValue((byte) status.getKeyStatus().getNumber()), status.getKeyMetadata(), status.getRefreshTimestamp(), status.getReadOpCount(), status.getWriteOpCount())); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java index 21f23da4cd34..a18f7a0be2f1 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java @@ -12,7 +12,7 @@ @InterfaceAudience.Public public class ManagedKeyStoreKeyProvider extends KeyStoreKeyProvider implements ManagedKeyProvider { public static final String KEY_METADATA_ALIAS = "KeyAlias"; - public static final String KEY_METADATA_CUST = "KEY_CUST"; + public static final String KEY_METADATA_CUST = "KeyCustodian"; private Configuration conf; @@ -42,10 +42,10 @@ public ManagedKeyData getSystemKey(byte[] clusterId) { @Override public ManagedKeyData getManagedKey(byte[] key_cust, String key_namespace) throws IOException { checkConfig(); - String encodedPrefix = ManagedKeyProvider.encodeToStr(key_cust); - String aliasConfKey = HConstants.CRYPTO_MANAGED_KEY_STORE_CONF_KEY_PREFIX + encodedPrefix + "." + + String encodedCust = ManagedKeyProvider.encodeToStr(key_cust); + String aliasConfKey = HConstants.CRYPTO_MANAGED_KEY_STORE_CONF_KEY_PREFIX + encodedCust + "." + "alias"; - String keyMetadata = generateKeyMetadata(conf.get(aliasConfKey, null), encodedPrefix); + String keyMetadata = generateKeyMetadata(conf.get(aliasConfKey, null), encodedCust); return unwrapKey(keyMetadata); } @@ -53,11 +53,11 @@ public ManagedKeyData getManagedKey(byte[] key_cust, String key_namespace) throw public ManagedKeyData unwrapKey(String keyMetadataStr) throws IOException { Map keyMetadata = GsonUtil.getDefaultInstance().fromJson(keyMetadataStr, HashMap.class); - String encodedPrefix = keyMetadata.get(KEY_METADATA_CUST); - String activeStatusConfKey = HConstants.CRYPTO_MANAGED_KEY_STORE_CONF_KEY_PREFIX + encodedPrefix + + String encodedCust = keyMetadata.get(KEY_METADATA_CUST); + String activeStatusConfKey = HConstants.CRYPTO_MANAGED_KEY_STORE_CONF_KEY_PREFIX + encodedCust + ".active"; boolean isActive = conf.getBoolean(activeStatusConfKey, true); - byte[] key_cust = ManagedKeyProvider.decodeToBytes(encodedPrefix); + byte[] key_cust = ManagedKeyProvider.decodeToBytes(encodedCust); String alias = keyMetadata.get(KEY_METADATA_ALIAS); Key key = alias != null ? getKey(alias) : null; if (key != null) { @@ -74,10 +74,10 @@ private void checkConfig() { } } - public static String generateKeyMetadata(String aliasName, String encodedPrefix) { + public static String generateKeyMetadata(String aliasName, String encodedCust) { return GsonUtil.getDefaultInstance().toJson(new HashMap() {{ put(KEY_METADATA_ALIAS, aliasName); - put(KEY_METADATA_CUST, encodedPrefix); + put(KEY_METADATA_CUST, encodedCust); }}, HashMap.class); } diff --git a/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto b/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto index acbc71b41dbf..0270d845ad65 100644 --- a/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto +++ b/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto @@ -39,7 +39,7 @@ enum ManagedKeyStatus { message ManagedKeysResponse { required string key_cust = 1; required string key_namespace = 2; - required ManagedKeyStatus pbe_status = 3; + required ManagedKeyStatus key_status = 3; optional string key_metadata = 4; optional int64 refresh_timestamp = 5; optional int64 read_op_count = 6; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java index 0e65a7d01a30..b2899955685e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java @@ -109,11 +109,11 @@ public void enableManagedKeys(RpcController controller, ManagedKeysRequest reque try { ManagedKeyStatus managedKeyStatus = master.getKeymetaAdmin() .enableManagedKeys(request.getKeyCust(), request.getKeyNamespace()); - builder.setPbeStatus(ManagedKeysProtos.ManagedKeyStatus.valueOf( + builder.setKeyStatus(ManagedKeysProtos.ManagedKeyStatus.valueOf( managedKeyStatus.getVal())); } catch (IOException e) { CoprocessorRpcUtils.setControllerException(controller, e); - builder.setPbeStatus(ManagedKeysProtos.ManagedKeyStatus.KEY_FAILED); + builder.setKeyStatus(ManagedKeysProtos.ManagedKeyStatus.KEY_FAILED); } } done.run(builder.build()); @@ -130,9 +130,9 @@ public void getManagedKeys(RpcController controller, ManagedKeysRequest request, List managedKeyStatuses = master.getKeymetaAdmin() .getManagedKeys(request.getKeyCust(), request.getKeyNamespace()); for (ManagedKeyData keyData: managedKeyStatuses) { - builder.setPbeStatus( + builder.setKeyStatus( ManagedKeysProtos.ManagedKeyStatus.valueOf(keyData.getKeyStatus().getVal())); - builder.setPbeStatus(ManagedKeysProtos.ManagedKeyStatus.valueOf( + builder.setKeyStatus(ManagedKeysProtos.ManagedKeyStatus.valueOf( keyData.getKeyStatus().getVal())) .setKeyMetadata(keyData.getKeyMetadata()) .setRefreshTimestamp(keyData.getRefreshTimestamp()) @@ -143,10 +143,10 @@ public void getManagedKeys(RpcController controller, ManagedKeysRequest request, } } catch (IOException e) { CoprocessorRpcUtils.setControllerException(controller, e); - builder.setPbeStatus(ManagedKeysProtos.ManagedKeyStatus.KEY_FAILED); + builder.setKeyStatus(ManagedKeysProtos.ManagedKeyStatus.KEY_FAILED); } catch (KeyException e) { CoprocessorRpcUtils.setControllerException(controller, new IOException(e)); - builder.setPbeStatus(ManagedKeysProtos.ManagedKeyStatus.KEY_FAILED); + builder.setKeyStatus(ManagedKeysProtos.ManagedKeyStatus.KEY_FAILED); } } done.run(responseBuilder.build()); @@ -158,7 +158,7 @@ private byte[] convertToKeyCustBytes(RpcController controller, ManagedKeysReques try { key_cust = Base64.getDecoder().decode(request.getKeyCust()); } catch (IllegalArgumentException e) { - builder.setPbeStatus(ManagedKeysProtos.ManagedKeyStatus.KEY_FAILED); + builder.setKeyStatus(ManagedKeysProtos.ManagedKeyStatus.KEY_FAILED); CoprocessorRpcUtils.setControllerException(controller, new IOException( "Failed to decode specified prefix as Base64 string: " + request.getKeyCust(), e)); } @@ -174,7 +174,7 @@ private ManagedKeysResponse.Builder getResponseBuilder(RpcController controller, key_cust = Base64.getDecoder().decode(request.getKeyCust()); builder.setKeyCustBytes(ByteString.copyFrom(key_cust)); } catch (IllegalArgumentException e) { - builder.setPbeStatus(ManagedKeysProtos.ManagedKeyStatus.KEY_FAILED); + builder.setKeyStatus(ManagedKeysProtos.ManagedKeyStatus.KEY_FAILED); CoprocessorRpcUtils.setControllerException(controller, new IOException( "Failed to decode specified prefix as Base64 string: " + request.getKeyCust(), e)); } diff --git a/hbase-shell/src/main/ruby/hbase/hbase.rb b/hbase-shell/src/main/ruby/hbase/hbase.rb index d93adadd640f..9b24e5caa973 100644 --- a/hbase-shell/src/main/ruby/hbase/hbase.rb +++ b/hbase-shell/src/main/ruby/hbase/hbase.rb @@ -59,8 +59,8 @@ def rsgroup_admin ::Hbase::RSGroupAdmin.new(self.connection) end - def pbe_admin - ::Hbase::PBEAdmin.new(@connection) + def keymeta_admin + ::Hbase::KeymetaAdmin.new(@connection) end def taskmonitor diff --git a/hbase-shell/src/main/ruby/hbase/pbe_admin.rb b/hbase-shell/src/main/ruby/hbase/keymeta_admin.rb similarity index 62% rename from hbase-shell/src/main/ruby/hbase/pbe_admin.rb rename to hbase-shell/src/main/ruby/hbase/keymeta_admin.rb index 710edabfdc64..619f8bf30c88 100644 --- a/hbase-shell/src/main/ruby/hbase/pbe_admin.rb +++ b/hbase-shell/src/main/ruby/hbase/keymeta_admin.rb @@ -20,10 +20,10 @@ java_import org.apache.hadoop.hbase.keymeta.KeymetaAdminClient module Hbase - class PBEAdmin + class KeymetaAdmin def initialize(connection) @connection = connection - @admin = PBEKeymetaAdminClient.new(connection) + @admin = KeymetaAdminClient.new(connection) @hb_admin = @connection.getAdmin end @@ -31,22 +31,22 @@ def close @admin.close end - def pbe_enable(pbe_prefix) - prefix, namespace = extract_prefix_info(pbe_prefix) - @admin.enablePBE(prefix, namespace) + def enable_key_management(key_info) + cust, namespace = extract_cust_info(key_info) + @admin.enableManagedKeys(cust, namespace) end - def show_pbe_status(pbe_prefix) - prefix, namespace = extract_prefix_info(pbe_prefix) - @admin.getPBEKeyStatuses(prefix, namespace) + def get_key_statuses(key_info) + cust, namespace = extract_cust_info(key_info) + @admin.getManagedKeys(cust, namespace) end - def extract_prefix_info(pbe_prefix) - prefixInfo = pbe_prefix.split(':') - raise(ArgumentError, 'Invalid prefix:namespace format') unless (prefixInfo.length == 1 || - prefixInfo.length == 2) - return prefixInfo[0], prefixInfo.length > 1 ? prefixInfo[1] : - PBEKeyData::KEY_NAMESPACE_GLOBAL + def extract_cust_info(key_info) + custInfo = key_info.split(':') + raise(ArgumentError, 'Invalid cust:namespace format') unless (custInfo.length == 1 || + custInfo.length == 2) + return custInfo[0], custInfo.length > 1 ? custInfo[1] : + ManagedKeyData::KEY_NAMESPACE_GLOBAL end end end diff --git a/hbase-shell/src/main/ruby/hbase_constants.rb b/hbase-shell/src/main/ruby/hbase_constants.rb index 87effe071a37..67892e5538c0 100644 --- a/hbase-shell/src/main/ruby/hbase_constants.rb +++ b/hbase-shell/src/main/ruby/hbase_constants.rb @@ -138,4 +138,4 @@ def self.promote_constants(constants) require 'hbase/security' require 'hbase/visibility_labels' require 'hbase/rsgroup_admin' -require 'hbase/pbe_admin' +require 'hbase/keymeta_admin' diff --git a/hbase-shell/src/main/ruby/shell.rb b/hbase-shell/src/main/ruby/shell.rb index c8afe02240ef..c87498bc6d35 100644 --- a/hbase-shell/src/main/ruby/shell.rb +++ b/hbase-shell/src/main/ruby/shell.rb @@ -150,8 +150,8 @@ def hbase_rsgroup_admin @rsgroup_admin ||= hbase.rsgroup_admin end - def hbase_pbe_admin - @pbe_admin ||= hbase.pbe_admin + def hbase_keymeta_admin + @keymeta_admin ||= hbase.keymeta_admin end ## @@ -620,13 +620,13 @@ def self.exception_handler(hide_traceback) ) Shell.load_command_group( - 'pbe', - full_name: 'PBE', - comment: "NOTE: The PBE KeyMeta Coprocessor Endpoint must be enabled on the Master else commands fail with: - UnknownProtocolException: No registered Master Coprocessor Endpoint found for PBEAdminService", + 'keymeta', + full_name: 'Keymeta', + comment: "NOTE: The KeyMeta Coprocessor Endpoint must be enabled on the Master else commands fail with: + UnknownProtocolException: No registered Master Coprocessor Endpoint found for ManagedKeysService", commands: %w[ - pbe_enable - pbe_get_statuses + enable_key_management + show_key_status ] ) diff --git a/hbase-shell/src/main/ruby/shell/commands.rb b/hbase-shell/src/main/ruby/shell/commands.rb index 3b9a11a0962f..a97dddc4e6a0 100644 --- a/hbase-shell/src/main/ruby/shell/commands.rb +++ b/hbase-shell/src/main/ruby/shell/commands.rb @@ -105,8 +105,8 @@ def rsgroup_admin @shell.hbase_rsgroup_admin end - def pbe_admin - @shell.hbase_pbe_admin + def keymeta_admin + @shell.hbase_keymeta_admin end #---------------------------------------------------------------------- diff --git a/hbase-shell/src/main/ruby/shell/commands/pbe_enable.rb b/hbase-shell/src/main/ruby/shell/commands/enable_key_management.rb similarity index 79% rename from hbase-shell/src/main/ruby/shell/commands/pbe_enable.rb rename to hbase-shell/src/main/ruby/shell/commands/enable_key_management.rb index ccaba7762470..64ffe47ee83e 100644 --- a/hbase-shell/src/main/ruby/shell/commands/pbe_enable.rb +++ b/hbase-shell/src/main/ruby/shell/commands/enable_key_management.rb @@ -17,18 +17,18 @@ module Shell module Commands - class PbeEnable < Command + class EnableKeyManagement < Command def help <<-EOF -Enable PBE for a given prefix:namespace (prefix in Base64 format). +Enable key management for a given cust:namespace (cust in Base64 format). If no namespace is specified, the global namespace (*) is used. EOF end - def command(pbe_prefix) + def command(key_info) formatter.header(['KEY', 'STATUS']) - status = pbe_admin.pbe_enable(pbe_prefix) - formatter.row([pbe_prefix, status.toString()]) + status = keymeta_admin.enable_key_management(key_info) + formatter.row([key_info, status.toString()]) end end end diff --git a/hbase-shell/src/main/ruby/shell/commands/pbe_get_statuses.rb b/hbase-shell/src/main/ruby/shell/commands/show_key_status.rb similarity index 86% rename from hbase-shell/src/main/ruby/shell/commands/pbe_get_statuses.rb rename to hbase-shell/src/main/ruby/shell/commands/show_key_status.rb index 8a34b6579ff1..540f5feb4cae 100644 --- a/hbase-shell/src/main/ruby/shell/commands/pbe_get_statuses.rb +++ b/hbase-shell/src/main/ruby/shell/commands/show_key_status.rb @@ -17,21 +17,21 @@ module Shell module Commands - class PbeGetStatuses < Command + class ShowKeyStatus < Command def help <<-EOF -Get key statuses for a given prefix:namespace (prefix in Base64 format). +Show key statuses for a given cust:namespace (cust in Base64 format). If no namespace is specified, the global namespace (*) is used. EOF end - def command(pbe_prefix) + def command(key_info) formatter.header(['ENCODED-KEY', 'NAMESPACE', 'STATUS', 'METADATA', 'METADATA-HASH', 'REFRESH-TIMESTAMP', 'READ-OP-COUNT', 'WRITE-OP-COUNT']) - statuses = pbe_admin.show_pbe_status(pbe_prefix) + statuses = keymeta_admin.get_key_statuses(key_info) statuses.each { |status| formatter.row([ - status.getPBEPrefixEncoded(), + status.getKeyCustodianEncoded(), status.getKeyNamespace(), status.getKeyStatus().toString(), status.getKeyMetadata(), From 53b8c6344821529655b63ea1a027fa077c14b08c Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Fri, 11 Apr 2025 22:49:20 +0530 Subject: [PATCH 18/70] Rename enableManagedKeys to enableKeyManagement, some test coverage --- .../hbase/keymeta/KeymetaAdminClient.java | 5 +- .../hbase/io/crypto/ManagedKeyData.java | 21 +- .../io/crypto/ManagedKeyStoreKeyProvider.java | 6 +- .../hadoop/hbase/keymeta/KeymetaAdmin.java | 2 +- .../io/crypto/MockManagedKeyProvider.java | 27 +-- .../TestManagedKeyStoreKeyProvider.java | 8 +- .../main/protobuf/server/ManagedKeys.proto | 2 +- .../hbase/keymeta/KeymetaAdminImpl.java | 3 +- .../hbase/keymeta/KeymetaServiceEndpoint.java | 4 +- .../hbase/keymeta/ManagedKeyTestBase.java | 2 + .../hbase/keymeta/TestManagedKeymeta.java | 70 +++++++ .../hbase/master/TestSystemKeyAccessor.java | 182 ++++++++++++++++++ .../src/main/ruby/hbase/keymeta_admin.rb | 4 +- 13 files changed, 306 insertions(+), 30 deletions(-) create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessor.java diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java index 3d82974862de..41d048774713 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java @@ -26,9 +26,10 @@ public KeymetaAdminClient(Connection conn) throws IOException { } @Override - public ManagedKeyStatus enableManagedKeys(String keyCust, String keyNamespace) throws IOException { + public ManagedKeyStatus enableKeyManagement(String keyCust, String keyNamespace) + throws IOException { try { - ManagedKeysResponse response = stub.enableManagedKeys(null, + ManagedKeysResponse response = stub.enableKeyManagement(null, ManagedKeysRequest.newBuilder().setKeyCust(keyCust).setKeyNamespace(keyNamespace).build()); LOG.info("Got response: " + response); return ManagedKeyStatus.forValue((byte) response.getKeyStatus().getNumber()); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java index 95777be89942..5b5c2afda9ea 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java @@ -19,6 +19,7 @@ import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; +import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.util.DataChecksum; import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; @@ -51,7 +52,17 @@ */ @InterfaceAudience.Public public class ManagedKeyData { - public static final String KEY_NAMESPACE_GLOBAL = "*"; + /** + * Special value to be used for custodian or namespace to indicate that it is global, meaning it + * is not associated with a specific custodian or namespace. + */ + public static final String KEY_SPACE_GLOBAL = "*"; + + /** + * Encoded form of global custodian. + */ + public static final String KEY_GLOBAL_CUSTODIAN = + ManagedKeyProvider.encodeToStr(KEY_SPACE_GLOBAL.getBytes()); private final byte[] keyCust; private final String keyNamespace; @@ -112,6 +123,12 @@ public ManagedKeyData(byte[] key_cust, String key_namespace, Key theKey, Managed this.writeOpCount = writeOpCount; } + @VisibleForTesting + public ManagedKeyData cloneWithoutKey() { + return new ManagedKeyData(keyCust, keyNamespace, null, keyStatus, keyMetadata, + refreshTimestamp, readOpCount, writeOpCount); + } + /** * Returns the custodian associated with the key. * @@ -167,7 +184,7 @@ public String getKeyMetadata() { } @Override public String toString() { - return "ManagedKeyData{" + "custSpecix=" + Arrays.toString(keyCust) + ", keyNamespace='" + return "ManagedKeyData{" + "keyCustodian=" + Arrays.toString(keyCust) + ", keyNamespace='" + keyNamespace + '\'' + ", keyStatus=" + keyStatus + ", keyMetadata='" + keyMetadata + '\'' + ", refreshTimestamp=" + refreshTimestamp + '}'; } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java index a18f7a0be2f1..8c42ab0b99f8 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java @@ -35,7 +35,7 @@ public ManagedKeyData getSystemKey(byte[] clusterId) { // Encode clusterId too for consistency with that of key custodian. String keyMetadata = generateKeyMetadata(masterKeyAlias, ManagedKeyProvider.encodeToStr(clusterId)); - return new ManagedKeyData(clusterId, ManagedKeyData.KEY_NAMESPACE_GLOBAL, key, ManagedKeyStatus.ACTIVE, + return new ManagedKeyData(clusterId, ManagedKeyData.KEY_SPACE_GLOBAL, key, ManagedKeyStatus.ACTIVE, keyMetadata); } @@ -61,10 +61,10 @@ public ManagedKeyData unwrapKey(String keyMetadataStr) throws IOException { String alias = keyMetadata.get(KEY_METADATA_ALIAS); Key key = alias != null ? getKey(alias) : null; if (key != null) { - return new ManagedKeyData(key_cust, ManagedKeyData.KEY_NAMESPACE_GLOBAL, key, + return new ManagedKeyData(key_cust, ManagedKeyData.KEY_SPACE_GLOBAL, key, isActive ? ManagedKeyStatus.ACTIVE : ManagedKeyStatus.INACTIVE, keyMetadataStr); } - return new ManagedKeyData(key_cust, ManagedKeyData.KEY_NAMESPACE_GLOBAL, null, + return new ManagedKeyData(key_cust, ManagedKeyData.KEY_SPACE_GLOBAL, null, isActive ? ManagedKeyStatus.FAILED : ManagedKeyStatus.DISABLED, keyMetadataStr); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java index 4edc90f2ac20..4ff4a88a2f15 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java @@ -39,7 +39,7 @@ public interface KeymetaAdmin { * @return The current status of the managed key. * @throws IOException if an error occurs while enabling key management. */ - ManagedKeyStatus enableManagedKeys(String keyCust, String keyNamespace) throws IOException; + ManagedKeyStatus enableKeyManagement(String keyCust, String keyNamespace) throws IOException; /** * Get the status of all the keys for the specified custodian. diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java index 444e8695c1a1..babf77b54746 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java @@ -71,18 +71,6 @@ public ManagedKeyData getKey(byte[] key_cust) { return getKey(key_cust, alias); } - public ManagedKeyData getKey(byte[] key_cust, String alias) { - Key key = keys.get(alias); - if (key == null) { - key = generateSecretKey(); - keys.put(alias, key); - } - ManagedKeyStatus keyStatus = this.keyStatus.get(alias); - return new ManagedKeyData(key_cust, ManagedKeyData.KEY_NAMESPACE_GLOBAL, key, - keyStatus == null ? ManagedKeyStatus.ACTIVE : keyStatus, - Bytes.toString(key_cust)+":"+alias); - } - public void setKeyStatus(String alias, ManagedKeyStatus status) { keyStatus.put(alias, status); } @@ -113,4 +101,19 @@ public static Key generateSecretKey() { keyGen.init(256); return keyGen.generateKey(); } + + private ManagedKeyData getKey(byte[] key_cust, String alias) { + ManagedKeyStatus keyStatus = this.keyStatus.get(alias); + Key key = null; + if (keyStatus != ManagedKeyStatus.FAILED && keyStatus != ManagedKeyStatus.DISABLED) { + key = keys.get(alias); + if (key == null) { + key = generateSecretKey(); + keys.put(alias, key); + } + } + return new ManagedKeyData(key_cust, ManagedKeyData.KEY_SPACE_GLOBAL, key, + keyStatus == null ? ManagedKeyStatus.ACTIVE : keyStatus, + Bytes.toString(key_cust)+":"+alias); + } } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyStoreKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyStoreKeyProvider.java index d09f4540e1c9..1aa50a48f340 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyStoreKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyStoreKeyProvider.java @@ -116,7 +116,7 @@ private void addEntry(String alias, String prefix) { @Test public void testGetManagedKey() throws Exception { for (Bytes prefix : prefix2key.keySet()) { - ManagedKeyData keyData = managedKeyProvider.getManagedKey(prefix.get(), ManagedKeyData.KEY_NAMESPACE_GLOBAL); + ManagedKeyData keyData = managedKeyProvider.getManagedKey(prefix.get(), ManagedKeyData.KEY_SPACE_GLOBAL); assertKeyData(keyData, ManagedKeyStatus.ACTIVE, prefix2key.get(prefix).get(), prefix.get(), prefix2alias.get(prefix)); } @@ -127,7 +127,7 @@ public void testGetInactiveKey() throws Exception { Bytes firstPrefix = prefix2key.keySet().iterator().next(); String encPrefix = Base64.getEncoder().encodeToString(firstPrefix.get()); conf.set(HConstants.CRYPTO_MANAGED_KEY_STORE_CONF_KEY_PREFIX + encPrefix + ".active", "false"); - ManagedKeyData keyData = managedKeyProvider.getManagedKey(firstPrefix.get(), ManagedKeyData.KEY_NAMESPACE_GLOBAL); + ManagedKeyData keyData = managedKeyProvider.getManagedKey(firstPrefix.get(), ManagedKeyData.KEY_SPACE_GLOBAL); assertNotNull(keyData); assertKeyData(keyData, ManagedKeyStatus.INACTIVE, prefix2key.get(firstPrefix).get(), firstPrefix.get(), prefix2alias.get(firstPrefix)); @@ -137,7 +137,7 @@ public void testGetInactiveKey() throws Exception { public void testGetInvalidKey() throws Exception { byte[] invalidPrefixBytes = "invalid".getBytes(); ManagedKeyData keyData = managedKeyProvider.getManagedKey(invalidPrefixBytes, - ManagedKeyData.KEY_NAMESPACE_GLOBAL); + ManagedKeyData.KEY_SPACE_GLOBAL); assertNotNull(keyData); assertKeyData(keyData, ManagedKeyStatus.FAILED, null, invalidPrefixBytes, null); } @@ -147,7 +147,7 @@ public void testGetDisabledKey() throws Exception { byte[] invalidPrefix = new byte[] { 1, 2, 3 }; String invalidPrefixEnc = ManagedKeyProvider.encodeToStr(invalidPrefix); conf.set(HConstants.CRYPTO_MANAGED_KEY_STORE_CONF_KEY_PREFIX + invalidPrefixEnc + ".active", "false"); - ManagedKeyData keyData = managedKeyProvider.getManagedKey(invalidPrefix, ManagedKeyData.KEY_NAMESPACE_GLOBAL); + ManagedKeyData keyData = managedKeyProvider.getManagedKey(invalidPrefix, ManagedKeyData.KEY_SPACE_GLOBAL); assertNotNull(keyData); assertKeyData(keyData, ManagedKeyStatus.DISABLED, null, invalidPrefix, null); diff --git a/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto b/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto index 0270d845ad65..5b87ef8c8aff 100644 --- a/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto +++ b/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto @@ -51,7 +51,7 @@ message GetManagedKeysResponse { } service ManagedKeysService { - rpc EnableManagedKeys(ManagedKeysRequest) + rpc EnableKeyManagement(ManagedKeysRequest) returns (ManagedKeysResponse); rpc GetManagedKeys(ManagedKeysRequest) returns (GetManagedKeysResponse); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java index 36404a5f4056..ec971b44762f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java @@ -39,7 +39,8 @@ public KeymetaAdminImpl(Server server) { } @Override - public ManagedKeyStatus enableManagedKeys(String keyCust, String keyNamespace) throws IOException { + public ManagedKeyStatus enableKeyManagement(String keyCust, String keyNamespace) + throws IOException { assertKeyManagementEnabled(); LOG.info("Trying to enable key management on custodian: {} under namespace: {}", keyCust, keyNamespace); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java index b2899955685e..2e084c8f05f6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java @@ -102,13 +102,13 @@ private class KeyMetaAdminServiceImpl extends ManagedKeysService { * @param done The callback to be invoked with the response. */ @Override - public void enableManagedKeys(RpcController controller, ManagedKeysRequest request, + public void enableKeyManagement(RpcController controller, ManagedKeysRequest request, RpcCallback done) { ManagedKeysResponse.Builder builder = getResponseBuilder(controller, request); if (builder.getKeyCust() != null) { try { ManagedKeyStatus managedKeyStatus = master.getKeymetaAdmin() - .enableManagedKeys(request.getKeyCust(), request.getKeyNamespace()); + .enableKeyManagement(request.getKeyCust(), request.getKeyNamespace()); builder.setKeyStatus(ManagedKeysProtos.ManagedKeyStatus.valueOf( managedKeyStatus.getVal())); } catch (IOException e) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyTestBase.java index 17efa663bf99..4b5ade468202 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyTestBase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyTestBase.java @@ -13,10 +13,12 @@ public class ManagedKeyTestBase { public void setUp() throws Exception { TEST_UTIL.getConfiguration().set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockManagedKeyProvider.class.getName()); TEST_UTIL.getConfiguration().set(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, "true"); + TEST_UTIL.getConfiguration().set("hbase.coprocessor.master.classes", KeymetaServiceEndpoint.class.getName()); // Start the minicluster TEST_UTIL.startMiniCluster(1); TEST_UTIL.waitFor(60000, () -> TEST_UTIL.getMiniHBaseCluster().getMaster().isInitialized()); + TEST_UTIL.waitUntilAllRegionsAssigned(KeymetaTableAccessor.KEY_META_TABLE_NAME); } @After diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java new file mode 100644 index 000000000000..f9e1e8fe5363 --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java @@ -0,0 +1,70 @@ +package org.apache.hadoop.hbase.keymeta; + +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.io.crypto.Encryption; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; +import org.apache.hadoop.hbase.io.crypto.MockManagedKeyProvider; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus; +import org.apache.hadoop.hbase.master.HMaster; +import org.apache.hadoop.hbase.testclassification.MasterTests; +import org.apache.hadoop.hbase.testclassification.MediumTests; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import java.io.IOException; +import java.security.KeyException; +import java.util.List; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertThrows; + +@Category({ MasterTests.class, MediumTests.class }) +public class TestManagedKeymeta extends ManagedKeyTestBase { + + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestManagedKeymeta.class); + + @Test + public void testEnableLocal() throws Exception { + HMaster master = TEST_UTIL.getHBaseCluster().getMaster(); + KeymetaAdmin keymetaAdmin = master.getKeymetaAdmin(); + doTestEnable(keymetaAdmin); + } + + @Test + public void testEnableOverRPC() throws Exception { + KeymetaAdmin adminClient = new KeymetaAdminClient(TEST_UTIL.getConnection()); + doTestEnable(adminClient); + } + + private void doTestEnable(KeymetaAdmin adminClient) throws IOException, KeyException { + HMaster master = TEST_UTIL.getHBaseCluster().getMaster(); + MockManagedKeyProvider managedKeyProvider = (MockManagedKeyProvider) + Encryption.getKeyProvider(master.getConfiguration()); + ; + String cust = "cust1"; + String encodedCust = ManagedKeyProvider.encodeToStr(cust.getBytes()); + ManagedKeyStatus managedKeyStatus = + adminClient.enableKeyManagement(encodedCust, ManagedKeyData.KEY_SPACE_GLOBAL); + assertNotNull(managedKeyStatus); + assertEquals(ManagedKeyStatus.ACTIVE, managedKeyStatus); + + List managedKeys = + adminClient.getManagedKeys(encodedCust, ManagedKeyData.KEY_SPACE_GLOBAL); + assertEquals(1, managedKeys.size()); + assertEquals(managedKeyProvider.getKey(cust.getBytes()).cloneWithoutKey(), + managedKeys.get(0).cloneWithoutKey()); + + String nonExistentCust = "nonExistentCust"; + managedKeyProvider.setKeyStatus(nonExistentCust, ManagedKeyStatus.FAILED); + assertEquals(ManagedKeyStatus.FAILED, adminClient.enableKeyManagement( + ManagedKeyProvider.encodeToStr(nonExistentCust.getBytes()), ManagedKeyData.KEY_SPACE_GLOBAL)); + + String disabledCust = "disabledCust"; + managedKeyProvider.setKeyStatus(disabledCust, ManagedKeyStatus.DISABLED); + assertEquals(ManagedKeyStatus.DISABLED, adminClient.enableKeyManagement( + ManagedKeyProvider.encodeToStr(disabledCust.getBytes()), ManagedKeyData.KEY_SPACE_GLOBAL)); + } +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessor.java new file mode 100644 index 000000000000..4421879f511d --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessor.java @@ -0,0 +1,182 @@ +package org.apache.hadoop.hbase.master; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HBaseTestingUtil; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.Server; +import org.apache.hadoop.hbase.keymeta.SystemKeyAccessor; +import org.apache.hadoop.hbase.testclassification.MasterTests; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.apache.hadoop.hbase.util.CommonFSUtils; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.TestName; +import org.junit.runner.RunWith; +import org.junit.runners.BlockJUnit4ClassRunner; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameter; +import org.junit.runners.Parameterized.Parameters; +import org.junit.runners.Suite; +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.stream.IntStream; +import static org.apache.hadoop.hbase.HConstants.SYSTEM_KEY_FILE_PREFIX; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +@RunWith(Suite.class) +@Suite.SuiteClasses({ TestSystemKeyAccessor.TestWhenDisabled.class, + TestSystemKeyAccessor.TestAccessor.class, + TestSystemKeyAccessor.TestForInvalidFileNames.class }) +@Category({ MasterTests.class, SmallTests.class }) +public class TestSystemKeyAccessor { + private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); + + @Rule + public TestName name = new TestName(); + + protected Configuration conf; + protected Path testRootDir; + protected FileSystem fs; + + protected FileSystem mockFileSystem = mock(FileSystem.class); + protected Server mockServer = mock(Server.class); + protected SystemKeyAccessor systemKeyAccessor; + + @Before + public void setUp() throws IOException { + conf = TEST_UTIL.getConfiguration(); + testRootDir = TEST_UTIL.getDataTestDir(name.getMethodName()); + fs = testRootDir.getFileSystem(conf); + + conf.set(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, "true"); + + when(mockServer.getFileSystem()).thenReturn(mockFileSystem); + when(mockServer.getConfiguration()).thenReturn(conf); + systemKeyAccessor = new SystemKeyAccessor(mockServer); + } + + private static FileStatus createMockFile(String fileName) { + Path mockPath = mock(Path.class); + when(mockPath.getName()).thenReturn(fileName); + FileStatus mockFileStatus = mock(FileStatus.class); + when(mockFileStatus.getPath()).thenReturn(mockPath); + return mockFileStatus; + } + + @RunWith(BlockJUnit4ClassRunner.class) + @Category({ MasterTests.class, SmallTests.class }) + public static class TestWhenDisabled extends TestSystemKeyAccessor { + @ClassRule public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestWhenDisabled.class); + + @Override public void setUp() throws IOException { + super.setUp(); + conf.set(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, "false"); + } + + @Test public void testDisabled() throws Exception { + assertNull(systemKeyAccessor.getAllSystemKeyFiles()); + assertNull(systemKeyAccessor.getLatestSystemKeyFile()); + } + } + + @RunWith(BlockJUnit4ClassRunner.class) + @Category({ MasterTests.class, SmallTests.class }) + public static class TestAccessor extends TestSystemKeyAccessor { + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestAccessor.class); + + @Test + public void testGetLatestWithNone() throws Exception { + when(mockFileSystem.globStatus(any())).thenReturn(new FileStatus[0]); + + RuntimeException ex = assertThrows(RuntimeException.class, + () -> systemKeyAccessor.getLatestSystemKeyFile()); + assertEquals("No cluster key initialized yet", ex.getMessage()); + } + + @Test + public void testGetWithSingle() throws Exception { + String fileName = SYSTEM_KEY_FILE_PREFIX + "1"; + FileStatus mockFileStatus = createMockFile(fileName); + + Path systemKeyDir = CommonFSUtils.getSystemKeyDir(conf); + when(mockFileSystem.globStatus(eq(new Path(systemKeyDir, SYSTEM_KEY_FILE_PREFIX+"*")))) + .thenReturn(new FileStatus[] { mockFileStatus }); + + List files = systemKeyAccessor.getAllSystemKeyFiles(); + assertEquals(1, files.size()); + assertEquals(fileName, files.get(0).getName()); + + Path latestSystemKeyFile = systemKeyAccessor.getLatestSystemKeyFile(); + assertEquals(fileName, latestSystemKeyFile.getName()); + + assertEquals(1, systemKeyAccessor.extractSystemKeySeqNum(latestSystemKeyFile)); + } + + @Test + public void testGetWithMultiple() throws Exception { + FileStatus[] mockFileStatuses = IntStream.rangeClosed(1, 3) + .mapToObj(i -> createMockFile(SYSTEM_KEY_FILE_PREFIX + i)) + .toArray(FileStatus[]::new); + + Path systemKeyDir = CommonFSUtils.getSystemKeyDir(conf); + when(mockFileSystem.globStatus(eq(new Path(systemKeyDir, SYSTEM_KEY_FILE_PREFIX+"*")))) + .thenReturn( mockFileStatuses ); + + List files = systemKeyAccessor.getAllSystemKeyFiles(); + assertEquals(3, files.size()); + + Path latestSystemKeyFile = systemKeyAccessor.getLatestSystemKeyFile(); + assertEquals(3, systemKeyAccessor.extractSystemKeySeqNum(latestSystemKeyFile)); + } + } + + @RunWith(Parameterized.class) + @Category({ MasterTests.class, SmallTests.class }) + public static class TestForInvalidFileNames extends TestSystemKeyAccessor { + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestForInvalidFileNames.class); + + @Parameter(0) + public String fileName; + @Parameter(1) + public String expectedErrorMessage; + + @Parameters(name = "{index},fileName={0}") + public static Collection data() { + return Arrays.asList(new Object[][] { + { "abcd", "Couldn't parse key file name: abcd" }, + {SYSTEM_KEY_FILE_PREFIX+"abcd", "Couldn't parse key file name: "+ + SYSTEM_KEY_FILE_PREFIX+"abcd"}, + // Add more test cases here + }); + } + + @Test + public void testForInvalid() throws Exception { + FileStatus mockFileStatus = createMockFile(fileName); + + IOException ex = assertThrows(IOException.class, + () -> systemKeyAccessor.extractSystemKeySeqNum(mockFileStatus.getPath())); + assertEquals(expectedErrorMessage, ex.getMessage()); + } + } +} diff --git a/hbase-shell/src/main/ruby/hbase/keymeta_admin.rb b/hbase-shell/src/main/ruby/hbase/keymeta_admin.rb index 619f8bf30c88..729f02d0dc03 100644 --- a/hbase-shell/src/main/ruby/hbase/keymeta_admin.rb +++ b/hbase-shell/src/main/ruby/hbase/keymeta_admin.rb @@ -33,7 +33,7 @@ def close def enable_key_management(key_info) cust, namespace = extract_cust_info(key_info) - @admin.enableManagedKeys(cust, namespace) + @admin.enableKeyManagement(cust, namespace) end def get_key_statuses(key_info) @@ -46,7 +46,7 @@ def extract_cust_info(key_info) raise(ArgumentError, 'Invalid cust:namespace format') unless (custInfo.length == 1 || custInfo.length == 2) return custInfo[0], custInfo.length > 1 ? custInfo[1] : - ManagedKeyData::KEY_NAMESPACE_GLOBAL + ManagedKeyData::KEY_SPACE_GLOBAL end end end From 20310c08b746032ca9302c4e19659d0b6c22cd92 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Mon, 14 Apr 2025 22:01:47 +0530 Subject: [PATCH 19/70] More test coverage --- .../hbase/io/crypto/ManagedKeyProvider.java | 18 +- .../io/crypto/MockManagedKeyProvider.java | 15 +- .../hbase/keymeta/KeymetaAdminImpl.java | 2 +- .../hbase/keymeta/KeymetaTableAccessor.java | 4 +- .../hbase/keymeta/TestManagedKeymeta.java | 5 +- .../hadoop/hbase/master/TestKeymetaAdmin.java | 197 ++++++++++++++++++ .../hadoop/hbase/master/TestSystemKey.java | 4 +- 7 files changed, 231 insertions(+), 14 deletions(-) create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdmin.java diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java index faceaf9212ee..2c99235354eb 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java @@ -69,18 +69,28 @@ public interface ManagedKeyProvider extends KeyProvider { */ ManagedKeyData unwrapKey(String keyMetaData) throws IOException; - static byte[] decodeToBytes(String keyCust) throws IOException { + /** + * Decode the given key custodian which is encoded as Base64 string. + * @param encodedKeyCust The encoded key custodian + * @return the decoded key custodian + * @throws IOException + */ + static byte[] decodeToBytes(String encodedKeyCust) throws IOException { byte[] key_cust; try { - key_cust = Base64.getDecoder().decode(keyCust); + key_cust = Base64.getDecoder().decode(encodedKeyCust); } catch (IllegalArgumentException e) { - throw new IOException("Failed to decode specified key custodian as Base64 string: " + - keyCust, e); + throw new IOException("Failed to decode specified key custodian as Base64 string: " + encodedKeyCust, e); } return key_cust; } + /** + * Encode the given key custodian as Base64 string. + * @param key_cust The key custodian + * @return the encoded key custodian as Base64 string + */ static String encodeToStr(byte[] key_cust) { return Base64.getEncoder().encodeToString(key_cust); } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java index babf77b54746..6850104629d6 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java @@ -71,14 +71,18 @@ public ManagedKeyData getKey(byte[] key_cust) { return getKey(key_cust, alias); } - public void setKeyStatus(String alias, ManagedKeyStatus status) { + public void setMockedKeyStatus(String alias, ManagedKeyStatus status) { keyStatus.put(alias, status); } - public void setKey(String alias, Key key) { + public void setMockedKey(String alias, Key key) { keys.put(alias, key); } + public Key getMockedKey(String alias) { + return keys.get(alias); + } + public void setCluterKeyAlias(String alias) { this.systemKeyAlias = alias; } @@ -106,11 +110,14 @@ private ManagedKeyData getKey(byte[] key_cust, String alias) { ManagedKeyStatus keyStatus = this.keyStatus.get(alias); Key key = null; if (keyStatus != ManagedKeyStatus.FAILED && keyStatus != ManagedKeyStatus.DISABLED) { - key = keys.get(alias); - if (key == null) { + if (! keys.containsKey(alias)) { key = generateSecretKey(); keys.put(alias, key); } + key = keys.get(alias); + if (key == null) { + return null; + } } return new ManagedKeyData(key_cust, ManagedKeyData.KEY_SPACE_GLOBAL, key, keyStatus == null ? ManagedKeyStatus.ACTIVE : keyStatus, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java index ec971b44762f..acca80ac609c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java @@ -78,6 +78,6 @@ public List getManagedKeys(String keyCust, String keyNamespace) LOG.info("Getting key statuses for custodian: {} under namespace: {}", keyCust, keyNamespace); byte[] key_cust = ManagedKeyProvider.decodeToBytes(keyCust); - return super.getAllKeys(key_cust, keyNamespace); + return getAllKeys(key_cust, keyNamespace); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java index ce22eaeaa447..3605ce634743 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.hbase.keymeta; +import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.Server; @@ -106,7 +107,8 @@ public void addKey(ManagedKeyData keyData) throws IOException { * @throws IOException when there is an underlying IOException. * @throws KeyException when there is an underlying KeyException. */ - protected List getAllKeys(byte[] key_cust, String keyNamespace) + @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.UNITTEST) + public List getAllKeys(byte[] key_cust, String keyNamespace) throws IOException, KeyException { assertKeyManagementEnabled(); Connection connection = server.getConnection(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java index f9e1e8fe5363..1e1bf9ef746a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java @@ -39,6 +39,7 @@ public void testEnableOverRPC() throws Exception { doTestEnable(adminClient); } + // TODO: Need to add test cases for multiple key spaces. private void doTestEnable(KeymetaAdmin adminClient) throws IOException, KeyException { HMaster master = TEST_UTIL.getHBaseCluster().getMaster(); MockManagedKeyProvider managedKeyProvider = (MockManagedKeyProvider) @@ -58,12 +59,12 @@ private void doTestEnable(KeymetaAdmin adminClient) throws IOException, KeyExcep managedKeys.get(0).cloneWithoutKey()); String nonExistentCust = "nonExistentCust"; - managedKeyProvider.setKeyStatus(nonExistentCust, ManagedKeyStatus.FAILED); + managedKeyProvider.setMockedKeyStatus(nonExistentCust, ManagedKeyStatus.FAILED); assertEquals(ManagedKeyStatus.FAILED, adminClient.enableKeyManagement( ManagedKeyProvider.encodeToStr(nonExistentCust.getBytes()), ManagedKeyData.KEY_SPACE_GLOBAL)); String disabledCust = "disabledCust"; - managedKeyProvider.setKeyStatus(disabledCust, ManagedKeyStatus.DISABLED); + managedKeyProvider.setMockedKeyStatus(disabledCust, ManagedKeyStatus.DISABLED); assertEquals(ManagedKeyStatus.DISABLED, adminClient.enableKeyManagement( ManagedKeyProvider.encodeToStr(disabledCust.getBytes()), ManagedKeyData.KEY_SPACE_GLOBAL)); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdmin.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdmin.java new file mode 100644 index 000000000000..0fcb44146faf --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdmin.java @@ -0,0 +1,197 @@ +package org.apache.hadoop.hbase.master; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HBaseTestingUtil; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.Server; +import org.apache.hadoop.hbase.io.crypto.Encryption; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus; +import org.apache.hadoop.hbase.io.crypto.MockManagedKeyProvider; +import org.apache.hadoop.hbase.keymeta.KeymetaAdminImpl; +import org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor; +import org.apache.hadoop.hbase.testclassification.MasterTests; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.apache.hadoop.hbase.util.Bytes; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.TestName; +import org.junit.runner.RunWith; +import org.junit.runners.BlockJUnit4ClassRunner; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameter; +import org.junit.runners.Parameterized.Parameters; +import org.junit.runners.Suite; + +import java.io.IOException; +import java.security.Key; +import java.security.KeyException; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.argThat; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +@RunWith(Suite.class) +@Suite.SuiteClasses({ TestKeymetaAdmin.TestWhenDisabled.class, + TestKeymetaAdmin.TestAdminImpl.class }) +@Category({ MasterTests.class, SmallTests.class }) +public class TestKeymetaAdmin { + private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); + + @Rule public TestName name = new TestName(); + + protected Configuration conf; + protected Path testRootDir; + protected FileSystem fs; + + protected FileSystem mockFileSystem = mock(FileSystem.class); + protected Server mockServer = mock(Server.class); + protected KeymetaAdminImpl keymetaAdmin; + KeymetaTableAccessor mockAccessor = mock(KeymetaTableAccessor.class); + + @Before public void setUp() throws IOException { + conf = TEST_UTIL.getConfiguration(); + testRootDir = TEST_UTIL.getDataTestDir(name.getMethodName()); + fs = testRootDir.getFileSystem(conf); + + conf.set(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, "true"); + conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockManagedKeyProvider.class.getName()); + + when(mockServer.getFileSystem()).thenReturn(mockFileSystem); + when(mockServer.getConfiguration()).thenReturn(conf); + keymetaAdmin = new TestKeymetaAdminImpl(mockServer, mockAccessor); + } + + @RunWith(BlockJUnit4ClassRunner.class) @Category({ MasterTests.class, SmallTests.class }) + public static class TestWhenDisabled extends TestKeymetaAdmin { + @ClassRule public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestWhenDisabled.class); + + @Override public void setUp() throws IOException { + super.setUp(); + conf.set(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, "false"); + } + + @Test public void testDisabled() throws Exception { + assertThrows(IOException.class, + () -> keymetaAdmin.enableKeyManagement(ManagedKeyData.KEY_GLOBAL_CUSTODIAN, + ManagedKeyData.KEY_SPACE_GLOBAL)); + assertThrows(IOException.class, + () -> keymetaAdmin.getManagedKeys(ManagedKeyData.KEY_GLOBAL_CUSTODIAN, + ManagedKeyData.KEY_SPACE_GLOBAL)); + } + } + + @RunWith(Parameterized.class) + @Category({ MasterTests.class, SmallTests.class }) + public static class TestAdminImpl extends TestKeymetaAdmin { + @ClassRule public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestSystemKeyAccessor.TestAccessor.class); + + @Parameter(0) + public ManagedKeyStatus keyStatus; + @Parameter(1) + public boolean isNullKey; + + @Parameters(name = "{index},keyStatus={0}") + public static Collection data() { + return Arrays.asList(new Object[][] { + { ManagedKeyStatus.ACTIVE, false }, + { ManagedKeyStatus.FAILED, true }, + { ManagedKeyStatus.INACTIVE, false }, + { ManagedKeyStatus.DISABLED, true }, + }); + } + + @Test + public void testEnable() throws Exception { + MockManagedKeyProvider managedKeyProvider = (MockManagedKeyProvider) + Encryption.getKeyProvider(conf); + String cust = "cust1"; + managedKeyProvider.setMockedKeyStatus(cust, keyStatus); + String encodedCust = ManagedKeyProvider.encodeToStr(cust.getBytes()); + ManagedKeyStatus managedKeyStatus = + keymetaAdmin.enableKeyManagement(encodedCust, ManagedKeyData.KEY_SPACE_GLOBAL); + assertNotNull(managedKeyStatus); + verify(mockAccessor) + .addKey(argThat((ManagedKeyData keyData) -> assertKeyData(keyData, keyStatus, + isNullKey ? null : managedKeyProvider.getMockedKey(cust)))); + + keymetaAdmin.getManagedKeys(encodedCust, ManagedKeyData.KEY_SPACE_GLOBAL); + verify(mockAccessor) + .getAllKeys(argThat((arr) -> Bytes.compareTo(cust.getBytes(), arr) == 0), + eq(ManagedKeyData.KEY_SPACE_GLOBAL)); + } + } + + @RunWith(BlockJUnit4ClassRunner.class) + @Category({ MasterTests.class, SmallTests.class }) + public static class TestForInvalid extends TestKeymetaAdmin { + @ClassRule public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestKeymetaAdmin.TestForInvalid.class); + + @Test + public void testForKeyProviderNullReturn() throws Exception { + MockManagedKeyProvider managedKeyProvider = (MockManagedKeyProvider) + Encryption.getKeyProvider(conf); + String cust = "cust1"; + String encodedCust = ManagedKeyProvider.encodeToStr(cust.getBytes()); + managedKeyProvider.setMockedKey(cust, null); + assertThrows(IOException.class, () -> keymetaAdmin.enableKeyManagement(encodedCust, + ManagedKeyData.KEY_SPACE_GLOBAL)); + + } + } + + + private class TestKeymetaAdminImpl extends KeymetaAdminImpl { + public TestKeymetaAdminImpl(Server mockServer, KeymetaTableAccessor mockAccessor) { + super(mockServer); + } + + @Override + public void addKey(ManagedKeyData keyData) throws IOException { + mockAccessor.addKey(keyData); + } + + @Override + public List getAllKeys(byte[] key_cust, String keyNamespace) + throws IOException, KeyException { + return mockAccessor.getAllKeys(key_cust, keyNamespace); + } + } + + protected boolean assertKeyData(ManagedKeyData keyData, ManagedKeyStatus expKeyStatus, + Key expectedKey) { + assertNotNull(keyData); + assertEquals(expKeyStatus, keyData.getKeyStatus()); + if (expectedKey == null) { + assertNull(keyData.getTheKey()); + } + else { + byte[] keyBytes = keyData.getTheKey().getEncoded(); + byte[] expectedKeyBytes = expectedKey.getEncoded(); + assertEquals(expectedKeyBytes.length, keyBytes.length); + assertEquals(new Bytes(expectedKeyBytes), keyBytes); + } + return true; + } +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java index acc1094da62d..34822e4e59ff 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java @@ -65,7 +65,7 @@ public void testSystemKeyInitializationAndRotation() throws Exception { String newAlias = "new_cluster_key"; pbeKeyProvider.setCluterKeyAlias(newAlias); Key newCluterKey = MockManagedKeyProvider.generateSecretKey(); - pbeKeyProvider.setKey(newAlias, newCluterKey); + pbeKeyProvider.setMockedKey(newAlias, newCluterKey); restartSystem(); master = TEST_UTIL.getHBaseCluster().getMaster(); SystemKeyAccessor systemKeyAccessor = new SystemKeyAccessor(master); @@ -88,7 +88,7 @@ public void testWithInvalidSystemKey() throws Exception { // Test startup failure when the cluster key is INACTIVE SystemKeyManager tmpCKM = new SystemKeyManager(master); tmpCKM.ensureSystemKeyInitialized(); - pbeKeyProvider.setKeyStatus(pbeKeyProvider.getSystemKeyAlias(), ManagedKeyStatus.INACTIVE); + pbeKeyProvider.setMockedKeyStatus(pbeKeyProvider.getSystemKeyAlias(), ManagedKeyStatus.INACTIVE); assertThrows(IOException.class, tmpCKM::ensureSystemKeyInitialized); } From 217fa9cf237262ffde810bcd831788ed787f54c3 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Wed, 16 Apr 2025 22:17:11 +0530 Subject: [PATCH 20/70] Updated the enable API to return a list of key data --- .../hbase/keymeta/KeymetaAdminClient.java | 29 ++++++------ .../hadoop/hbase/keymeta/KeymetaAdmin.java | 6 +-- .../main/protobuf/server/ManagedKeys.proto | 2 +- .../hbase/keymeta/KeymetaAdminImpl.java | 10 ++-- .../hbase/keymeta/KeymetaServiceEndpoint.java | 46 +++++++++---------- .../hbase/keymeta/TestManagedKeymeta.java | 24 +++++++--- .../hadoop/hbase/master/TestKeymetaAdmin.java | 6 ++- .../shell/commands/enable_key_management.rb | 9 ++-- .../shell/commands/keymeta_command_base.rb | 40 ++++++++++++++++ .../ruby/shell/commands/show_key_status.rb | 20 ++------ 10 files changed, 115 insertions(+), 77 deletions(-) create mode 100644 hbase-shell/src/main/ruby/shell/commands/keymeta_command_base.rb diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java index 41d048774713..cc0b52c13275 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java @@ -26,13 +26,12 @@ public KeymetaAdminClient(Connection conn) throws IOException { } @Override - public ManagedKeyStatus enableKeyManagement(String keyCust, String keyNamespace) + public List enableKeyManagement(String keyCust, String keyNamespace) throws IOException { try { - ManagedKeysResponse response = stub.enableKeyManagement(null, + ManagedKeysProtos.GetManagedKeysResponse response = stub.enableKeyManagement(null, ManagedKeysRequest.newBuilder().setKeyCust(keyCust).setKeyNamespace(keyNamespace).build()); - LOG.info("Got response: " + response); - return ManagedKeyStatus.forValue((byte) response.getKeyStatus().getNumber()); + return generateKeyDataList(response); } catch (ServiceException e) { throw ProtobufUtil.handleRemoteException(e); } @@ -41,21 +40,25 @@ public ManagedKeyStatus enableKeyManagement(String keyCust, String keyNamespace) @Override public List getManagedKeys(String keyCust, String keyNamespace) throws IOException, KeyException { - List keyStatuses = new ArrayList<>(); try { ManagedKeysProtos.GetManagedKeysResponse statusResponse = stub.getManagedKeys(null, ManagedKeysRequest.newBuilder().setKeyCust(keyCust).setKeyNamespace(keyNamespace).build()); - for (ManagedKeysResponse status: statusResponse.getStatusList()) { - keyStatuses.add(new ManagedKeyData( - status.getKeyCustBytes().toByteArray(), - status.getKeyNamespace(), null, - ManagedKeyStatus.forValue((byte) status.getKeyStatus().getNumber()), - status.getKeyMetadata(), - status.getRefreshTimestamp(), status.getReadOpCount(), status.getWriteOpCount())); - } + return generateKeyDataList(statusResponse); } catch (ServiceException e) { throw ProtobufUtil.handleRemoteException(e); } + } + + private static List generateKeyDataList(ManagedKeysProtos.GetManagedKeysResponse statusResponse) { + List keyStatuses = new ArrayList<>(); + for (ManagedKeysResponse status: statusResponse.getStatusList()) { + keyStatuses.add(new ManagedKeyData( + status.getKeyCustBytes().toByteArray(), + status.getKeyNamespace(), null, + ManagedKeyStatus.forValue((byte) status.getKeyStatus().getNumber()), + status.getKeyMetadata(), + status.getRefreshTimestamp(), status.getReadOpCount(), status.getWriteOpCount())); + } return keyStatuses; } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java index 4ff4a88a2f15..cd662232a24c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java @@ -36,17 +36,17 @@ public interface KeymetaAdmin { * @param keyCust The key custodian in base64 encoded format. * @param keyNamespace The namespace for the key management. * - * @return The current status of the managed key. + * @return The list of {@link ManagedKeyData} objects each identifying the key and its current status. * @throws IOException if an error occurs while enabling key management. */ - ManagedKeyStatus enableKeyManagement(String keyCust, String keyNamespace) throws IOException; + List enableKeyManagement(String keyCust, String keyNamespace) throws IOException; /** * Get the status of all the keys for the specified custodian. * * @param keyCust The key custodian in base64 encoded format. * @param keyNamespace The namespace for the key management. - * @return The list of status objects each identifying the key and its current status. + * @return The list of {@link ManagedKeyData} objects each identifying the key and its current status. * @throws IOException if an error occurs while enabling key management. */ List getManagedKeys(String keyCust, String keyNamespace) diff --git a/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto b/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto index 5b87ef8c8aff..4f6bc7abac7e 100644 --- a/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto +++ b/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto @@ -52,7 +52,7 @@ message GetManagedKeysResponse { service ManagedKeysService { rpc EnableKeyManagement(ManagedKeysRequest) - returns (ManagedKeysResponse); + returns (GetManagedKeysResponse); rpc GetManagedKeys(ManagedKeysRequest) returns (GetManagedKeysResponse); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java index acca80ac609c..04e917da03f8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java @@ -20,7 +20,6 @@ import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; -import org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -39,7 +38,7 @@ public KeymetaAdminImpl(Server server) { } @Override - public ManagedKeyStatus enableKeyManagement(String keyCust, String keyNamespace) + public List enableKeyManagement(String keyCust, String keyNamespace) throws IOException { assertKeyManagementEnabled(); LOG.info("Trying to enable key management on custodian: {} under namespace: {}", keyCust, @@ -48,9 +47,8 @@ public ManagedKeyStatus enableKeyManagement(String keyCust, String keyNamespace) ManagedKeyProvider provider = getKeyProvider(); int perPrefixActiveKeyConfCount = getPerPrefixActiveKeyConfCount(); Set retrievedKeys = new HashSet<>(perPrefixActiveKeyConfCount); - ManagedKeyData pbeKey = null; for (int i = 0; i < perPrefixActiveKeyConfCount; ++i) { - pbeKey = provider.getManagedKey(key_cust, keyNamespace); + ManagedKeyData pbeKey = provider.getManagedKey(key_cust, keyNamespace); if (pbeKey == null) { throw new IOException("Invalid null managed key received from key provider"); } @@ -66,9 +64,7 @@ public ManagedKeyStatus enableKeyManagement(String keyCust, String keyNamespace) pbeKey.getKeyStatus(), pbeKey.getKeyMetadata(), keyCust); addKey(pbeKey); } - // pbeKey can't be null at this point as perPrefixActiveKeyConfCount will always be > 0, - // but the null check is needed to avoid any warning. - return pbeKey == null ? null : pbeKey.getKeyStatus(); + return retrievedKeys.stream().toList(); } @Override diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java index 2e084c8f05f6..14d045fadd49 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java @@ -22,7 +22,6 @@ import org.apache.hadoop.hbase.coprocessor.HasMasterServices; import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; -import org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus; import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils; import org.apache.hadoop.hbase.master.MasterServices; import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos; @@ -103,53 +102,34 @@ private class KeyMetaAdminServiceImpl extends ManagedKeysService { */ @Override public void enableKeyManagement(RpcController controller, ManagedKeysRequest request, - RpcCallback done) { + RpcCallback done) { ManagedKeysResponse.Builder builder = getResponseBuilder(controller, request); if (builder.getKeyCust() != null) { try { - ManagedKeyStatus managedKeyStatus = master.getKeymetaAdmin() + List managedKeyStatuses = master.getKeymetaAdmin() .enableKeyManagement(request.getKeyCust(), request.getKeyNamespace()); - builder.setKeyStatus(ManagedKeysProtos.ManagedKeyStatus.valueOf( - managedKeyStatus.getVal())); + done.run(generateKeyStatusResponse(managedKeyStatuses, builder)); } catch (IOException e) { CoprocessorRpcUtils.setControllerException(controller, e); - builder.setKeyStatus(ManagedKeysProtos.ManagedKeyStatus.KEY_FAILED); } } - done.run(builder.build()); } @Override public void getManagedKeys(RpcController controller, ManagedKeysRequest request, RpcCallback done) { - GetManagedKeysResponse.Builder responseBuilder = - GetManagedKeysResponse.newBuilder(); ManagedKeysResponse.Builder builder = getResponseBuilder(controller, request); if (builder.getKeyCust() != null) { try { List managedKeyStatuses = master.getKeymetaAdmin() .getManagedKeys(request.getKeyCust(), request.getKeyNamespace()); - for (ManagedKeyData keyData: managedKeyStatuses) { - builder.setKeyStatus( - ManagedKeysProtos.ManagedKeyStatus.valueOf(keyData.getKeyStatus().getVal())); - builder.setKeyStatus(ManagedKeysProtos.ManagedKeyStatus.valueOf( - keyData.getKeyStatus().getVal())) - .setKeyMetadata(keyData.getKeyMetadata()) - .setRefreshTimestamp(keyData.getRefreshTimestamp()) - .setReadOpCount(keyData.getReadOpCount()) - .setWriteOpCount(keyData.getWriteOpCount()) - ; - responseBuilder.addStatus(builder.build()); - } + done.run(generateKeyStatusResponse(managedKeyStatuses, builder)); } catch (IOException e) { CoprocessorRpcUtils.setControllerException(controller, e); - builder.setKeyStatus(ManagedKeysProtos.ManagedKeyStatus.KEY_FAILED); } catch (KeyException e) { CoprocessorRpcUtils.setControllerException(controller, new IOException(e)); - builder.setKeyStatus(ManagedKeysProtos.ManagedKeyStatus.KEY_FAILED); } } - done.run(responseBuilder.build()); } private byte[] convertToKeyCustBytes(RpcController controller, ManagedKeysRequest request, @@ -180,5 +160,23 @@ private ManagedKeysResponse.Builder getResponseBuilder(RpcController controller, } return builder; } + + private static GetManagedKeysResponse generateKeyStatusResponse( + List managedKeyStatuses, ManagedKeysResponse.Builder builder) { + GetManagedKeysResponse.Builder responseBuilder = GetManagedKeysResponse.newBuilder(); + for (ManagedKeyData keyData: managedKeyStatuses) { + builder.setKeyStatus( + ManagedKeysProtos.ManagedKeyStatus.valueOf(keyData.getKeyStatus().getVal())); + builder.setKeyStatus(ManagedKeysProtos.ManagedKeyStatus.valueOf( + keyData.getKeyStatus().getVal())) + .setKeyMetadata(keyData.getKeyMetadata()) + .setRefreshTimestamp(keyData.getRefreshTimestamp()) + .setReadOpCount(keyData.getReadOpCount()) + .setWriteOpCount(keyData.getWriteOpCount()) + ; + responseBuilder.addStatus(builder.build()); + } + return responseBuilder.build(); + } } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java index 1e1bf9ef746a..4cfdadcaaec1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java @@ -47,10 +47,9 @@ private void doTestEnable(KeymetaAdmin adminClient) throws IOException, KeyExcep ; String cust = "cust1"; String encodedCust = ManagedKeyProvider.encodeToStr(cust.getBytes()); - ManagedKeyStatus managedKeyStatus = + List managedKeyStatuses = adminClient.enableKeyManagement(encodedCust, ManagedKeyData.KEY_SPACE_GLOBAL); - assertNotNull(managedKeyStatus); - assertEquals(ManagedKeyStatus.ACTIVE, managedKeyStatus); + assertKeyDataListSingleKey(managedKeyStatuses, ManagedKeyStatus.ACTIVE); List managedKeys = adminClient.getManagedKeys(encodedCust, ManagedKeyData.KEY_SPACE_GLOBAL); @@ -60,12 +59,23 @@ private void doTestEnable(KeymetaAdmin adminClient) throws IOException, KeyExcep String nonExistentCust = "nonExistentCust"; managedKeyProvider.setMockedKeyStatus(nonExistentCust, ManagedKeyStatus.FAILED); - assertEquals(ManagedKeyStatus.FAILED, adminClient.enableKeyManagement( - ManagedKeyProvider.encodeToStr(nonExistentCust.getBytes()), ManagedKeyData.KEY_SPACE_GLOBAL)); + List keyDataList1 = + adminClient.enableKeyManagement(ManagedKeyProvider.encodeToStr(nonExistentCust.getBytes()), + ManagedKeyData.KEY_SPACE_GLOBAL); + assertKeyDataListSingleKey(keyDataList1, ManagedKeyStatus.FAILED); String disabledCust = "disabledCust"; managedKeyProvider.setMockedKeyStatus(disabledCust, ManagedKeyStatus.DISABLED); - assertEquals(ManagedKeyStatus.DISABLED, adminClient.enableKeyManagement( - ManagedKeyProvider.encodeToStr(disabledCust.getBytes()), ManagedKeyData.KEY_SPACE_GLOBAL)); + List keyDataList2 = + adminClient.enableKeyManagement(ManagedKeyProvider.encodeToStr(disabledCust.getBytes()), + ManagedKeyData.KEY_SPACE_GLOBAL); + assertKeyDataListSingleKey(keyDataList2, ManagedKeyStatus.DISABLED); + } + + private static void assertKeyDataListSingleKey(List managedKeyStatuses, + ManagedKeyStatus keyStatus) { + assertNotNull(managedKeyStatuses); + assertEquals(1, managedKeyStatuses.size()); + assertEquals(keyStatus, managedKeyStatuses.get(0).getKeyStatus()); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdmin.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdmin.java index 0fcb44146faf..01cba89aabb2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdmin.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdmin.java @@ -128,9 +128,11 @@ public void testEnable() throws Exception { String cust = "cust1"; managedKeyProvider.setMockedKeyStatus(cust, keyStatus); String encodedCust = ManagedKeyProvider.encodeToStr(cust.getBytes()); - ManagedKeyStatus managedKeyStatus = + List managedKeyStatuses = keymetaAdmin.enableKeyManagement(encodedCust, ManagedKeyData.KEY_SPACE_GLOBAL); - assertNotNull(managedKeyStatus); + assertNotNull(managedKeyStatuses); + assertEquals(1, managedKeyStatuses.size()); + assertEquals(keyStatus, managedKeyStatuses.get(0).getKeyStatus()); verify(mockAccessor) .addKey(argThat((ManagedKeyData keyData) -> assertKeyData(keyData, keyStatus, isNullKey ? null : managedKeyProvider.getMockedKey(cust)))); diff --git a/hbase-shell/src/main/ruby/shell/commands/enable_key_management.rb b/hbase-shell/src/main/ruby/shell/commands/enable_key_management.rb index 64ffe47ee83e..796c32f7d70b 100644 --- a/hbase-shell/src/main/ruby/shell/commands/enable_key_management.rb +++ b/hbase-shell/src/main/ruby/shell/commands/enable_key_management.rb @@ -15,9 +15,11 @@ # limitations under the License. # +require 'shell/commands/keymeta_command_base' + module Shell module Commands - class EnableKeyManagement < Command + class EnableKeyManagement < KeymetaCommandBase def help <<-EOF Enable key management for a given cust:namespace (cust in Base64 format). @@ -26,9 +28,8 @@ def help end def command(key_info) - formatter.header(['KEY', 'STATUS']) - status = keymeta_admin.enable_key_management(key_info) - formatter.row([key_info, status.toString()]) + statuses = keymeta_admin.enable_key_management(key_info) + print_key_statuses(statuses) end end end diff --git a/hbase-shell/src/main/ruby/shell/commands/keymeta_command_base.rb b/hbase-shell/src/main/ruby/shell/commands/keymeta_command_base.rb new file mode 100644 index 000000000000..79e8087cfc6b --- /dev/null +++ b/hbase-shell/src/main/ruby/shell/commands/keymeta_command_base.rb @@ -0,0 +1,40 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +module Shell + module Commands + class KeymetaCommandBase < Command + def print_key_statuses(statuses) + formatter.header(['ENCODED-KEY', 'NAMESPACE', 'STATUS', 'METADATA', 'METADATA-HASH', + 'REFRESH-TIMESTAMP', 'READ-OP-COUNT', 'WRITE-OP-COUNT']) + statuses.each { |status| + formatter.row([ + status.getKeyCustodianEncoded(), + status.getKeyNamespace(), + status.getKeyStatus().toString(), + status.getKeyMetadata(), + status.getKeyMetadataHashEncoded(), + status.getRefreshTimestamp(), + status.getReadOpCount(), + status.getWriteOpCount() + ]) + } + formatter.footer(statuses.size()) + end + end + end +end diff --git a/hbase-shell/src/main/ruby/shell/commands/show_key_status.rb b/hbase-shell/src/main/ruby/shell/commands/show_key_status.rb index 540f5feb4cae..5866285884c9 100644 --- a/hbase-shell/src/main/ruby/shell/commands/show_key_status.rb +++ b/hbase-shell/src/main/ruby/shell/commands/show_key_status.rb @@ -15,9 +15,11 @@ # limitations under the License. # +require 'shell/commands/keymeta_command_base' + module Shell module Commands - class ShowKeyStatus < Command + class ShowKeyStatus < KeymetaCommandBase def help <<-EOF Show key statuses for a given cust:namespace (cust in Base64 format). @@ -26,22 +28,8 @@ def help end def command(key_info) - formatter.header(['ENCODED-KEY', 'NAMESPACE', 'STATUS', 'METADATA', 'METADATA-HASH', - 'REFRESH-TIMESTAMP', 'READ-OP-COUNT', 'WRITE-OP-COUNT']) statuses = keymeta_admin.get_key_statuses(key_info) - statuses.each { |status| - formatter.row([ - status.getKeyCustodianEncoded(), - status.getKeyNamespace(), - status.getKeyStatus().toString(), - status.getKeyMetadata(), - status.getKeyMetadataHashEncoded(), - status.getRefreshTimestamp(), - status.getReadOpCount(), - status.getWriteOpCount() - ]) - } - formatter.footer(statuses.size()) + print_key_statuses(statuses) end end end From bdb07ebf6d4327c4c4ad0ecce84b62deafe011f0 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Mon, 21 Apr 2025 19:03:04 +0530 Subject: [PATCH 21/70] added test coverage --- .../hbase/io/crypto/ManagedKeyData.java | 10 +- .../io/crypto/MockManagedKeyProvider.java | 101 ++++++--- .../hbase/io/crypto/TestManagedKeyData.java | 201 ++++++++++++++++++ .../hbase/keymeta/TestManagedKeymeta.java | 4 +- ...taAdmin.java => TestKeymetaAdminImpl.java} | 37 ++-- .../hadoop/hbase/master/TestSystemKey.java | 4 +- 6 files changed, 301 insertions(+), 56 deletions(-) create mode 100644 hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyData.java rename hbase-server/src/test/java/org/apache/hadoop/hbase/master/{TestKeymetaAdmin.java => TestKeymetaAdminImpl.java} (85%) diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java index 5b5c2afda9ea..aa0e168297e0 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java @@ -183,10 +183,11 @@ public String getKeyMetadata() { return keyMetadata; } - @Override public String toString() { + @Override + public String toString() { return "ManagedKeyData{" + "keyCustodian=" + Arrays.toString(keyCust) + ", keyNamespace='" + keyNamespace + '\'' + ", keyStatus=" + keyStatus + ", keyMetadata='" + keyMetadata + '\'' - + ", refreshTimestamp=" + refreshTimestamp + '}'; + + ", refreshTimestamp=" + refreshTimestamp + ", keyChecksum=" + getKeyChecksum() + '}'; } public long getRefreshTimestamp() { @@ -213,9 +214,12 @@ public long getWriteOpCount() { * Computes the checksum of the key. If the checksum has already been computed, this method * returns the previously computed value. The checksum is computed using the CRC32C algorithm. * - * @return The checksum of the key as a long value. + * @return The checksum of the key as a long value, {@code 0} if no key is available. */ public long getKeyChecksum() { + if (theKey == null) { + return 0; + } if (keyChecksum == 0) { keyChecksum = constructKeyChecksum(theKey.getEncoded()); } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java index 6850104629d6..3383d4a6892a 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java @@ -37,53 +37,83 @@ public class MockManagedKeyProvider extends MockAesKeyProvider implements ManagedKeyProvider { protected static final Logger LOG = LoggerFactory.getLogger(MockManagedKeyProvider.class); - public Map keys = new HashMap<>(); - public Map keyStatus = new HashMap<>(); + private boolean multikeyGenMode; + private Map> keys = new HashMap<>(); + private Map> lastGenKeyData = new HashMap<>(); + // Keep references of all generated keys by their full and partial metadata. + private Map allGeneratedKeys = new HashMap<>(); + private Map keyStatus = new HashMap<>(); private String systemKeyAlias = "default_system_key_alias"; - @Override public void initConfig(Configuration conf) { + @Override + public void initConfig(Configuration conf) { // NO-OP } - @Override public ManagedKeyData getSystemKey(byte[] systemId) throws IOException { - return getKey(systemId, systemKeyAlias); + @Override + public ManagedKeyData getSystemKey(byte[] systemId) throws IOException { + return getKey(systemId, systemKeyAlias, ManagedKeyData.KEY_SPACE_GLOBAL); } - @Override public ManagedKeyData getManagedKey(byte[] key_cust, String key_namespace) + @Override + public ManagedKeyData getManagedKey(byte[] key_cust, String key_namespace) throws IOException { - return getKey(key_cust); + String alias = Bytes.toString(key_cust); + return getKey(key_cust, alias, key_namespace); } - @Override public ManagedKeyData unwrapKey(String keyMetadata) throws IOException { - String[] meta_toks = keyMetadata.split(":"); - if (keys.containsKey(meta_toks[1])) { - return getKey(meta_toks[0].getBytes(), meta_toks[1]); + @Override + public ManagedKeyData unwrapKey(String keyMetadata) throws IOException { + if (allGeneratedKeys.containsKey(keyMetadata)) { + String[] meta_toks = keyMetadata.split(":"); + ManagedKeyStatus keyStatus = this.keyStatus.get(meta_toks[1]); + ManagedKeyData managedKeyData = + new ManagedKeyData(meta_toks[0].getBytes(), ManagedKeyData.KEY_SPACE_GLOBAL, + allGeneratedKeys.get(keyMetadata), + keyStatus == null ? ManagedKeyStatus.ACTIVE : keyStatus, keyMetadata); + return registerKeyData(meta_toks[1], managedKeyData); } return null; } - /** - * Lookup the key data for the given key_cust from keys. If missing, initialize one using - * generateSecretKey(). - */ - public ManagedKeyData getKey(byte[] key_cust) { - String alias = Bytes.toString(key_cust); - return getKey(key_cust, alias); + public ManagedKeyData getLastGeneratedKeyData(String alias, String keyNamespace) { + if (! lastGenKeyData.containsKey(keyNamespace)) { + return null; + } + return lastGenKeyData.get(keyNamespace).get(alias); + } + + private ManagedKeyData registerKeyData(String alias, ManagedKeyData managedKeyData) { + if (! lastGenKeyData.containsKey(managedKeyData.getKeyNamespace())) { + lastGenKeyData.put(managedKeyData.getKeyNamespace(), new HashMap<>()); + } + lastGenKeyData.get(managedKeyData.getKeyNamespace()).put(alias, + managedKeyData); + return managedKeyData; + } + + public void setMultikeyGenMode(boolean multikeyGenMode) { + this.multikeyGenMode = multikeyGenMode; } public void setMockedKeyStatus(String alias, ManagedKeyStatus status) { keyStatus.put(alias, status); } - public void setMockedKey(String alias, Key key) { - keys.put(alias, key); + public void setMockedKey(String alias, Key key, String keyNamespace) { + if (! keys.containsKey(keyNamespace)) { + keys.put(keyNamespace, new HashMap<>()); + } + Map keysForSpace = keys.get(keyNamespace); + keysForSpace.put(alias, key); } - public Key getMockedKey(String alias) { - return keys.get(alias); + public Key getMockedKey(String alias, String keySpace) { + Map keysForSpace = keys.get(keySpace); + return keysForSpace != null ? keysForSpace.get(alias) : null; } - public void setCluterKeyAlias(String alias) { + public void setClusterKeyAlias(String alias) { this.systemKeyAlias = alias; } @@ -106,21 +136,32 @@ public static Key generateSecretKey() { return keyGen.generateKey(); } - private ManagedKeyData getKey(byte[] key_cust, String alias) { + // TODO: look up existing key only if multi generate mode is off. + private ManagedKeyData getKey(byte[] key_cust, String alias, String key_namespace) { ManagedKeyStatus keyStatus = this.keyStatus.get(alias); + if (! keys.containsKey(key_namespace)) { + keys.put(key_namespace, new HashMap<>()); + } + Map keySpace = keys.get(key_namespace); Key key = null; if (keyStatus != ManagedKeyStatus.FAILED && keyStatus != ManagedKeyStatus.DISABLED) { - if (! keys.containsKey(alias)) { + if (! keySpace.containsKey(alias)) { key = generateSecretKey(); - keys.put(alias, key); + keySpace.put(alias, key); } - key = keys.get(alias); + key = keySpace.get(alias); if (key == null) { return null; } } - return new ManagedKeyData(key_cust, ManagedKeyData.KEY_SPACE_GLOBAL, key, - keyStatus == null ? ManagedKeyStatus.ACTIVE : keyStatus, - Bytes.toString(key_cust)+":"+alias); + long checksum = key == null ? 0 : ManagedKeyData.constructKeyChecksum(key.getEncoded()); + String partialMetadata = Bytes.toString(key_cust) + ":" + alias; + String keyMetadata = partialMetadata + ":" + key_namespace + ":" + checksum; + allGeneratedKeys.put(partialMetadata, key); + allGeneratedKeys.put(keyMetadata, key); + ManagedKeyData managedKeyData = + new ManagedKeyData(key_cust, ManagedKeyData.KEY_SPACE_GLOBAL, key, + keyStatus == null ? ManagedKeyStatus.ACTIVE : keyStatus, keyMetadata); + return registerKeyData(alias, managedKeyData); } } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyData.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyData.java new file mode 100644 index 000000000000..98349eee8e9d --- /dev/null +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyData.java @@ -0,0 +1,201 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.io.crypto; + +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.testclassification.MiscTests; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import javax.crypto.KeyGenerator; +import java.security.Key; +import java.security.NoSuchAlgorithmException; +import java.util.Base64; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; + +@Category({ MiscTests.class, SmallTests.class }) +public class TestManagedKeyData { + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestManagedKeyData.class); + + private byte[] keyCust; + private String keyNamespace; + private Key theKey; + private ManagedKeyStatus keyStatus; + private String keyMetadata; + private ManagedKeyData managedKeyData; + + @Before + public void setUp() throws NoSuchAlgorithmException { + keyCust = "testCustodian".getBytes(); + keyNamespace = "testNamespace"; + KeyGenerator keyGen = KeyGenerator.getInstance("AES"); + keyGen.init(256); + theKey = keyGen.generateKey(); + keyStatus = ManagedKeyStatus.ACTIVE; + keyMetadata = "testMetadata"; + managedKeyData = new ManagedKeyData(keyCust, keyNamespace, theKey, keyStatus, keyMetadata); + } + + @Test + public void testConstructor() { + assertNotNull(managedKeyData); + assertEquals(keyNamespace, managedKeyData.getKeyNamespace()); + assertArrayEquals(keyCust, managedKeyData.getKeyCustodian()); + assertEquals(theKey, managedKeyData.getTheKey()); + assertEquals(keyStatus, managedKeyData.getKeyStatus()); + assertEquals(keyMetadata, managedKeyData.getKeyMetadata()); + } + + @Test + public void testConstructorWithCounts() { + long refreshTimestamp = System.currentTimeMillis(); + long readOpCount = 10; + long writeOpCount = 5; + ManagedKeyData keyDataWithCounts = + new ManagedKeyData(keyCust, keyNamespace, theKey, keyStatus, keyMetadata, refreshTimestamp, + readOpCount, writeOpCount); + + assertEquals(refreshTimestamp, keyDataWithCounts.getRefreshTimestamp()); + assertEquals(readOpCount, keyDataWithCounts.getReadOpCount()); + assertEquals(writeOpCount, keyDataWithCounts.getWriteOpCount()); + } + + @Test + public void testConstructorNullChecks() { + assertThrows(NullPointerException.class, + () -> new ManagedKeyData(null, keyNamespace, theKey, keyStatus, keyMetadata)); + assertThrows(NullPointerException.class, + () -> new ManagedKeyData(keyCust, null, theKey, keyStatus, keyMetadata)); + assertThrows(NullPointerException.class, + () -> new ManagedKeyData(keyCust, keyNamespace, theKey, null, keyMetadata)); + assertThrows(NullPointerException.class, + () -> new ManagedKeyData(keyCust, keyNamespace, theKey, keyStatus, null)); + } + + @Test + public void testConstructorNegativeCountChecks() { + assertThrows(IllegalArgumentException.class, + () -> new ManagedKeyData(keyCust, keyNamespace, theKey, keyStatus, keyMetadata, 0, -1, 0)); + assertThrows(IllegalArgumentException.class, + () -> new ManagedKeyData(keyCust, keyNamespace, theKey, keyStatus, keyMetadata, 0, 0, -1)); + } + + @Test + public void testCloneWithoutKey() { + ManagedKeyData cloned = managedKeyData.cloneWithoutKey(); + assertNull(cloned.getTheKey()); + assertEquals(managedKeyData.getKeyCustodian(), cloned.getKeyCustodian()); + assertEquals(managedKeyData.getKeyNamespace(), cloned.getKeyNamespace()); + assertEquals(managedKeyData.getKeyStatus(), cloned.getKeyStatus()); + assertEquals(managedKeyData.getKeyMetadata(), cloned.getKeyMetadata()); + } + + @Test + public void testGetKeyCustodianEncoded() { + String encoded = managedKeyData.getKeyCustodianEncoded(); + assertNotNull(encoded); + assertArrayEquals(keyCust, Base64.getDecoder().decode(encoded)); + } + + @Test + public void testGetKeyChecksum() { + long checksum = managedKeyData.getKeyChecksum(); + assertNotEquals(0, checksum); + + // Test with null key + ManagedKeyData nullKeyData = + new ManagedKeyData(keyCust, keyNamespace, null, keyStatus, keyMetadata); + assertEquals(0, nullKeyData.getKeyChecksum()); + } + + @Test + public void testConstructKeyChecksum() { + byte[] data = "testData".getBytes(); + long checksum = ManagedKeyData.constructKeyChecksum(data); + assertNotEquals(0, checksum); + } + + @Test + public void testGetKeyMetadataHash() { + byte[] hash = managedKeyData.getKeyMetadataHash(); + assertNotNull(hash); + assertEquals(16, hash.length); // MD5 hash is 16 bytes long + } + + @Test + public void testGetKeyMetadataHashEncoded() { + String encodedHash = managedKeyData.getKeyMetadataHashEncoded(); + assertNotNull(encodedHash); + assertEquals(24, encodedHash.length()); // Base64 encoded MD5 hash is 24 characters long + } + + @Test + public void testConstructMetadataHash() { + byte[] hash = ManagedKeyData.constructMetadataHash(keyMetadata); + assertNotNull(hash); + assertEquals(16, hash.length); // MD5 hash is 16 bytes long + } + + @Test + public void testToString() { + String toString = managedKeyData.toString(); + assertTrue(toString.contains("keyCustodian")); + assertTrue(toString.contains("keyNamespace")); + assertTrue(toString.contains("keyStatus")); + assertTrue(toString.contains("keyMetadata")); + assertTrue(toString.contains("refreshTimestamp")); + assertTrue(toString.contains("keyChecksum")); + } + + @Test + public void testEquals() { + ManagedKeyData same = new ManagedKeyData(keyCust, keyNamespace, theKey, keyStatus, keyMetadata); + assertEquals(managedKeyData, same); + + ManagedKeyData different = + new ManagedKeyData("differentCust".getBytes(), keyNamespace, theKey, keyStatus, keyMetadata); + assertNotEquals(managedKeyData, different); + } + + @Test + public void testHashCode() { + ManagedKeyData same = new ManagedKeyData(keyCust, keyNamespace, theKey, keyStatus, keyMetadata); + assertEquals(managedKeyData.hashCode(), same.hashCode()); + + ManagedKeyData different = + new ManagedKeyData("differentCust".getBytes(), keyNamespace, theKey, keyStatus, keyMetadata); + assertNotEquals(managedKeyData.hashCode(), different.hashCode()); + } + + @Test + public void testConstants() { + assertEquals("*", ManagedKeyData.KEY_SPACE_GLOBAL); + assertEquals(ManagedKeyProvider.encodeToStr(ManagedKeyData.KEY_SPACE_GLOBAL.getBytes()), + ManagedKeyData.KEY_GLOBAL_CUSTODIAN); + } +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java index 4cfdadcaaec1..77fe308d99df 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java @@ -54,8 +54,8 @@ private void doTestEnable(KeymetaAdmin adminClient) throws IOException, KeyExcep List managedKeys = adminClient.getManagedKeys(encodedCust, ManagedKeyData.KEY_SPACE_GLOBAL); assertEquals(1, managedKeys.size()); - assertEquals(managedKeyProvider.getKey(cust.getBytes()).cloneWithoutKey(), - managedKeys.get(0).cloneWithoutKey()); + assertEquals(managedKeyProvider.getLastGeneratedKeyData(cust, + ManagedKeyData.KEY_SPACE_GLOBAL).cloneWithoutKey(), managedKeys.get(0).cloneWithoutKey()); String nonExistentCust = "nonExistentCust"; managedKeyProvider.setMockedKeyStatus(nonExistentCust, ManagedKeyStatus.FAILED); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdmin.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java similarity index 85% rename from hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdmin.java rename to hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java index 01cba89aabb2..c57bb1f500ec 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdmin.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java @@ -41,19 +41,17 @@ import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThrows; -import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.argThat; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @RunWith(Suite.class) -@Suite.SuiteClasses({ TestKeymetaAdmin.TestWhenDisabled.class, - TestKeymetaAdmin.TestAdminImpl.class }) +@Suite.SuiteClasses({ TestKeymetaAdminImpl.TestWhenDisabled.class, + TestKeymetaAdminImpl.TestAdminImpl.class, TestKeymetaAdminImpl.TestForInvalid.class }) @Category({ MasterTests.class, SmallTests.class }) -public class TestKeymetaAdmin { +public class TestKeymetaAdminImpl { private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); @Rule public TestName name = new TestName(); @@ -77,11 +75,11 @@ public class TestKeymetaAdmin { when(mockServer.getFileSystem()).thenReturn(mockFileSystem); when(mockServer.getConfiguration()).thenReturn(conf); - keymetaAdmin = new TestKeymetaAdminImpl(mockServer, mockAccessor); + keymetaAdmin = new DummyKeymetaAdminImpl(mockServer, mockAccessor); } @RunWith(BlockJUnit4ClassRunner.class) @Category({ MasterTests.class, SmallTests.class }) - public static class TestWhenDisabled extends TestKeymetaAdmin { + public static class TestWhenDisabled extends TestKeymetaAdminImpl { @ClassRule public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestWhenDisabled.class); @@ -102,9 +100,9 @@ public static class TestWhenDisabled extends TestKeymetaAdmin { @RunWith(Parameterized.class) @Category({ MasterTests.class, SmallTests.class }) - public static class TestAdminImpl extends TestKeymetaAdmin { + public static class TestAdminImpl extends TestKeymetaAdminImpl { @ClassRule public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestSystemKeyAccessor.TestAccessor.class); + HBaseClassTestRule.forClass(TestAdminImpl.class); @Parameter(0) public ManagedKeyStatus keyStatus; @@ -135,7 +133,8 @@ public void testEnable() throws Exception { assertEquals(keyStatus, managedKeyStatuses.get(0).getKeyStatus()); verify(mockAccessor) .addKey(argThat((ManagedKeyData keyData) -> assertKeyData(keyData, keyStatus, - isNullKey ? null : managedKeyProvider.getMockedKey(cust)))); + isNullKey ? null : managedKeyProvider.getMockedKey(cust, + ManagedKeyData.KEY_SPACE_GLOBAL)))); keymetaAdmin.getManagedKeys(encodedCust, ManagedKeyData.KEY_SPACE_GLOBAL); verify(mockAccessor) @@ -146,26 +145,26 @@ public void testEnable() throws Exception { @RunWith(BlockJUnit4ClassRunner.class) @Category({ MasterTests.class, SmallTests.class }) - public static class TestForInvalid extends TestKeymetaAdmin { + public static class TestForInvalid extends TestKeymetaAdminImpl { @ClassRule public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestKeymetaAdmin.TestForInvalid.class); + HBaseClassTestRule.forClass(TestForInvalid.class); @Test public void testForKeyProviderNullReturn() throws Exception { MockManagedKeyProvider managedKeyProvider = (MockManagedKeyProvider) Encryption.getKeyProvider(conf); - String cust = "cust1"; + String cust = "invalidcust1"; String encodedCust = ManagedKeyProvider.encodeToStr(cust.getBytes()); - managedKeyProvider.setMockedKey(cust, null); - assertThrows(IOException.class, () -> keymetaAdmin.enableKeyManagement(encodedCust, - ManagedKeyData.KEY_SPACE_GLOBAL)); - + managedKeyProvider.setMockedKey(cust, null, ManagedKeyData.KEY_SPACE_GLOBAL); + IOException ex = assertThrows(IOException.class, + () -> keymetaAdmin.enableKeyManagement(encodedCust, ManagedKeyData.KEY_SPACE_GLOBAL)); + assertEquals("Invalid null managed key received from key provider", ex.getMessage()); } } - private class TestKeymetaAdminImpl extends KeymetaAdminImpl { - public TestKeymetaAdminImpl(Server mockServer, KeymetaTableAccessor mockAccessor) { + private class DummyKeymetaAdminImpl extends KeymetaAdminImpl { + public DummyKeymetaAdminImpl(Server mockServer, KeymetaTableAccessor mockAccessor) { super(mockServer); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java index 34822e4e59ff..cc5926ec1c01 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java @@ -63,9 +63,9 @@ public void testSystemKeyInitializationAndRotation() throws Exception { // Test rotation of cluster key by changing the key that the key provider provides and restart master. String newAlias = "new_cluster_key"; - pbeKeyProvider.setCluterKeyAlias(newAlias); + pbeKeyProvider.setClusterKeyAlias(newAlias); Key newCluterKey = MockManagedKeyProvider.generateSecretKey(); - pbeKeyProvider.setMockedKey(newAlias, newCluterKey); + pbeKeyProvider.setMockedKey(newAlias, newCluterKey, ManagedKeyData.KEY_SPACE_GLOBAL); restartSystem(); master = TEST_UTIL.getHBaseCluster().getMaster(); SystemKeyAccessor systemKeyAccessor = new SystemKeyAccessor(master); From 4626d2dbad9ff7435831ece5262d9ca599e49fef Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Tue, 22 Apr 2025 20:32:51 +0530 Subject: [PATCH 22/70] added test coverage --- .../io/crypto/MockManagedKeyProvider.java | 3 +- .../io/crypto/TestManagedKeyProvider.java | 279 ++++++++++++++++++ .../TestManagedKeyStoreKeyProvider.java | 210 ------------- .../hbase/master/TestKeymetaAdminImpl.java | 159 +++++++--- 4 files changed, 402 insertions(+), 249 deletions(-) create mode 100644 hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyProvider.java delete mode 100644 hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyStoreKeyProvider.java diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java index 3383d4a6892a..27f1e6c1d093 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java @@ -136,7 +136,6 @@ public static Key generateSecretKey() { return keyGen.generateKey(); } - // TODO: look up existing key only if multi generate mode is off. private ManagedKeyData getKey(byte[] key_cust, String alias, String key_namespace) { ManagedKeyStatus keyStatus = this.keyStatus.get(alias); if (! keys.containsKey(key_namespace)) { @@ -145,7 +144,7 @@ private ManagedKeyData getKey(byte[] key_cust, String alias, String key_namespac Map keySpace = keys.get(key_namespace); Key key = null; if (keyStatus != ManagedKeyStatus.FAILED && keyStatus != ManagedKeyStatus.DISABLED) { - if (! keySpace.containsKey(alias)) { + if (multikeyGenMode || ! keySpace.containsKey(alias)) { key = generateSecretKey(); keySpace.put(alias, key); } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyProvider.java new file mode 100644 index 000000000000..1918ae78d31d --- /dev/null +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyProvider.java @@ -0,0 +1,279 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.io.crypto; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.testclassification.MiscTests; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.GsonUtil; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.junit.runners.BlockJUnit4ClassRunner; +import org.junit.runners.Parameterized; +import org.junit.runners.Suite; +import javax.crypto.spec.SecretKeySpec; +import java.security.KeyStore; +import java.security.MessageDigest; +import java.util.Arrays; +import java.util.Base64; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; +import java.util.UUID; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStoreKeyProvider.KEY_METADATA_ALIAS; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStoreKeyProvider.KEY_METADATA_CUST; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +@RunWith(Suite.class) +@Suite.SuiteClasses({ TestManagedKeyProvider.TestManagedKeyStoreKeyProvider.class, + TestManagedKeyProvider.TestManagedKeyProviderDefault.class, }) +@Category({ MiscTests.class, SmallTests.class }) +public class TestManagedKeyProvider { + @RunWith(Parameterized.class) + @Category({ MiscTests.class, SmallTests.class }) + public static class TestManagedKeyStoreKeyProvider extends TestKeyStoreKeyProvider { + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestManagedKeyStoreKeyProvider.class); + + private static final String MASTER_KEY_ALIAS = "master-alias"; + + private Configuration conf = HBaseConfiguration.create(); + private int nPrefixes = 2; + private ManagedKeyProvider managedKeyProvider; + private Map prefix2key = new HashMap<>(); + private Map prefix2alias = new HashMap<>(); + private String clusterId; + private byte[] masterKey; + + @Before + public void setUp() throws Exception { + super.setUp();; + managedKeyProvider = (ManagedKeyProvider) provider; + managedKeyProvider.initConfig(conf); + } + + protected KeyProvider createProvider() { + return new ManagedKeyStoreKeyProvider(); + } + + protected void addCustomEntries(KeyStore store, Properties passwdProps) throws Exception { + super.addCustomEntries(store, passwdProps); + for (int i = 0; i < nPrefixes; ++i) { + String prefix = "prefix+ " + i; + String alias = prefix + "-alias"; + byte[] key = MessageDigest.getInstance("SHA-256").digest(Bytes.toBytes(alias)); + prefix2alias.put(new Bytes(prefix.getBytes()), alias); + prefix2key.put(new Bytes(prefix.getBytes()), new Bytes(key)); + store.setEntry(alias, new KeyStore.SecretKeyEntry(new SecretKeySpec(key, "AES")), + new KeyStore.PasswordProtection( + withPasswordOnAlias ? PASSWORD.toCharArray() : new char[0])); + + String encPrefix = Base64.getEncoder().encodeToString(prefix.getBytes()); + String confKey = HConstants.CRYPTO_MANAGED_KEY_STORE_CONF_KEY_PREFIX + encPrefix + "." + + "alias"; + conf.set(confKey, alias); + + passwdProps.setProperty(alias, PASSWORD); + + clusterId = UUID.randomUUID().toString(); + masterKey = MessageDigest.getInstance("SHA-256").digest( + Bytes.toBytes(MASTER_KEY_ALIAS)); + store.setEntry(MASTER_KEY_ALIAS, new KeyStore.SecretKeyEntry( + new SecretKeySpec(masterKey, "AES")), + new KeyStore.PasswordProtection(withPasswordOnAlias ? PASSWORD.toCharArray() : + new char[0])); + + conf.set(HConstants.CRYPTO_MANAGED_KEY_STORE_SYSTEM_KEY_NAME_CONF_KEY, MASTER_KEY_ALIAS); + + passwdProps.setProperty(MASTER_KEY_ALIAS, PASSWORD); + } + } + + private void addEntry(String alias, String prefix) { + String encPrefix = Base64.getEncoder().encodeToString(prefix.getBytes()); + String confKey = HConstants.CRYPTO_MANAGED_KEY_STORE_CONF_KEY_PREFIX + encPrefix + "." + + "alias"; + conf.set(confKey, alias); + } + + @Test + public void testGetManagedKey() throws Exception { + for (Bytes prefix : prefix2key.keySet()) { + ManagedKeyData keyData = managedKeyProvider.getManagedKey(prefix.get(), + ManagedKeyData.KEY_SPACE_GLOBAL); + assertKeyData(keyData, ManagedKeyStatus.ACTIVE, prefix2key.get(prefix).get(), prefix.get(), + prefix2alias.get(prefix)); + } + } + + @Test + public void testGetInactiveKey() throws Exception { + Bytes firstPrefix = prefix2key.keySet().iterator().next(); + String encPrefix = Base64.getEncoder().encodeToString(firstPrefix.get()); + conf.set(HConstants.CRYPTO_MANAGED_KEY_STORE_CONF_KEY_PREFIX + encPrefix + ".active", + "false"); + ManagedKeyData keyData = managedKeyProvider.getManagedKey(firstPrefix.get(), + ManagedKeyData.KEY_SPACE_GLOBAL); + assertNotNull(keyData); + assertKeyData(keyData, ManagedKeyStatus.INACTIVE, prefix2key.get(firstPrefix).get(), + firstPrefix.get(), prefix2alias.get(firstPrefix)); + } + + @Test + public void testGetInvalidKey() throws Exception { + byte[] invalidPrefixBytes = "invalid".getBytes(); + ManagedKeyData keyData = managedKeyProvider.getManagedKey(invalidPrefixBytes, + ManagedKeyData.KEY_SPACE_GLOBAL); + assertNotNull(keyData); + assertKeyData(keyData, ManagedKeyStatus.FAILED, null, invalidPrefixBytes, null); + } + + @Test + public void testGetDisabledKey() throws Exception { + byte[] invalidPrefix = new byte[] { 1, 2, 3 }; + String invalidPrefixEnc = ManagedKeyProvider.encodeToStr(invalidPrefix); + conf.set(HConstants.CRYPTO_MANAGED_KEY_STORE_CONF_KEY_PREFIX + invalidPrefixEnc + ".active", + "false"); + ManagedKeyData keyData = managedKeyProvider.getManagedKey(invalidPrefix, + ManagedKeyData.KEY_SPACE_GLOBAL); + assertNotNull(keyData); + assertKeyData(keyData, ManagedKeyStatus.DISABLED, null, + invalidPrefix, null); + } + + @Test + public void testGetSystemKey() throws Exception { + ManagedKeyData clusterKeyData = managedKeyProvider.getSystemKey(clusterId.getBytes()); + assertKeyData(clusterKeyData, ManagedKeyStatus.ACTIVE, masterKey, clusterId.getBytes(), + MASTER_KEY_ALIAS); + } + + @Test + public void testUnwrapInvalidKey() throws Exception { + String invalidAlias = "invalidAlias"; + byte[] invalidPrefix = new byte[] { 1, 2, 3 }; + String invalidPrefixEnc = ManagedKeyProvider.encodeToStr(invalidPrefix); + String invalidMetadata = ManagedKeyStoreKeyProvider.generateKeyMetadata(invalidAlias, + invalidPrefixEnc); + ManagedKeyData keyData = managedKeyProvider.unwrapKey(invalidMetadata); + assertNotNull(keyData); + assertKeyData(keyData, ManagedKeyStatus.FAILED, null, invalidPrefix, + invalidAlias); + } + + @Test + public void testUnwrapDisabledKey() throws Exception { + String invalidAlias = "invalidAlias"; + byte[] invalidPrefix = new byte[] { 1, 2, 3 }; + String invalidPrefixEnc = ManagedKeyProvider.encodeToStr(invalidPrefix); + conf.set(HConstants.CRYPTO_MANAGED_KEY_STORE_CONF_KEY_PREFIX + invalidPrefixEnc + ".active", + "false"); + String invalidMetadata = ManagedKeyStoreKeyProvider.generateKeyMetadata(invalidAlias, + invalidPrefixEnc); + ManagedKeyData keyData = managedKeyProvider.unwrapKey(invalidMetadata); + assertNotNull(keyData); + assertKeyData(keyData, ManagedKeyStatus.DISABLED, null, invalidPrefix, invalidAlias); + } + + private void assertKeyData(ManagedKeyData keyData, ManagedKeyStatus expKeyStatus, byte[] key, + byte[] prefixBytes, String alias) throws Exception { + assertNotNull(keyData); + assertEquals(expKeyStatus, keyData.getKeyStatus()); + if (key == null) { + assertNull(keyData.getTheKey()); + } + else { + byte[] keyBytes = keyData.getTheKey().getEncoded(); + assertEquals(key.length, keyBytes.length); + assertEquals(new Bytes(key), keyBytes); + } + Map keyMetadata = GsonUtil.getDefaultInstance().fromJson(keyData.getKeyMetadata(), + HashMap.class); + assertNotNull(keyMetadata); + assertEquals(new Bytes(prefixBytes), keyData.getKeyCustodian()); + assertEquals(alias, keyMetadata.get(KEY_METADATA_ALIAS)); + assertEquals(Base64.getEncoder().encodeToString(prefixBytes), + keyMetadata.get(KEY_METADATA_CUST)); + assertEquals(keyData, managedKeyProvider.unwrapKey(keyData.getKeyMetadata())); + } + } + + @RunWith(BlockJUnit4ClassRunner.class) + @Category({ MiscTests.class, SmallTests.class }) + public static class TestManagedKeyProviderDefault { + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestManagedKeyProvider.class); + + @Test public void testEncodeToStr() { + byte[] input = { 72, 101, 108, 108, 111 }; // "Hello" in ASCII + String expected = "SGVsbG8="; + String actual = ManagedKeyProvider.encodeToStr(input); + + assertEquals("Encoded string should match expected Base64 representation", expected, actual); + } + + @Test public void testDecodeToBytes() throws Exception { + String input = "SGVsbG8="; // "Hello" in Base64 + byte[] expected = { 72, 101, 108, 108, 111 }; + byte[] actual = ManagedKeyProvider.decodeToBytes(input); + + assertTrue("Decoded bytes should match expected ASCII representation", + Arrays.equals(expected, actual)); + } + + @Test public void testEncodeToStrAndDecodeToBytes() throws Exception { + byte[] originalBytes = { 1, 2, 3, 4, 5 }; + String encoded = ManagedKeyProvider.encodeToStr(originalBytes); + byte[] decoded = ManagedKeyProvider.decodeToBytes(encoded); + + assertTrue("Decoded bytes should match original bytes", + Arrays.equals(originalBytes, decoded)); + } + + @Test(expected = Exception.class) public void testDecodeToBytes_InvalidInput() + throws Exception { + String invalidInput = "This is not a valid Base64 string!"; + ManagedKeyProvider.decodeToBytes(invalidInput); + } + + @Test public void testRoundTrip_LargeInput() throws Exception { + byte[] largeInput = new byte[1000]; + for (int i = 0; i < largeInput.length; i++) { + largeInput[i] = (byte) (i % 256); + } + + String encoded = ManagedKeyProvider.encodeToStr(largeInput); + byte[] decoded = ManagedKeyProvider.decodeToBytes(encoded); + + assertTrue("Large input should survive round-trip encoding and decoding", + Arrays.equals(largeInput, decoded)); + } + } +} diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyStoreKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyStoreKeyProvider.java deleted file mode 100644 index 1aa50a48f340..000000000000 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyStoreKeyProvider.java +++ /dev/null @@ -1,210 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hbase.io.crypto; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; -import org.apache.hadoop.hbase.HBaseConfiguration; -import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.testclassification.MiscTests; -import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.hbase.util.GsonUtil; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import javax.crypto.spec.SecretKeySpec; -import java.security.KeyStore; -import java.security.MessageDigest; -import java.util.Base64; -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; -import java.util.UUID; -import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStoreKeyProvider.KEY_METADATA_ALIAS; -import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStoreKeyProvider.KEY_METADATA_CUST; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; - -@Category({ MiscTests.class, SmallTests.class }) -@RunWith(Parameterized.class) -public class TestManagedKeyStoreKeyProvider extends TestKeyStoreKeyProvider { - - private static final String MASTER_KEY_ALIAS = "master-alias"; - - private Configuration conf = HBaseConfiguration.create(); - - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestManagedKeyStoreKeyProvider.class); - private int nPrefixes = 2; - - private ManagedKeyProvider managedKeyProvider; - - private Map prefix2key = new HashMap<>(); - private Map prefix2alias = new HashMap<>(); - private String clusterId; - private byte[] masterKey; - - @Before - public void setUp() throws Exception { - super.setUp();; - managedKeyProvider = (ManagedKeyProvider) provider; - managedKeyProvider.initConfig(conf); - } - - protected KeyProvider createProvider() { - return new ManagedKeyStoreKeyProvider(); - } - - protected void addCustomEntries(KeyStore store, Properties passwdProps) throws Exception { - super.addCustomEntries(store, passwdProps); - for (int i = 0; i < nPrefixes; ++i) { - String prefix = "prefix+ " + i; - String alias = prefix + "-alias"; - byte[] key = MessageDigest.getInstance("SHA-256").digest(Bytes.toBytes(alias)); - prefix2alias.put(new Bytes(prefix.getBytes()), alias); - prefix2key.put(new Bytes(prefix.getBytes()), new Bytes(key)); - store.setEntry(alias, new KeyStore.SecretKeyEntry(new SecretKeySpec(key, "AES")), - new KeyStore.PasswordProtection(withPasswordOnAlias ? PASSWORD.toCharArray() : new char[0])); - - String encPrefix = Base64.getEncoder().encodeToString(prefix.getBytes()); - String confKey = HConstants.CRYPTO_MANAGED_KEY_STORE_CONF_KEY_PREFIX + encPrefix + "." + "alias"; - conf.set(confKey, alias); - - passwdProps.setProperty(alias, PASSWORD); - - clusterId = UUID.randomUUID().toString(); - masterKey = MessageDigest.getInstance("SHA-256").digest( - Bytes.toBytes(MASTER_KEY_ALIAS)); - store.setEntry(MASTER_KEY_ALIAS, new KeyStore.SecretKeyEntry( - new SecretKeySpec(masterKey, "AES")), - new KeyStore.PasswordProtection(withPasswordOnAlias ? PASSWORD.toCharArray() : - new char[0])); - - conf.set(HConstants.CRYPTO_MANAGED_KEY_STORE_SYSTEM_KEY_NAME_CONF_KEY, MASTER_KEY_ALIAS); - - passwdProps.setProperty(MASTER_KEY_ALIAS, PASSWORD); - } - } - - private void addEntry(String alias, String prefix) { - String encPrefix = Base64.getEncoder().encodeToString(prefix.getBytes()); - String confKey = HConstants.CRYPTO_MANAGED_KEY_STORE_CONF_KEY_PREFIX + encPrefix + "." + "alias"; - conf.set(confKey, alias); - } - - @Test - public void testGetManagedKey() throws Exception { - for (Bytes prefix : prefix2key.keySet()) { - ManagedKeyData keyData = managedKeyProvider.getManagedKey(prefix.get(), ManagedKeyData.KEY_SPACE_GLOBAL); - assertKeyData(keyData, ManagedKeyStatus.ACTIVE, prefix2key.get(prefix).get(), prefix.get(), - prefix2alias.get(prefix)); - } - } - - @Test - public void testGetInactiveKey() throws Exception { - Bytes firstPrefix = prefix2key.keySet().iterator().next(); - String encPrefix = Base64.getEncoder().encodeToString(firstPrefix.get()); - conf.set(HConstants.CRYPTO_MANAGED_KEY_STORE_CONF_KEY_PREFIX + encPrefix + ".active", "false"); - ManagedKeyData keyData = managedKeyProvider.getManagedKey(firstPrefix.get(), ManagedKeyData.KEY_SPACE_GLOBAL); - assertNotNull(keyData); - assertKeyData(keyData, ManagedKeyStatus.INACTIVE, prefix2key.get(firstPrefix).get(), - firstPrefix.get(), prefix2alias.get(firstPrefix)); - } - - @Test - public void testGetInvalidKey() throws Exception { - byte[] invalidPrefixBytes = "invalid".getBytes(); - ManagedKeyData keyData = managedKeyProvider.getManagedKey(invalidPrefixBytes, - ManagedKeyData.KEY_SPACE_GLOBAL); - assertNotNull(keyData); - assertKeyData(keyData, ManagedKeyStatus.FAILED, null, invalidPrefixBytes, null); - } - - @Test - public void testGetDisabledKey() throws Exception { - byte[] invalidPrefix = new byte[] { 1, 2, 3 }; - String invalidPrefixEnc = ManagedKeyProvider.encodeToStr(invalidPrefix); - conf.set(HConstants.CRYPTO_MANAGED_KEY_STORE_CONF_KEY_PREFIX + invalidPrefixEnc + ".active", "false"); - ManagedKeyData keyData = managedKeyProvider.getManagedKey(invalidPrefix, ManagedKeyData.KEY_SPACE_GLOBAL); - assertNotNull(keyData); - assertKeyData(keyData, ManagedKeyStatus.DISABLED, null, - invalidPrefix, null); - } - - @Test - public void testGetSystemKey() throws Exception { - ManagedKeyData clusterKeyData = managedKeyProvider.getSystemKey(clusterId.getBytes()); - assertKeyData(clusterKeyData, ManagedKeyStatus.ACTIVE, masterKey, clusterId.getBytes(), - MASTER_KEY_ALIAS); - } - - @Test - public void testUnwrapInvalidKey() throws Exception { - String invalidAlias = "invalidAlias"; - byte[] invalidPrefix = new byte[] { 1, 2, 3 }; - String invalidPrefixEnc = ManagedKeyProvider.encodeToStr(invalidPrefix); - String invalidMetadata = ManagedKeyStoreKeyProvider.generateKeyMetadata(invalidAlias, - invalidPrefixEnc); - ManagedKeyData keyData = managedKeyProvider.unwrapKey(invalidMetadata); - assertNotNull(keyData); - assertKeyData(keyData, ManagedKeyStatus.FAILED, null, invalidPrefix, - invalidAlias); - } - - @Test - public void testUnwrapDisabledKey() throws Exception { - String invalidAlias = "invalidAlias"; - byte[] invalidPrefix = new byte[] { 1, 2, 3 }; - String invalidPrefixEnc = ManagedKeyProvider.encodeToStr(invalidPrefix); - conf.set(HConstants.CRYPTO_MANAGED_KEY_STORE_CONF_KEY_PREFIX + invalidPrefixEnc + ".active", "false"); - String invalidMetadata = ManagedKeyStoreKeyProvider.generateKeyMetadata(invalidAlias, - invalidPrefixEnc); - ManagedKeyData keyData = managedKeyProvider.unwrapKey(invalidMetadata); - assertNotNull(keyData); - assertKeyData(keyData, ManagedKeyStatus.DISABLED, null, invalidPrefix, invalidAlias); - } - - private void assertKeyData(ManagedKeyData keyData, ManagedKeyStatus expKeyStatus, byte[] key, - byte[] prefixBytes, String alias) throws Exception { - assertNotNull(keyData); - assertEquals(expKeyStatus, keyData.getKeyStatus()); - if (key == null) { - assertNull(keyData.getTheKey()); - } - else { - byte[] keyBytes = keyData.getTheKey().getEncoded(); - assertEquals(key.length, keyBytes.length); - assertEquals(new Bytes(key), keyBytes); - } - Map keyMetadata = GsonUtil.getDefaultInstance().fromJson(keyData.getKeyMetadata(), - HashMap.class); - assertNotNull(keyMetadata); - assertEquals(new Bytes(prefixBytes), keyData.getKeyCustodian()); - assertEquals(alias, keyMetadata.get(KEY_METADATA_ALIAS)); - assertEquals(Base64.getEncoder().encodeToString(prefixBytes), - keyMetadata.get(KEY_METADATA_CUST)); - assertEquals(keyData, managedKeyProvider.unwrapKey(keyData.getKeyMetadata())); - } -} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java index c57bb1f500ec..5867f6de1969 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java @@ -37,24 +37,32 @@ import java.util.Collection; import java.util.List; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus.*; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThrows; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.argThat; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @RunWith(Suite.class) @Suite.SuiteClasses({ TestKeymetaAdminImpl.TestWhenDisabled.class, - TestKeymetaAdminImpl.TestAdminImpl.class, TestKeymetaAdminImpl.TestForInvalid.class }) + TestKeymetaAdminImpl.TestAdminImpl.class, + TestKeymetaAdminImpl.TestForKeyProviderNullReturn.class, + TestKeymetaAdminImpl.TestMultiKeyGen.class, + TestKeymetaAdminImpl.TestForInvalidKeyCountConfig.class, +}) @Category({ MasterTests.class, SmallTests.class }) public class TestKeymetaAdminImpl { private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); - @Rule public TestName name = new TestName(); + @Rule + public TestName name = new TestName(); protected Configuration conf; protected Path testRootDir; @@ -65,7 +73,8 @@ public class TestKeymetaAdminImpl { protected KeymetaAdminImpl keymetaAdmin; KeymetaTableAccessor mockAccessor = mock(KeymetaTableAccessor.class); - @Before public void setUp() throws IOException { + @Before + public void setUp() throws IOException { conf = TEST_UTIL.getConfiguration(); testRootDir = TEST_UTIL.getDataTestDir(name.getMethodName()); fs = testRootDir.getFileSystem(conf); @@ -78,17 +87,21 @@ public class TestKeymetaAdminImpl { keymetaAdmin = new DummyKeymetaAdminImpl(mockServer, mockAccessor); } - @RunWith(BlockJUnit4ClassRunner.class) @Category({ MasterTests.class, SmallTests.class }) + @RunWith(BlockJUnit4ClassRunner.class) + @Category({ MasterTests.class, SmallTests.class }) public static class TestWhenDisabled extends TestKeymetaAdminImpl { - @ClassRule public static final HBaseClassTestRule CLASS_RULE = + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestWhenDisabled.class); - @Override public void setUp() throws IOException { + @Override + public void setUp() throws IOException { super.setUp(); conf.set(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, "false"); } - @Test public void testDisabled() throws Exception { + @Test + public void testDisabled() throws Exception { assertThrows(IOException.class, () -> keymetaAdmin.enableKeyManagement(ManagedKeyData.KEY_GLOBAL_CUSTODIAN, ManagedKeyData.KEY_SPACE_GLOBAL)); @@ -101,28 +114,40 @@ public static class TestWhenDisabled extends TestKeymetaAdminImpl { @RunWith(Parameterized.class) @Category({ MasterTests.class, SmallTests.class }) public static class TestAdminImpl extends TestKeymetaAdminImpl { - @ClassRule public static final HBaseClassTestRule CLASS_RULE = + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestAdminImpl.class); @Parameter(0) - public ManagedKeyStatus keyStatus; + public int nKeys; @Parameter(1) + public ManagedKeyStatus keyStatus; + @Parameter(2) public boolean isNullKey; - @Parameters(name = "{index},keyStatus={0}") + @Parameters(name = "{index},nKeys={0},keyStatus={1}") public static Collection data() { - return Arrays.asList(new Object[][] { - { ManagedKeyStatus.ACTIVE, false }, - { ManagedKeyStatus.FAILED, true }, - { ManagedKeyStatus.INACTIVE, false }, - { ManagedKeyStatus.DISABLED, true }, - }); + return Arrays.asList( + new Object[][] { + { 1, ACTIVE, false }, + { 1, FAILED, true }, + { 1, INACTIVE, false }, + { 1, DISABLED, true }, + { 2, ACTIVE, false }, + }); + } + + @Override + public void setUp() throws IOException { + super.setUp(); + conf.set(HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_ACTIVE_KEY_COUNT, + Integer.toString(nKeys)); } @Test public void testEnable() throws Exception { - MockManagedKeyProvider managedKeyProvider = (MockManagedKeyProvider) - Encryption.getKeyProvider(conf); + MockManagedKeyProvider managedKeyProvider = + (MockManagedKeyProvider) Encryption.getKeyProvider(conf); String cust = "cust1"; managedKeyProvider.setMockedKeyStatus(cust, keyStatus); String encodedCust = ManagedKeyProvider.encodeToStr(cust.getBytes()); @@ -131,37 +156,97 @@ public void testEnable() throws Exception { assertNotNull(managedKeyStatuses); assertEquals(1, managedKeyStatuses.size()); assertEquals(keyStatus, managedKeyStatuses.get(0).getKeyStatus()); - verify(mockAccessor) - .addKey(argThat((ManagedKeyData keyData) -> assertKeyData(keyData, keyStatus, + verify(mockAccessor).addKey(argThat( + (ManagedKeyData keyData) -> assertKeyData(keyData, keyStatus, isNullKey ? null : managedKeyProvider.getMockedKey(cust, ManagedKeyData.KEY_SPACE_GLOBAL)))); keymetaAdmin.getManagedKeys(encodedCust, ManagedKeyData.KEY_SPACE_GLOBAL); - verify(mockAccessor) - .getAllKeys(argThat((arr) -> Bytes.compareTo(cust.getBytes(), arr) == 0), - eq(ManagedKeyData.KEY_SPACE_GLOBAL)); + verify(mockAccessor).getAllKeys( + argThat((arr) -> Bytes.compareTo(cust.getBytes(), arr) == 0), + eq(ManagedKeyData.KEY_SPACE_GLOBAL)); } } @RunWith(BlockJUnit4ClassRunner.class) @Category({ MasterTests.class, SmallTests.class }) - public static class TestForInvalid extends TestKeymetaAdminImpl { + public static class TestMultiKeyGen extends TestKeymetaAdminImpl { + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestKeymetaAdminImpl.TestMultiKeyGen.class); + + @Override + public void setUp() throws IOException { + super.setUp(); + conf.set(HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_ACTIVE_KEY_COUNT, "3"); + MockManagedKeyProvider managedKeyProvider = + (MockManagedKeyProvider) Encryption.getKeyProvider(conf); + managedKeyProvider.setMultikeyGenMode(true); + } + + @Test + public void testEnable() throws Exception { + String cust = "cust1"; + String encodedCust = ManagedKeyProvider.encodeToStr(cust.getBytes()); + List managedKeyStatuses = + keymetaAdmin.enableKeyManagement(encodedCust, ManagedKeyData.KEY_SPACE_GLOBAL); + assertNotNull(managedKeyStatuses); + assertEquals(3, managedKeyStatuses.size()); + assertEquals(ACTIVE, managedKeyStatuses.get(0).getKeyStatus()); + assertEquals(ACTIVE, managedKeyStatuses.get(1).getKeyStatus()); + verify(mockAccessor, times(3)).addKey(any()); + } + } + + @RunWith(BlockJUnit4ClassRunner.class) + @Category({ MasterTests.class, SmallTests.class }) + public static class TestForKeyProviderNullReturn extends TestKeymetaAdminImpl { + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestForKeyProviderNullReturn.class); + + @Test + public void test() throws Exception { + MockManagedKeyProvider managedKeyProvider = + (MockManagedKeyProvider) Encryption.getKeyProvider(conf); + String cust = "invalidcust1"; + String encodedCust = ManagedKeyProvider.encodeToStr(cust.getBytes()); + managedKeyProvider.setMockedKey(cust, null, ManagedKeyData.KEY_SPACE_GLOBAL); + IOException ex = assertThrows(IOException.class, + () -> keymetaAdmin.enableKeyManagement(encodedCust, ManagedKeyData.KEY_SPACE_GLOBAL)); + assertEquals("Invalid null managed key received from key provider", ex.getMessage()); + } + } + + @RunWith(Parameterized.class) + @Category({ MasterTests.class, SmallTests.class }) + public static class TestForInvalidKeyCountConfig extends TestKeymetaAdminImpl { @ClassRule public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestForInvalid.class); - - @Test - public void testForKeyProviderNullReturn() throws Exception { - MockManagedKeyProvider managedKeyProvider = (MockManagedKeyProvider) - Encryption.getKeyProvider(conf); - String cust = "invalidcust1"; - String encodedCust = ManagedKeyProvider.encodeToStr(cust.getBytes()); - managedKeyProvider.setMockedKey(cust, null, ManagedKeyData.KEY_SPACE_GLOBAL); - IOException ex = assertThrows(IOException.class, - () -> keymetaAdmin.enableKeyManagement(encodedCust, ManagedKeyData.KEY_SPACE_GLOBAL)); - assertEquals("Invalid null managed key received from key provider", ex.getMessage()); - } + HBaseClassTestRule.forClass(TestForInvalidKeyCountConfig.class); + + @Parameter(0) + public String keyCount;; + @Parameter(1) + public Class expectedExType; + @Parameters(name = "{index},keyCount={0},expectedExType={1}") + public static Collection data() { + return Arrays.asList(new Object[][] { + { "0", IOException.class }, + { "-1", IOException.class }, + { "abc", NumberFormatException.class }, + }); } + @Test + public void test() throws Exception { + conf.set(HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_ACTIVE_KEY_COUNT, keyCount); + String cust = "cust1"; + String encodedCust = ManagedKeyProvider.encodeToStr(cust.getBytes()); + assertThrows(expectedExType, () -> + keymetaAdmin.enableKeyManagement(encodedCust, + ManagedKeyData.KEY_SPACE_GLOBAL)); + } + } private class DummyKeymetaAdminImpl extends KeymetaAdminImpl { public DummyKeymetaAdminImpl(Server mockServer, KeymetaTableAccessor mockAccessor) { From 1d683256743d5fdb805547034ed48f9ecb2aef7d Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Wed, 23 Apr 2025 15:28:41 +0530 Subject: [PATCH 23/70] added test coverage --- .../hbase/keymeta/KeymetaServiceEndpoint.java | 87 +++-- .../hbase/keymeta/TestKeymetaEndpoint.java | 334 ++++++++++++++++++ .../hbase/keymeta/TestManagedKeymeta.java | 19 +- .../hbase/master/TestKeymetaAdminImpl.java | 1 + 4 files changed, 395 insertions(+), 46 deletions(-) create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java index 14d045fadd49..31ddb74cc121 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.hbase.keymeta; +import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.CoreCoprocessor; import org.apache.hadoop.hbase.coprocessor.HasMasterServices; @@ -90,7 +91,8 @@ public Iterable getServices() { * The implementation of the {@link ManagedKeysProtos.ManagedKeysService} * interface, which provides the actual method implementations for enabling key management. */ - private class KeyMetaAdminServiceImpl extends ManagedKeysService { + @VisibleForTesting + public class KeyMetaAdminServiceImpl extends ManagedKeysService { /** * Enables key management for a given tenant and namespace, as specified in the provided @@ -104,7 +106,7 @@ private class KeyMetaAdminServiceImpl extends ManagedKeysService { public void enableKeyManagement(RpcController controller, ManagedKeysRequest request, RpcCallback done) { ManagedKeysResponse.Builder builder = getResponseBuilder(controller, request); - if (builder.getKeyCust() != null) { + if (builder.getKeyCust() != null && ! builder.getKeyCust().isEmpty()) { try { List managedKeyStatuses = master.getKeymetaAdmin() .enableKeyManagement(request.getKeyCust(), request.getKeyNamespace()); @@ -119,7 +121,7 @@ public void enableKeyManagement(RpcController controller, ManagedKeysRequest req public void getManagedKeys(RpcController controller, ManagedKeysRequest request, RpcCallback done) { ManagedKeysResponse.Builder builder = getResponseBuilder(controller, request); - if (builder.getKeyCust() != null) { + if (builder.getKeyCust() != null && ! builder.getKeyCust().isEmpty()) { try { List managedKeyStatuses = master.getKeymetaAdmin() .getManagedKeys(request.getKeyCust(), request.getKeyNamespace()); @@ -131,52 +133,49 @@ public void getManagedKeys(RpcController controller, ManagedKeysRequest request, } } } + } - private byte[] convertToKeyCustBytes(RpcController controller, ManagedKeysRequest request, - ManagedKeysResponse.Builder builder) { - byte[] key_cust = null; - try { - key_cust = Base64.getDecoder().decode(request.getKeyCust()); - } catch (IllegalArgumentException e) { - builder.setKeyStatus(ManagedKeysProtos.ManagedKeyStatus.KEY_FAILED); - CoprocessorRpcUtils.setControllerException(controller, new IOException( - "Failed to decode specified prefix as Base64 string: " + request.getKeyCust(), e)); - } - return key_cust; + @VisibleForTesting + public static ManagedKeysResponse.Builder getResponseBuilder(RpcController controller, + ManagedKeysRequest request) { + ManagedKeysResponse.Builder builder = ManagedKeysResponse.newBuilder() + .setKeyNamespace(request.getKeyNamespace()); + byte[] key_cust = convertToKeyCustBytes(controller, request, builder); + if (key_cust != null) { + builder.setKeyCustBytes(ByteString.copyFrom(key_cust)); } + return builder; + } - private ManagedKeysResponse.Builder getResponseBuilder(RpcController controller, - ManagedKeysRequest request) { - ManagedKeysResponse.Builder builder = ManagedKeysResponse.newBuilder() - .setKeyNamespace(request.getKeyNamespace()); - byte[] key_cust = null; - try { - key_cust = Base64.getDecoder().decode(request.getKeyCust()); - builder.setKeyCustBytes(ByteString.copyFrom(key_cust)); - } catch (IllegalArgumentException e) { - builder.setKeyStatus(ManagedKeysProtos.ManagedKeyStatus.KEY_FAILED); - CoprocessorRpcUtils.setControllerException(controller, new IOException( - "Failed to decode specified prefix as Base64 string: " + request.getKeyCust(), e)); - } - return builder; + // Assumes that all ManagedKeyData objects belong to the same custodian and namespace. + @VisibleForTesting + public static GetManagedKeysResponse generateKeyStatusResponse( + List managedKeyStatuses, ManagedKeysResponse.Builder builder) { + GetManagedKeysResponse.Builder responseBuilder = GetManagedKeysResponse.newBuilder(); + for (ManagedKeyData keyData: managedKeyStatuses) { + builder.setKeyStatus(ManagedKeysProtos.ManagedKeyStatus.valueOf( + keyData.getKeyStatus().getVal())) + .setKeyMetadata(keyData.getKeyMetadata()) + .setRefreshTimestamp(keyData.getRefreshTimestamp()) + .setReadOpCount(keyData.getReadOpCount()) + .setWriteOpCount(keyData.getWriteOpCount()) + ; + responseBuilder.addStatus(builder.build()); } + return responseBuilder.build(); + } - private static GetManagedKeysResponse generateKeyStatusResponse( - List managedKeyStatuses, ManagedKeysResponse.Builder builder) { - GetManagedKeysResponse.Builder responseBuilder = GetManagedKeysResponse.newBuilder(); - for (ManagedKeyData keyData: managedKeyStatuses) { - builder.setKeyStatus( - ManagedKeysProtos.ManagedKeyStatus.valueOf(keyData.getKeyStatus().getVal())); - builder.setKeyStatus(ManagedKeysProtos.ManagedKeyStatus.valueOf( - keyData.getKeyStatus().getVal())) - .setKeyMetadata(keyData.getKeyMetadata()) - .setRefreshTimestamp(keyData.getRefreshTimestamp()) - .setReadOpCount(keyData.getReadOpCount()) - .setWriteOpCount(keyData.getWriteOpCount()) - ; - responseBuilder.addStatus(builder.build()); - } - return responseBuilder.build(); + @VisibleForTesting + public static byte[] convertToKeyCustBytes(RpcController controller, ManagedKeysRequest request, + ManagedKeysResponse.Builder builder) { + byte[] key_cust = null; + try { + key_cust = Base64.getDecoder().decode(request.getKeyCust()); + } catch (IllegalArgumentException e) { + builder.setKeyStatus(ManagedKeysProtos.ManagedKeyStatus.KEY_FAILED); + CoprocessorRpcUtils.setControllerException(controller, new IOException( + "Failed to decode specified prefix as Base64 string: " + request.getKeyCust(), e)); } + return key_cust; } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java new file mode 100644 index 000000000000..d5c1960c3e1e --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java @@ -0,0 +1,334 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.keymeta; + +import org.apache.hadoop.hbase.CoprocessorEnvironment; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.coprocessor.HasMasterServices; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; +import org.apache.hadoop.hbase.keymeta.KeymetaServiceEndpoint.KeyMetaAdminServiceImpl; +import org.apache.hadoop.hbase.master.MasterServices; +import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.GetManagedKeysResponse; +import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeyStatus; +import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysRequest; +import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysResponse; +import org.apache.hadoop.hbase.testclassification.MasterTests; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hbase.thirdparty.com.google.protobuf.ByteString; +import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback; +import org.apache.hbase.thirdparty.com.google.protobuf.RpcController; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import javax.crypto.spec.SecretKeySpec; +import java.io.IOException; +import java.security.KeyException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Base64; +import java.util.List; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus.ACTIVE; +import static org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeyStatus.KEY_ACTIVE; +import static org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeyStatus.KEY_FAILED; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.argThat; +import static org.mockito.ArgumentMatchers.contains; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.mockito.Mockito.withSettings; + +@Category({ MasterTests.class, SmallTests.class }) +public class TestKeymetaEndpoint { + + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestKeymetaEndpoint.class); + + private static final String KEY_CUST = "keyCust"; + private static final String KEY_NAMESPACE = "keyNamespace"; + private static final String KEY_METADATA1 = "keyMetadata1"; + private static final String KEY_METADATA2 = "keyMetadata2"; + + @Mock + private RpcController controller; + @Mock + private MasterServices master; + @Mock + private RpcCallback done; + @Mock + private KeymetaAdmin keymetaAdmin; + + KeymetaServiceEndpoint keymetaServiceEndpoint; + private ManagedKeysResponse.Builder responseBuilder; + private ManagedKeysRequest.Builder requestBuilder; + private KeyMetaAdminServiceImpl keyMetaAdminService; + private ManagedKeyData keyData1; + private ManagedKeyData keyData2; + + @Before + public void setUp() throws Exception { + MockitoAnnotations.initMocks(this); + keymetaServiceEndpoint = new KeymetaServiceEndpoint(); + CoprocessorEnvironment env = mock(CoprocessorEnvironment.class, + withSettings().extraInterfaces(HasMasterServices.class)); + when(((HasMasterServices) env).getMasterServices()).thenReturn(master); + keymetaServiceEndpoint.start(env); + keyMetaAdminService = (KeyMetaAdminServiceImpl) keymetaServiceEndpoint.getServices() + .iterator().next(); + responseBuilder = ManagedKeysResponse.newBuilder().setKeyStatus(KEY_ACTIVE); + requestBuilder = ManagedKeysRequest.newBuilder() + .setKeyNamespace(ManagedKeyData.KEY_SPACE_GLOBAL); + keyData1 = new ManagedKeyData(KEY_CUST.getBytes(), KEY_NAMESPACE, + new SecretKeySpec("key1".getBytes(), "AES"), ACTIVE, KEY_METADATA1); + keyData2 = new ManagedKeyData(KEY_CUST.getBytes(), KEY_NAMESPACE, + new SecretKeySpec("key2".getBytes(), "AES"), ACTIVE, KEY_METADATA2); + when(master.getKeymetaAdmin()).thenReturn(keymetaAdmin); + } + + @Test + public void testConvertToKeyCustBytesValid() { + // Arrange + String validBase64 = Base64.getEncoder().encodeToString("testKey".getBytes()); + ManagedKeysRequest request = requestBuilder.setKeyCust(validBase64).build(); + + // Act + byte[] result = + KeymetaServiceEndpoint.convertToKeyCustBytes(controller, request, responseBuilder); + + // Assert + assertNotNull(result); + assertArrayEquals("testKey".getBytes(), result); + assertEquals(KEY_ACTIVE, responseBuilder.getKeyStatus()); + verify(controller, never()).setFailed(anyString()); + } + + @Test + public void testConvertToKeyCustBytesInvalid() { + // Arrange + String invalidBase64 = "invalid!Base64@String"; + ManagedKeysRequest request = requestBuilder.setKeyCust(invalidBase64).build(); + + // Act + byte[] result = KeymetaServiceEndpoint.convertToKeyCustBytes(controller, request, + responseBuilder); + + // Assert + assertNull(result); + assertEquals(KEY_FAILED, responseBuilder.getKeyStatus()); + verify(controller).setFailed(anyString()); + } + + @Test + public void testGetResponseBuilder() { + // Arrange + String keyCust = Base64.getEncoder().encodeToString("testKey".getBytes()); + String keyNamespace = "testNamespace"; + ManagedKeysRequest request = requestBuilder.setKeyCust(keyCust) + .setKeyNamespace(keyNamespace) + .build(); + + // Act + ManagedKeysResponse.Builder result = KeymetaServiceEndpoint.getResponseBuilder(controller, + request); + + // Assert + assertNotNull(result); + assertEquals(keyNamespace, result.getKeyNamespace()); + assertArrayEquals("testKey".getBytes(), result.getKeyCustBytes().toByteArray()); + verify(controller, never()).setFailed(anyString()); + } + + @Test + public void testGetResponseBuilderWithInvalidBase64() { + // Arrange + String keyCust = "invalidBase64!"; + String keyNamespace = "testNamespace"; + ManagedKeysRequest request = requestBuilder.setKeyCust(keyCust) + .setKeyNamespace(keyNamespace) + .build(); + + // Act + ManagedKeysResponse.Builder result = KeymetaServiceEndpoint.getResponseBuilder(controller, request); + + // Assert + assertNotNull(result); + assertEquals(keyNamespace, result.getKeyNamespace()); + assertEquals(ManagedKeyStatus.KEY_FAILED, result.getKeyStatus()); + verify(controller).setFailed(contains("Failed to decode specified prefix as Base64 string")); + } + + @Test + public void testGenerateKeyStatusResponse() throws Exception { + // Arrange + ManagedKeysResponse response = responseBuilder.setKeyCustBytes(ByteString.copyFrom( + keyData1.getKeyCustodian())) + .setKeyNamespace(keyData1.getKeyNamespace()) + .build(); + List managedKeyStatuses = Arrays.asList(keyData1, keyData2); + + // Act + GetManagedKeysResponse result = KeymetaServiceEndpoint.generateKeyStatusResponse( + managedKeyStatuses, responseBuilder); + + // Assert + assertNotNull(response); + assertNotNull(result.getStatusList()); + assertEquals(2, result.getStatusList().size()); + assertEquals(ManagedKeyStatus.KEY_ACTIVE, result.getStatusList().get(0).getKeyStatus()); + assertTrue(Bytes.compareTo(keyData1.getKeyCustodian(), + result.getStatusList().get(0).getKeyCustBytes().toByteArray()) == 0); + assertEquals(keyData1.getKeyNamespace(), result.getStatusList().get(0).getKeyNamespace()); + verify(controller, never()).setFailed(anyString()); + } + + @Test + public void testGenerateKeyStatusResponse_Empty() throws Exception { + // Arrange + ManagedKeysResponse response = responseBuilder.setKeyCustBytes(ByteString.copyFrom( + keyData1.getKeyCustodian())) + .setKeyNamespace(keyData1.getKeyNamespace()) + .build(); + List managedKeyStatuses = new ArrayList<>(); + + // Act + GetManagedKeysResponse result = KeymetaServiceEndpoint.generateKeyStatusResponse( + managedKeyStatuses, responseBuilder); + + // Assert + assertNotNull(response); + assertNotNull(result.getStatusList()); + assertEquals(0, result.getStatusList().size()); + verify(controller, never()).setFailed(anyString()); + } + + @Test + public void testGenerateKeyStatusResponse_Success() throws Exception { + doTestServiceCallForSuccess( + (controller, request, done) -> + keyMetaAdminService.enableKeyManagement(controller, request, done)); + } + + @Test + public void testGetManagedKeys_Success() throws Exception { + doTestServiceCallForSuccess( + (controller, request, done) -> + keyMetaAdminService.getManagedKeys(controller, request, done)); + } + + private void doTestServiceCallForSuccess(ServiceCall svc) throws Exception { + // Arrange + ManagedKeysRequest request = requestBuilder.setKeyCust(KEY_CUST).build(); + List managedKeyStatuses = Arrays.asList(keyData1); + when(keymetaAdmin.enableKeyManagement(any(), any())).thenReturn(managedKeyStatuses); + + // Act + svc.call(controller, request, done); + + // Assert + verify(done).run(any()); + verify(controller, never()).setFailed(anyString()); + } + + private interface ServiceCall { + void call(RpcController controller, ManagedKeysRequest request, + RpcCallback done) throws Exception; + } + + @Test + public void testGenerateKeyStatusResponse_InvalidCust() throws Exception { + // Arrange + String invalidBase64 = "invalid!Base64@String"; + ManagedKeysRequest request = requestBuilder.setKeyCust(invalidBase64).build(); + + // Act + keyMetaAdminService.enableKeyManagement(controller, request, done); + + // Assert + verify(controller).setFailed(contains("IOException")); + verify(keymetaAdmin, never()).enableKeyManagement(any(), any()); + verify(done, never()).run(any()); + } + + @Test + public void testGenerateKeyStatusResponse_IOException() throws Exception { + // Arrange + when(keymetaAdmin.enableKeyManagement(any(), any())).thenThrow(IOException.class); + ManagedKeysRequest request = requestBuilder.setKeyCust(KEY_CUST).build(); + + // Act + keyMetaAdminService.enableKeyManagement(controller, request, done); + + // Assert + verify(controller).setFailed(contains("IOException")); + verify(keymetaAdmin).enableKeyManagement(any(), any()); + verify(done, never()).run(any()); + } + + @Test + public void testGetManagedKeys_IOException() throws Exception { + doTestGetManagedKeysError(IOException.class); + } + + @Test + public void testGetManagedKeys_KeyException() throws Exception { + doTestGetManagedKeysError(KeyException.class); + } + + private void doTestGetManagedKeysError(Class exType) throws Exception { + // Arrange + when(keymetaAdmin.getManagedKeys(any(), any())).thenThrow(exType); + ManagedKeysRequest request = requestBuilder.setKeyCust(KEY_CUST).build(); + + // Act + keyMetaAdminService.getManagedKeys(controller, request, done); + + // Assert + verify(controller).setFailed(contains(exType.getSimpleName())); + verify(keymetaAdmin).getManagedKeys(any(), any()); + verify(done, never()).run(any()); + } + + @Test + public void testGetManagedKeys_InvalidCust() throws Exception { + // Arrange + String invalidBase64 = "invalid!Base64@String"; + ManagedKeysRequest request = requestBuilder.setKeyCust(invalidBase64).build(); + + // Act + keyMetaAdminService.getManagedKeys(controller, request, done); + + // Assert + verify(controller).setFailed(contains("IOException")); + verify(keymetaAdmin, never()).getManagedKeys(any(), any()); + verify(done, never()).run(any()); + } +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java index 77fe308d99df..b3692f0896d7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package org.apache.hadoop.hbase.keymeta; import org.apache.hadoop.hbase.HBaseClassTestRule; @@ -17,7 +34,6 @@ import java.util.List; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertThrows; @Category({ MasterTests.class, MediumTests.class }) public class TestManagedKeymeta extends ManagedKeyTestBase { @@ -39,7 +55,6 @@ public void testEnableOverRPC() throws Exception { doTestEnable(adminClient); } - // TODO: Need to add test cases for multiple key spaces. private void doTestEnable(KeymetaAdmin adminClient) throws IOException, KeyException { HMaster master = TEST_UTIL.getHBaseCluster().getMaster(); MockManagedKeyProvider managedKeyProvider = (MockManagedKeyProvider) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java index 5867f6de1969..d2d8d5e55e0e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java @@ -111,6 +111,7 @@ public void testDisabled() throws Exception { } } + // TODO: Need to add test cases for multiple key spaces. @RunWith(Parameterized.class) @Category({ MasterTests.class, SmallTests.class }) public static class TestAdminImpl extends TestKeymetaAdminImpl { From 5bc64af5177e7b073da9d556348ec289aa44bf1c Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Thu, 24 Apr 2025 15:26:38 +0530 Subject: [PATCH 24/70] Added TestKeymetaTableAccessor --- .../hbase/keymeta/KeymetaTableAccessor.java | 28 +- .../hbase/keymeta/TestKeymetaEndpoint.java | 7 +- .../keymeta/TestKeymetaTableAccessor.java | 398 ++++++++++++++++++ .../hbase/master/TestKeymetaAdminImpl.java | 8 +- 4 files changed, 422 insertions(+), 19 deletions(-) create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java index 3605ce634743..52275229de3e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.hbase.keymeta; +import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.NamespaceDescriptor; @@ -123,7 +124,7 @@ public List getAllKeys(byte[] key_cust, String keyNamespace) ResultScanner scanner = table.getScanner(scan); List allKeys = new ArrayList<>(); for (Result result : scanner) { - ManagedKeyData keyData = parseFromResult(key_cust, keyNamespace, result); + ManagedKeyData keyData = parseFromResult(server, key_cust, keyNamespace, result); if (keyData != null) { allKeys.add(keyData); } @@ -171,7 +172,7 @@ public ManagedKeyData getKey(byte[] key_cust, String keyNamespace, String keyMet byte[] rowKey = constructRowKeyForMetadata(key_cust, keyNamespace, ManagedKeyData.constructMetadataHash(keyMetadata)); Result result = table.get(new Get(rowKey)); - return parseFromResult(key_cust, keyNamespace, result); + return parseFromResult(server, key_cust, keyNamespace, result); } } @@ -210,13 +211,13 @@ private Put addMutationColumns(Put put, ManagedKeyData keyData) throws IOExcepti put.addColumn(KEY_META_INFO_FAMILY, DEK_CHECKSUM_QUAL_BYTES, Bytes.toBytes(keyData.getKeyChecksum())) .addColumn(KEY_META_INFO_FAMILY, DEK_WRAPPED_BY_STK_QUAL_BYTES, dekWrappedBySTK) + .addColumn(KEY_META_INFO_FAMILY, STK_CHECKSUM_QUAL_BYTES, + Bytes.toBytes(latestSystemKey.getKeyChecksum())) ; } return put.setDurability(Durability.SKIP_WAL) .setPriority(HConstants.SYSTEMTABLE_QOS) .addColumn(KEY_META_INFO_FAMILY, DEK_METADATA_QUAL_BYTES, keyData.getKeyMetadata().getBytes()) - .addColumn(KEY_META_INFO_FAMILY, STK_CHECKSUM_QUAL_BYTES, - Bytes.toBytes(latestSystemKey.getKeyChecksum())) .addColumn(KEY_META_INFO_FAMILY, REFRESHED_TIMESTAMP_QUAL_BYTES, Bytes.toBytes(keyData.getRefreshTimestamp())) .addColumn(KEY_META_INFO_FAMILY, KEY_STATUS_QUAL_BYTES, @@ -224,26 +225,35 @@ private Put addMutationColumns(Put put, ManagedKeyData keyData) throws IOExcepti ; } - private byte[] constructRowKeyForMetadata(ManagedKeyData keyData) { + @VisibleForTesting + public static byte[] constructRowKeyForMetadata(ManagedKeyData keyData) { return constructRowKeyForMetadata(keyData.getKeyCustodian(), keyData.getKeyNamespace(), keyData.getKeyMetadataHash()); } - private static byte[] constructRowKeyForMetadata(byte[] key_cust, String keyNamespace, + @VisibleForTesting + public static byte[] constructRowKeyForMetadata(byte[] key_cust, String keyNamespace, byte[] keyMetadataHash) { int prefixLength = key_cust.length; return Bytes.add(Bytes.toBytes(prefixLength), key_cust, Bytes.toBytesBinary(keyNamespace), keyMetadataHash); } - private ManagedKeyData parseFromResult(byte[] key_cust, String keyNamespace, Result result) - throws IOException, KeyException { + @VisibleForTesting + public static ManagedKeyData parseFromResult(Server server, byte[] key_cust, String keyNamespace, + Result result) throws IOException, KeyException { if (result == null || result.isEmpty()) { return null; } + ManagedKeyStatus keyStatus = ManagedKeyStatus.forValue( + result.getValue(KEY_META_INFO_FAMILY, KEY_STATUS_QUAL_BYTES)[0]); String dekMetadata = Bytes.toString(result.getValue(KEY_META_INFO_FAMILY, DEK_METADATA_QUAL_BYTES)); byte[] dekWrappedByStk = result.getValue(KEY_META_INFO_FAMILY, DEK_WRAPPED_BY_STK_QUAL_BYTES); + if ((keyStatus == ManagedKeyStatus.ACTIVE || keyStatus == ManagedKeyStatus.INACTIVE) + && dekWrappedByStk == null) { + throw new IOException(keyStatus + " key must have a wrapped key"); + } Key dek = null; if (dekWrappedByStk != null) { long stkChecksum = @@ -257,8 +267,6 @@ private ManagedKeyData parseFromResult(byte[] key_cust, String keyNamespace, Res dek = EncryptionUtil.unwrapKey(server.getConfiguration(), null, dekWrappedByStk, clusterKey.getTheKey()); } - ManagedKeyStatus keyStatus = ManagedKeyStatus.forValue( - result.getValue(KEY_META_INFO_FAMILY, KEY_STATUS_QUAL_BYTES)[0]); long refreshedTimestamp = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY, REFRESHED_TIMESTAMP_QUAL_BYTES)); byte[] readOpValue = result.getValue(KEY_META_INFO_FAMILY, READ_OP_COUNT_QUAL_BYTES); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java index d5c1960c3e1e..ebdce1eff6fd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java @@ -51,14 +51,11 @@ import static org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeyStatus.KEY_FAILED; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.argThat; import static org.mockito.ArgumentMatchers.contains; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; @@ -204,8 +201,8 @@ public void testGenerateKeyStatusResponse() throws Exception { assertNotNull(result.getStatusList()); assertEquals(2, result.getStatusList().size()); assertEquals(ManagedKeyStatus.KEY_ACTIVE, result.getStatusList().get(0).getKeyStatus()); - assertTrue(Bytes.compareTo(keyData1.getKeyCustodian(), - result.getStatusList().get(0).getKeyCustBytes().toByteArray()) == 0); + assertEquals(0, Bytes.compareTo(keyData1.getKeyCustodian(), + result.getStatusList().get(0).getKeyCustBytes().toByteArray())); assertEquals(keyData1.getKeyNamespace(), result.getStatusList().get(0).getKeyNamespace()); verify(controller, never()).setFailed(anyString()); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java new file mode 100644 index 000000000000..e8c0078f2941 --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java @@ -0,0 +1,398 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.keymeta; + +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyData.KEY_SPACE_GLOBAL; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus.ACTIVE; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus.DISABLED; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus.FAILED; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus.INACTIVE; +import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.DEK_CHECKSUM_QUAL_BYTES; +import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.DEK_METADATA_QUAL_BYTES; +import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.DEK_WRAPPED_BY_STK_QUAL_BYTES; +import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.KEY_META_INFO_FAMILY; +import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.KEY_STATUS_QUAL_BYTES; +import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.READ_OP_COUNT_QUAL_BYTES; +import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.REFRESHED_TIMESTAMP_QUAL_BYTES; +import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.STK_CHECKSUM_QUAL_BYTES; +import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.WRITE_OP_COUNT_QUAL_BYTES; +import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.constructRowKeyForMetadata; +import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.parseFromResult; +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.NavigableMap; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.Server; +import org.apache.hadoop.hbase.client.*; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus; +import org.apache.hadoop.hbase.io.crypto.MockManagedKeyProvider; +import org.apache.hadoop.hbase.security.EncryptionUtil; +import org.apache.hadoop.hbase.testclassification.MasterTests; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.apache.hadoop.hbase.util.Bytes; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.junit.runners.BlockJUnit4ClassRunner; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameter; +import org.junit.runners.Suite; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + +@RunWith(Suite.class) +@Suite.SuiteClasses({ + TestKeymetaTableAccessor.TestAdd.class, + TestKeymetaTableAccessor.TestGet.class, + TestKeymetaTableAccessor.TestOps.class, +}) +@Category({ MasterTests.class, SmallTests.class }) +public class TestKeymetaTableAccessor { + protected static final String ALIAS = "custId1"; + protected static final byte[] CUST_ID = ALIAS.getBytes(); + protected static final String KEY_NAMESPACE = "namespace"; + protected static String KEY_METADATA = "metadata1"; + + @Mock + protected Server server; + @Mock + protected Connection connection; + @Mock + protected Table table; + @Mock + protected ResultScanner scanner; + @Mock + protected SystemKeyCache systemKeyCache; + + protected KeymetaTableAccessor accessor; + protected Configuration conf = HBaseConfiguration.create(); + protected MockManagedKeyProvider managedKeyProvider; + protected ManagedKeyData latestSystemKey; + + private AutoCloseable closeable; + + @Before + public void setUp() throws Exception { + closeable = MockitoAnnotations.openMocks(this); + + conf.set(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, "true"); + conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockManagedKeyProvider.class.getName()); + + when(server.getConnection()).thenReturn(connection); + when(connection.getTable(KeymetaTableAccessor.KEY_META_TABLE_NAME)).thenReturn(table); + when(server.getSystemKeyCache()).thenReturn(systemKeyCache); + when(server.getConfiguration()).thenReturn(conf); + + accessor = new KeymetaTableAccessor(server); + managedKeyProvider = new MockManagedKeyProvider(); + managedKeyProvider.initConfig(conf); + + latestSystemKey = managedKeyProvider.getSystemKey("system-id".getBytes()); + when(systemKeyCache.getLatestSystemKey()).thenReturn(latestSystemKey); + when(systemKeyCache.getSystemKeyByChecksum(anyLong())).thenReturn(latestSystemKey); + } + + @After + public void tearDown() throws Exception { + closeable.close(); + } + + @RunWith(Parameterized.class) + @Category({ MasterTests.class, SmallTests.class }) + public static class TestAdd extends TestKeymetaTableAccessor { + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestAdd.class); + + @Parameter(0) + public ManagedKeyStatus keyStatus; + + @Parameterized.Parameters(name = "{index},keyStatus={0}") + public static Collection data() { + return Arrays.asList( + new Object[][] { { ACTIVE }, { FAILED }, { INACTIVE }, { DISABLED }, }); + } + + @Test + public void testAddActiveKey() throws Exception { + managedKeyProvider.setMockedKeyStatus(ALIAS, keyStatus); + ManagedKeyData keyData = + managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); + + accessor.addKey(keyData); + + ArgumentCaptor putCaptor = ArgumentCaptor.forClass(Put.class); + verify(table).put(putCaptor.capture()); + Put put = putCaptor.getValue(); + assertPut(keyData, put); + } + } + + @RunWith(BlockJUnit4ClassRunner.class) + @Category({ MasterTests.class, SmallTests.class }) + public static class TestGet extends TestKeymetaTableAccessor { + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestGet.class); + + @Mock + private Result result1; + @Mock + private Result result2; + + private String keyMetadata2 = "metadata2"; + + @Override + public void setUp() throws Exception { + super.setUp(); + + when(result1.getValue(eq(KEY_META_INFO_FAMILY), eq(KEY_STATUS_QUAL_BYTES))) + .thenReturn(new byte[] { ACTIVE.getVal() }); + when(result2.getValue(eq(KEY_META_INFO_FAMILY), eq(KEY_STATUS_QUAL_BYTES))) + .thenReturn(new byte[] { FAILED.getVal() }); + for (Result result : Arrays.asList(result1, result2)) { + when(result.getValue(eq(KEY_META_INFO_FAMILY), eq(REFRESHED_TIMESTAMP_QUAL_BYTES))) + .thenReturn(Bytes.toBytes(0L)); + when(result.getValue(eq(KEY_META_INFO_FAMILY), eq(STK_CHECKSUM_QUAL_BYTES))) + .thenReturn(Bytes.toBytes(0L)); + } + when(result1.getValue(eq(KEY_META_INFO_FAMILY), eq(DEK_METADATA_QUAL_BYTES))) + .thenReturn(KEY_METADATA.getBytes()); + when(result2.getValue(eq(KEY_META_INFO_FAMILY), eq(DEK_METADATA_QUAL_BYTES))) + .thenReturn(keyMetadata2.getBytes()); + } + + @Test + public void testParseEmptyResult() throws Exception { + Result result = mock(Result.class); + when(result.isEmpty()).thenReturn(true); + + assertNull(parseFromResult(server, CUST_ID, KEY_NAMESPACE, null)); + assertNull(parseFromResult(server, CUST_ID, KEY_NAMESPACE, result)); + } + + @Test + public void testGetActiveKeyMissingWrappedKey() throws Exception { + Result result = mock(Result.class); + when(table.get(any(Get.class))).thenReturn(result); + when(result.getValue(eq(KEY_META_INFO_FAMILY), eq(KEY_STATUS_QUAL_BYTES))) + .thenReturn(new byte[] { ACTIVE.getVal() }, new byte[] { INACTIVE.getVal() }); + + IOException ex; + ex = assertThrows(IOException.class, + () -> accessor.getKey(CUST_ID, KEY_SPACE_GLOBAL, KEY_METADATA)); + assertEquals("ACTIVE key must have a wrapped key", ex.getMessage()); + ex = assertThrows(IOException.class, () -> + accessor.getKey(CUST_ID, KEY_SPACE_GLOBAL, KEY_METADATA)); + assertEquals("INACTIVE key must have a wrapped key", ex.getMessage()); + } + + @Test + public void testGetKeyMissingSTK() throws Exception { + when(result1.getValue(eq(KEY_META_INFO_FAMILY), eq(DEK_WRAPPED_BY_STK_QUAL_BYTES))) + .thenReturn(new byte[] { 0 }); + when(systemKeyCache.getSystemKeyByChecksum(anyLong())).thenReturn(null); + when(table.get(any(Get.class))).thenReturn(result1); + + ManagedKeyData result = accessor.getKey(CUST_ID, KEY_NAMESPACE, KEY_METADATA); + + assertNull(result); + } + + @Test + public void testGetKeyWithWrappedKey() throws Exception { + ManagedKeyData keyData = setupActiveKey(CUST_ID, result1); + + ManagedKeyData result = accessor.getKey(CUST_ID, KEY_NAMESPACE, KEY_METADATA); + + verify(table).get(any(Get.class)); + assertNotNull(result); + assertEquals(0, Bytes.compareTo(CUST_ID, result.getKeyCustodian())); + assertEquals(KEY_NAMESPACE, result.getKeyNamespace()); + assertEquals(KEY_METADATA, result.getKeyMetadata()); + assertEquals(0, Bytes.compareTo(keyData.getTheKey().getEncoded(), + result.getTheKey().getEncoded())); + assertEquals(ACTIVE, result.getKeyStatus()); + + // When DEK checksum doesn't match, we expect a null value. + result = accessor.getKey(CUST_ID, KEY_NAMESPACE, KEY_METADATA); + assertNull(result); + } + + @Test + public void testGetKeyWithoutWrappedKey() throws Exception { + when(table.get(any(Get.class))).thenReturn(result2); + + ManagedKeyData result = accessor.getKey(CUST_ID, KEY_NAMESPACE, KEY_METADATA); + + verify(table).get(any(Get.class)); + assertNotNull(result); + assertEquals(0, Bytes.compareTo(CUST_ID, result.getKeyCustodian())); + assertEquals(KEY_NAMESPACE, result.getKeyNamespace()); + assertEquals(keyMetadata2, result.getKeyMetadata()); + assertNull(result.getTheKey()); + assertEquals(FAILED, result.getKeyStatus()); + } + + @Test + public void testGetKeyWithOps() throws Exception { + long readCnt = 5; + long writeCnt = 10; + when(result2.getValue(eq(KEY_META_INFO_FAMILY), eq(READ_OP_COUNT_QUAL_BYTES))) + .thenReturn(Bytes.toBytes(readCnt)); + when(result2.getValue(eq(KEY_META_INFO_FAMILY), eq(WRITE_OP_COUNT_QUAL_BYTES))) + .thenReturn(Bytes.toBytes(writeCnt)); + when(table.get(any(Get.class))).thenReturn(result2); + + ManagedKeyData result = accessor.getKey(CUST_ID, KEY_NAMESPACE, KEY_METADATA); + + verify(table).get(any(Get.class)); + assertNotNull(result); + assertEquals(readCnt, result.getReadOpCount()); + assertEquals(writeCnt, result.getWriteOpCount()); + } + + @Test + public void testGetAllKeys() throws Exception { + ManagedKeyData keyData = setupActiveKey(CUST_ID, result1); + + when(scanner.iterator()).thenReturn(List.of(result1, result2).iterator()); + when(table.getScanner(any(Scan.class))).thenReturn(scanner); + + List allKeys = accessor.getAllKeys(CUST_ID, KEY_NAMESPACE); + + assertEquals(2, allKeys.size()); + assertEquals(KEY_METADATA, allKeys.get(0).getKeyMetadata()); + assertEquals(keyMetadata2, allKeys.get(1).getKeyMetadata()); + verify(table).getScanner(any(Scan.class)); + } + + @Test + public void testGetActiveKeys() throws Exception { + ManagedKeyData keyData = setupActiveKey(CUST_ID, result1); + + when(scanner.iterator()).thenReturn(List.of(result1, result2).iterator()); + when(table.getScanner(any(Scan.class))).thenReturn(scanner); + + List allKeys = accessor.getActiveKeys(CUST_ID, KEY_NAMESPACE); + + assertEquals(1, allKeys.size()); + assertEquals(KEY_METADATA, allKeys.get(0).getKeyMetadata()); + verify(table).getScanner(any(Scan.class)); + } + + private ManagedKeyData setupActiveKey(byte[] custId, Result result) throws Exception { + ManagedKeyData keyData = managedKeyProvider.getManagedKey(custId, KEY_NAMESPACE); + byte[] dekWrappedBySTK = EncryptionUtil.wrapKey(conf, null, + keyData.getTheKey(), latestSystemKey.getTheKey()); + when(result.getValue(eq(KEY_META_INFO_FAMILY), eq(DEK_WRAPPED_BY_STK_QUAL_BYTES))) + .thenReturn(dekWrappedBySTK); + when(result.getValue(eq(KEY_META_INFO_FAMILY), eq(DEK_CHECKSUM_QUAL_BYTES))) + .thenReturn(Bytes.toBytes(keyData.getKeyChecksum()), Bytes.toBytes(0L)); + when(table.get(any(Get.class))).thenReturn(result); + return keyData; + } + } + + + @RunWith(Parameterized.class) + @Category({ MasterTests.class, SmallTests.class }) + public static class TestOps extends TestKeymetaTableAccessor { + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestOps.class); + + @Parameter(0) + public boolean isReadonly; + + @Parameterized.Parameters(name = "{index},isReadonly={0}") + public static Collection data() { + return Arrays.asList( + new Object[][] { { true }, { false } }); + } + + @Test + public void testReportOperation() throws Exception { + long count = 5; + + accessor.reportOperation(CUST_ID, KEY_NAMESPACE, KEY_METADATA, count, isReadonly); + + ArgumentCaptor incrementCaptor = ArgumentCaptor.forClass(Increment.class); + verify(table).increment(incrementCaptor.capture()); + Increment increment = incrementCaptor.getValue(); + NavigableMap> familyCellMap = increment.getFamilyCellMap(); + List cells = familyCellMap.get(KEY_META_INFO_FAMILY); + assertEquals(1, cells.size()); + Cell cell = cells.get(0); + assertEquals(new Bytes(isReadonly ? READ_OP_COUNT_QUAL_BYTES : WRITE_OP_COUNT_QUAL_BYTES), + new Bytes(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength())); + assertEquals(new Bytes(Bytes.toBytes(count)), + new Bytes(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength())); + } + } + + protected void assertPut(ManagedKeyData keyData, Put put) { + assertEquals(Durability.SKIP_WAL, put.getDurability()); + assertEquals(HConstants.SYSTEMTABLE_QOS, put.getPriority()); + assertTrue(Bytes.compareTo(constructRowKeyForMetadata(keyData), + put.getRow()) == 0); + + NavigableMap> familyCellMap = put.getFamilyCellMap(); + List cells = familyCellMap.get(KEY_META_INFO_FAMILY); + Map valueMap = new HashMap<>(); + for (Cell cell : cells) { + valueMap.put( + new Bytes(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()), + new Bytes(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength())); + } + + if (keyData.getTheKey() != null) { + assertNotNull(valueMap.get(new Bytes(DEK_CHECKSUM_QUAL_BYTES))); + assertNotNull(valueMap.get(new Bytes(DEK_WRAPPED_BY_STK_QUAL_BYTES))); + assertEquals(new Bytes(Bytes.toBytes(latestSystemKey.getKeyChecksum())), + valueMap.get(new Bytes(STK_CHECKSUM_QUAL_BYTES))); + } + else { + assertNull(valueMap.get(new Bytes(DEK_CHECKSUM_QUAL_BYTES))); + assertNull(valueMap.get(new Bytes(DEK_WRAPPED_BY_STK_QUAL_BYTES))); + assertNull(valueMap.get(new Bytes(STK_CHECKSUM_QUAL_BYTES))); + } + assertEquals(new Bytes(keyData.getKeyMetadata().getBytes()), + valueMap.get(new Bytes(DEK_METADATA_QUAL_BYTES))); + assertNotNull(valueMap.get(new Bytes(REFRESHED_TIMESTAMP_QUAL_BYTES))); + assertEquals(new Bytes(new byte[] { keyData.getKeyStatus().getVal() }), + valueMap.get(new Bytes(KEY_STATUS_QUAL_BYTES))); + } +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java index d2d8d5e55e0e..6122f963c300 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java @@ -74,7 +74,7 @@ public class TestKeymetaAdminImpl { KeymetaTableAccessor mockAccessor = mock(KeymetaTableAccessor.class); @Before - public void setUp() throws IOException { + public void setUp() throws Exception { conf = TEST_UTIL.getConfiguration(); testRootDir = TEST_UTIL.getDataTestDir(name.getMethodName()); fs = testRootDir.getFileSystem(conf); @@ -95,7 +95,7 @@ public static class TestWhenDisabled extends TestKeymetaAdminImpl { HBaseClassTestRule.forClass(TestWhenDisabled.class); @Override - public void setUp() throws IOException { + public void setUp() throws Exception { super.setUp(); conf.set(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, "false"); } @@ -139,7 +139,7 @@ public static Collection data() { } @Override - public void setUp() throws IOException { + public void setUp() throws Exception { super.setUp(); conf.set(HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_ACTIVE_KEY_COUNT, Integer.toString(nKeys)); @@ -177,7 +177,7 @@ public static class TestMultiKeyGen extends TestKeymetaAdminImpl { HBaseClassTestRule.forClass(TestKeymetaAdminImpl.TestMultiKeyGen.class); @Override - public void setUp() throws IOException { + public void setUp() throws Exception { super.setUp(); conf.set(HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_ACTIVE_KEY_COUNT, "3"); MockManagedKeyProvider managedKeyProvider = From 81a38a98ef22435b261537d50b205942053db0f8 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Thu, 24 Apr 2025 18:56:14 +0530 Subject: [PATCH 25/70] Added TestManagedKeyDataCache --- .../hbase/keymeta/ManagedKeyDataCache.java | 52 +++--- .../keymeta/TestKeymetaTableAccessor.java | 6 +- .../keymeta/TestManagedKeyDataCache.java | 158 ++++++++++++++++++ 3 files changed, 190 insertions(+), 26 deletions(-) create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java index a204244cfa12..0cb131c5a494 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java @@ -34,8 +34,9 @@ @InterfaceAudience.Private public class ManagedKeyDataCache { private final Map cache; - private final Map>> prefixCache; + private final Map>> prefixCache; private final ReentrantLock lock; + private int nEntries; public ManagedKeyDataCache() { this.prefixCache = new HashMap<>(); @@ -56,19 +57,16 @@ public void addEntry(ManagedKeyData keyData) { cache.put(keyData.getKeyMetadata(), keyData); - Map> nsCache = prefixCache.get(keyNamespace); - if (nsCache == null) { - nsCache = new HashMap<>(); - prefixCache.put(keyNamespace, nsCache); - } + Map> nsCache = prefixCache.computeIfAbsent(keyNamespace, + k -> new HashMap<>()); + Map keyMap = nsCache.computeIfAbsent(keyCust, + k -> new HashMap<>()); - List keyList = nsCache.get(keyCust); - if (keyList == null) { - keyList = new ArrayList<>(); - prefixCache.get(keyNamespace).put(keyCust, keyList); + int prevSize = keyMap.size(); + keyMap.put(keyData.getKeyMetadata(), keyData); + if (keyMap.size() != prevSize) { + ++nEntries; } - - keyList.add(keyData); } finally { lock.unlock(); } @@ -102,21 +100,29 @@ public ManagedKeyData removeEntry(String keyMetadata) { if (removedEntry != null) { Bytes keyCust = new Bytes(removedEntry.getKeyCustodian()); String keyNamespace = removedEntry.getKeyNamespace(); - Map> nsCache = prefixCache.get(keyNamespace); - List keyList = nsCache != null ? nsCache.get(keyCust) : null; - if (keyList != null) { - keyList.remove(removedEntry); - if (keyList.isEmpty()) { - prefixCache.get(keyNamespace).remove(keyCust); - } + Map> nsCache = prefixCache.get(keyNamespace); + Map keyMap = nsCache.get(keyCust); + keyMap.remove(removedEntry.getKeyMetadata()); + if (keyMap.isEmpty()) { + nsCache.remove(keyCust); } } + if (removedEntry != null) { + --nEntries; + } return removedEntry; } finally { lock.unlock(); } } + /** + * @return the approximate number of entries in the cache. + */ + public int getEntryCount() { + return nEntries; + } + /** * Retrieves a random entry from the cache based on its key custodian, key namespace, and filters * out entries with a status other than ACTIVE. @@ -132,10 +138,10 @@ public ManagedKeyData getRandomEntryForPrefix(byte[] key_cust, String keyNamespa List activeEntries = new ArrayList<>(); Bytes keyCust = new Bytes(key_cust); - Map> nsCache = prefixCache.get(keyNamespace); - List keyList = nsCache != null ? nsCache.get(keyCust) : null; - if (keyList != null) { - for (ManagedKeyData entry : keyList) { + Map> nsCache = prefixCache.get(keyNamespace); + Map keyMap = nsCache != null ? nsCache.get(keyCust) : null; + if (keyMap != null) { + for (ManagedKeyData entry : keyMap.values()) { if (entry.getKeyStatus() == ManagedKeyStatus.ACTIVE) { activeEntries.add(entry); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java index e8c0078f2941..e8cdf6f935a8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java @@ -101,11 +101,11 @@ public class TestKeymetaTableAccessor { protected MockManagedKeyProvider managedKeyProvider; protected ManagedKeyData latestSystemKey; - private AutoCloseable closeable; + private AutoCloseable closeableMocks; @Before public void setUp() throws Exception { - closeable = MockitoAnnotations.openMocks(this); + closeableMocks = MockitoAnnotations.openMocks(this); conf.set(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, "true"); conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockManagedKeyProvider.class.getName()); @@ -126,7 +126,7 @@ public void setUp() throws Exception { @After public void tearDown() throws Exception { - closeable.close(); + closeableMocks.close(); } @RunWith(Parameterized.class) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java new file mode 100644 index 000000000000..9c2036403efa --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java @@ -0,0 +1,158 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.keymeta; + +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyData.KEY_SPACE_GLOBAL; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus.FAILED; +import static org.junit.Assert.*; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; +import org.apache.hadoop.hbase.io.crypto.MockManagedKeyProvider; +import org.apache.hadoop.hbase.testclassification.MasterTests; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +@Category({ MasterTests.class, SmallTests.class }) +public class TestManagedKeyDataCache { + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestManagedKeyDataCache.class); + + private static final String ALIAS = "cust1"; + private static final byte[] CUST_ID = ALIAS.getBytes(); + + private MockManagedKeyProvider managedKeyProvider; + private ManagedKeyDataCache cache; + protected Configuration conf = HBaseConfiguration.create(); + + @Before + public void setUp() { + conf.set(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, "true"); + conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockManagedKeyProvider.class.getName()); + + cache = new ManagedKeyDataCache(); + managedKeyProvider = new MockManagedKeyProvider(); + managedKeyProvider.initConfig(conf); + managedKeyProvider.setMultikeyGenMode(true); + } + + @Test + public void testOperations() throws Exception { + ManagedKeyData globalKey1 = managedKeyProvider.getManagedKey(CUST_ID, + KEY_SPACE_GLOBAL); + + assertEquals(0, cache.getEntryCount()); + assertNull(cache.getEntry(globalKey1.getKeyMetadata())); + assertNull(cache.removeEntry(globalKey1.getKeyMetadata())); + + cache.addEntry(globalKey1); + assertEntries(globalKey1); + cache.addEntry(globalKey1); + assertEntries(globalKey1); + + ManagedKeyData nsKey1 = managedKeyProvider.getManagedKey(CUST_ID, + "namespace1"); + + assertNull(cache.getEntry(nsKey1.getKeyMetadata())); + cache.addEntry(nsKey1); + assertEquals(nsKey1, cache.getEntry(nsKey1.getKeyMetadata())); + assertEquals(globalKey1, cache.getEntry(globalKey1.getKeyMetadata())); + assertEntries(nsKey1, globalKey1); + + ManagedKeyData globalKey2 = managedKeyProvider.getManagedKey(CUST_ID, + KEY_SPACE_GLOBAL); + assertNull(cache.getEntry(globalKey2.getKeyMetadata())); + cache.addEntry(globalKey2); + assertEntries(globalKey2, nsKey1, globalKey1); + + ManagedKeyData nsKey2 = managedKeyProvider.getManagedKey(CUST_ID, + "namespace1"); + assertNull(cache.getEntry(nsKey2.getKeyMetadata())); + cache.addEntry(nsKey2); + assertEntries(nsKey2, globalKey2, nsKey1, globalKey1); + + assertEquals(globalKey1, cache.removeEntry(globalKey1.getKeyMetadata())); + assertNull(cache.getEntry(globalKey1.getKeyMetadata())); + assertEntries(nsKey2, globalKey2, nsKey1); + assertEquals(nsKey2, cache.removeEntry(nsKey2.getKeyMetadata())); + assertNull(cache.getEntry(nsKey2.getKeyMetadata())); + assertEntries(globalKey2, nsKey1); + assertEquals(nsKey1, cache.removeEntry(nsKey1.getKeyMetadata())); + assertNull(cache.getEntry(nsKey1.getKeyMetadata())); + assertEntries(globalKey2); + assertEquals(globalKey2, cache.removeEntry(globalKey2.getKeyMetadata())); + assertNull(cache.getEntry(globalKey2.getKeyMetadata())); + } + + @Test + public void testRandomKeyGet() throws Exception{ + assertNull(cache.getRandomEntryForPrefix(CUST_ID, KEY_SPACE_GLOBAL)); + List allKeys = new ArrayList<>(); + for (int i = 0; i < 20; ++i) { + ManagedKeyData keyData; + keyData = managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); + cache.addEntry(keyData); + allKeys.add(keyData); + keyData = managedKeyProvider.getManagedKey(CUST_ID, "namespace"); + cache.addEntry(keyData); + allKeys.add(keyData); + } + Set keys = new HashSet<>(); + for (int i = 0; i < 10; ++i) { + keys.add(cache.getRandomEntryForPrefix(CUST_ID, KEY_SPACE_GLOBAL)); + } + assertTrue(keys.size() > 1); + assertTrue(keys.size() <= 10); + for (ManagedKeyData key: keys) { + assertEquals(KEY_SPACE_GLOBAL, key.getKeyNamespace()); + } + + for(ManagedKeyData key: allKeys) { + assertEquals(key, cache.removeEntry(key.getKeyMetadata())); + } + assertNull(cache.getRandomEntryForPrefix(CUST_ID, KEY_SPACE_GLOBAL)); + } + + @Test + public void testRandomKeyGetNoActive() throws Exception { + managedKeyProvider.setMockedKeyStatus(ALIAS, FAILED); + for (int i = 0; i < 20; ++i) { + cache.addEntry(managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL)); + } + assertNull(cache.getRandomEntryForPrefix(CUST_ID, KEY_SPACE_GLOBAL)); + } + + private void assertEntries(ManagedKeyData... keys) { + assertEquals(keys.length, cache.getEntryCount()); + for (ManagedKeyData key: keys) { + assertEquals(key, cache.getEntry(key.getKeyMetadata())); + } + } +} From 9597dabe6117161c35bd1d52b6d6cd8b908f54ab Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Fri, 25 Apr 2025 13:56:41 +0530 Subject: [PATCH 26/70] Added TestManagedKeyAccessor --- .../hbase/keymeta/KeyManagementBase.java | 12 +- .../hbase/keymeta/KeymetaTableAccessor.java | 16 +- .../hbase/keymeta/ManagedKeyAccessor.java | 31 ++- .../hbase/keymeta/SystemKeyAccessor.java | 4 +- .../hbase/regionserver/HRegionServer.java | 3 +- .../hbase/keymeta/TestManagedKeyAccessor.java | 210 ++++++++++++++++++ .../keymeta/TestManagedKeyDataCache.java | 3 +- 7 files changed, 251 insertions(+), 28 deletions(-) create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyAccessor.java diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java index b304490f71e7..5bec294fe92f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java @@ -34,7 +34,7 @@ public abstract class KeyManagementBase { protected static final Logger LOG = LoggerFactory.getLogger(KeyManagementBase.class); - protected final Server server; + private final Server server; private Boolean keyManagementEnabled; private Integer perPrefixActiveKeyCount; @@ -43,6 +43,10 @@ public KeyManagementBase(Server server) { this.server = server; } + protected Server getServer() { + return server; + } + /** * A utility method for getting the managed key provider. * @return the key provider @@ -50,7 +54,7 @@ public KeyManagementBase(Server server) { * instance of ManagedKeyProvider */ protected ManagedKeyProvider getKeyProvider() { - KeyProvider provider = Encryption.getKeyProvider(server.getConfiguration()); + KeyProvider provider = Encryption.getKeyProvider(getServer().getConfiguration()); if (!(provider instanceof ManagedKeyProvider)) { throw new RuntimeException( "KeyProvider: " + provider.getClass().getName() + " expected to be of type ManagedKeyProvider"); @@ -64,7 +68,7 @@ protected ManagedKeyProvider getKeyProvider() { */ protected boolean isKeyManagementEnabled() { if (keyManagementEnabled == null) { - keyManagementEnabled = Server.isKeyManagementEnabled(server); + keyManagementEnabled = Server.isKeyManagementEnabled(getServer()); } return keyManagementEnabled; } @@ -81,7 +85,7 @@ protected void assertKeyManagementEnabled() throws IOException { protected int getPerPrefixActiveKeyConfCount() throws IOException { if (perPrefixActiveKeyCount == null) { - perPrefixActiveKeyCount = server.getConfiguration().getInt( + perPrefixActiveKeyCount = getServer().getConfiguration().getInt( HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_ACTIVE_KEY_COUNT, HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_ACTIVE_KEY_DEFAULT_COUNT); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java index 52275229de3e..c447cf00230a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java @@ -93,7 +93,7 @@ public void addKey(ManagedKeyData keyData) throws IOException { assertKeyManagementEnabled(); final Put putForMetadata = addMutationColumns(new Put(constructRowKeyForMetadata(keyData)), keyData); - Connection connection = server.getConnection(); + Connection connection = getServer().getConnection(); try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { table.put(putForMetadata); } @@ -112,7 +112,7 @@ public void addKey(ManagedKeyData keyData) throws IOException { public List getAllKeys(byte[] key_cust, String keyNamespace) throws IOException, KeyException { assertKeyManagementEnabled(); - Connection connection = server.getConnection(); + Connection connection = getServer().getConnection(); byte[] prefixForScan = Bytes.add(Bytes.toBytes(key_cust.length), key_cust, Bytes.toBytes(keyNamespace)); try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { @@ -124,7 +124,7 @@ public List getAllKeys(byte[] key_cust, String keyNamespace) ResultScanner scanner = table.getScanner(scan); List allKeys = new ArrayList<>(); for (Result result : scanner) { - ManagedKeyData keyData = parseFromResult(server, key_cust, keyNamespace, result); + ManagedKeyData keyData = parseFromResult(getServer(), key_cust, keyNamespace, result); if (keyData != null) { allKeys.add(keyData); } @@ -167,12 +167,12 @@ public List getActiveKeys(byte[] key_cust, String keyNamespace) public ManagedKeyData getKey(byte[] key_cust, String keyNamespace, String keyMetadata) throws IOException, KeyException { assertKeyManagementEnabled(); - Connection connection = server.getConnection(); + Connection connection = getServer().getConnection(); try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { byte[] rowKey = constructRowKeyForMetadata(key_cust, keyNamespace, ManagedKeyData.constructMetadataHash(keyMetadata)); Result result = table.get(new Get(rowKey)); - return parseFromResult(server, key_cust, keyNamespace, result); + return parseFromResult(getServer(), key_cust, keyNamespace, result); } } @@ -188,7 +188,7 @@ public ManagedKeyData getKey(byte[] key_cust, String keyNamespace, String keyMet public void reportOperation(byte[] key_cust, String keyNamespace, String keyMetadata, long count, boolean isReadOperation) throws IOException { assertKeyManagementEnabled(); - Connection connection = server.getConnection(); + Connection connection = getServer().getConnection(); try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { byte[] rowKey = constructRowKeyForMetadata(key_cust, keyNamespace, ManagedKeyData.constructMetadataHash(keyMetadata)); @@ -204,9 +204,9 @@ public void reportOperation(byte[] key_cust, String keyNamespace, String keyMeta * Add the mutation columns to the given Put that are derived from the keyData. */ private Put addMutationColumns(Put put, ManagedKeyData keyData) throws IOException { - ManagedKeyData latestSystemKey = server.getSystemKeyCache().getLatestSystemKey(); + ManagedKeyData latestSystemKey = getServer().getSystemKeyCache().getLatestSystemKey(); if (keyData.getTheKey() != null) { - byte[] dekWrappedBySTK = EncryptionUtil.wrapKey(server.getConfiguration(), null, + byte[] dekWrappedBySTK = EncryptionUtil.wrapKey(getServer().getConfiguration(), null, keyData.getTheKey(), latestSystemKey.getTheKey()); put.addColumn(KEY_META_INFO_FAMILY, DEK_CHECKSUM_QUAL_BYTES, Bytes.toBytes(keyData.getKeyChecksum())) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java index 923bd7b4436d..fe1d1c6d8b61 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java @@ -34,10 +34,11 @@ public class ManagedKeyAccessor extends KeyManagementBase { private final ManagedKeyDataCache keyDataCache; private final KeymetaTableAccessor keymetaAccessor; - public ManagedKeyAccessor(KeymetaTableAccessor keymetaAccessor) { - super(keymetaAccessor.server); + public ManagedKeyAccessor(KeymetaTableAccessor keymetaAccessor, + ManagedKeyDataCache keyDataCache) { + super(keymetaAccessor.getServer()); this.keymetaAccessor = keymetaAccessor; - keyDataCache = new ManagedKeyDataCache(); + this.keyDataCache = keyDataCache; } /** @@ -61,10 +62,16 @@ public ManagedKeyData getKey(byte[] key_cust, String keyNamespace, String keyMet // 3. Check with Key Provider. ManagedKeyProvider provider = getKeyProvider(); keyData = provider.unwrapKey(keyMetadata); - LOG.info("Got key data with status: {} and metadata: {} for prefix: {}", - keyData.getKeyStatus(), keyData.getKeyMetadata(), - ManagedKeyProvider.encodeToStr(key_cust)); - keymetaAccessor.addKey(keyData); + if (keyData != null) { + LOG.info("Got key data with status: {} and metadata: {} for prefix: {}", + keyData.getKeyStatus(), keyData.getKeyMetadata(), + ManagedKeyProvider.encodeToStr(key_cust)); + keymetaAccessor.addKey(keyData); + } + else { + LOG.info("Failed to get key data with metadata: {} for prefix: {}", + keyMetadata, ManagedKeyProvider.encodeToStr(key_cust)); + } } if (keyData != null) { keyDataCache.addEntry(keyData); @@ -82,15 +89,17 @@ public ManagedKeyData getKey(byte[] key_cust, String keyNamespace, String keyMet * @throws IOException if an error occurs while retrieving the key */ public ManagedKeyData getAnActiveKey(byte[] key_cust, String keyNamespace) - throws IOException, KeyException { + throws IOException, KeyException { assertKeyManagementEnabled(); ManagedKeyData keyData = keyDataCache.getRandomEntryForPrefix(key_cust, keyNamespace); if (keyData == null) { List activeKeys = keymetaAccessor.getActiveKeys(key_cust, keyNamespace); - for (ManagedKeyData kd: activeKeys) { - keyDataCache.addEntry(kd); + if (! activeKeys.isEmpty()) { + for (ManagedKeyData kd : activeKeys) { + keyDataCache.addEntry(kd); + } + keyData = keyDataCache.getRandomEntryForPrefix(key_cust, keyNamespace); } - keyData = keyDataCache.getRandomEntryForPrefix(key_cust, keyNamespace); } return keyData; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java index 0cc61c6ba103..b03d15dfd2e1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java @@ -67,7 +67,7 @@ public List getAllSystemKeyFiles() throws IOException { if (!isKeyManagementEnabled()) { return null; } - FileSystem fs = server.getFileSystem(); + FileSystem fs = getServer().getFileSystem(); Map clusterKeys = new TreeMap<>(Comparator.reverseOrder()); for (FileStatus st : fs.globStatus(new Path(systemKeyDir, SYSTEM_KEY_FILE_PREFIX + "*"))) { @@ -111,7 +111,7 @@ protected int extractKeySequence(Path clusterKeyFile) throws IOException { } protected String loadKeyMetadata(Path keyPath) throws IOException { - try (FSDataInputStream fin = server.getFileSystem().open(keyPath)) { + try (FSDataInputStream fin = getServer().getFileSystem().open(keyPath)) { return fin.readUTF(); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index 0df4d1db890f..eeccb53f36b2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -121,6 +121,7 @@ import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException; import org.apache.hadoop.hbase.ipc.ServerRpcController; import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; +import org.apache.hadoop.hbase.keymeta.ManagedKeyDataCache; import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.mob.MobFileCache; import org.apache.hadoop.hbase.mob.RSMobFileCleanerChore; @@ -1450,7 +1451,7 @@ protected void handleReportForDutyResponse(final RegionServerStartupResponse c) } buildSystemKeyCache(); - managedKeyAccessor = new ManagedKeyAccessor(keymetaAdmin); + managedKeyAccessor = new ManagedKeyAccessor(keymetaAdmin, new ManagedKeyDataCache()); // hack! Maps DFSClient => RegionServer for logs. HDFS made this // config param for task trackers, but we can piggyback off of it. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyAccessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyAccessor.java new file mode 100644 index 000000000000..75b77be027df --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyAccessor.java @@ -0,0 +1,210 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.keymeta; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.Server; +import org.apache.hadoop.hbase.io.crypto.Encryption; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; +import org.apache.hadoop.hbase.io.crypto.MockManagedKeyProvider; +import org.apache.hadoop.hbase.testclassification.MasterTests; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import java.util.ArrayList; +import java.util.Arrays; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyData.KEY_SPACE_GLOBAL; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.clearInvocations; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +@Category({ MasterTests.class, SmallTests.class }) +public class TestManagedKeyAccessor { + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestManagedKeyAccessor.class); + + private static final String ALIAS = "cust1"; + private static final byte[] CUST_ID = ALIAS.getBytes(); + + @Mock + private KeymetaTableAccessor keymetaAccessor; + @Mock + private ManagedKeyDataCache keyDataCache; + @Mock + private Server server; + + private ManagedKeyAccessor managedKeyAccessor; + private AutoCloseable closeableMocks; + private MockManagedKeyProvider managedKeyProvider; + protected Configuration conf = HBaseConfiguration.create(); + + @Before + public void setUp() { + conf.set(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, "true"); + conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockManagedKeyProvider.class.getName()); + + closeableMocks = MockitoAnnotations.openMocks(this); + managedKeyProvider = (MockManagedKeyProvider) Encryption.getKeyProvider(conf); + managedKeyProvider.initConfig(conf); + when(server.getConfiguration()).thenReturn(conf); + when(keymetaAccessor.getServer()).thenReturn(server); + managedKeyAccessor = new ManagedKeyAccessor(keymetaAccessor, keyDataCache); + } + + @After + public void tearDown() throws Exception { + closeableMocks.close(); + } + + @Test + public void testGetKeyNonExisting() throws Exception { + for (int i = 0; i < 2; ++i) { + ManagedKeyData keyData = managedKeyAccessor.getKey(CUST_ID, KEY_SPACE_GLOBAL, "abcd"); + verifyNonExisting(keyData); + } + } + + private void verifyNonExisting(ManagedKeyData keyData) throws Exception { + assertNull(keyData); + verify(keyDataCache).getEntry("abcd"); + verify(keymetaAccessor).getKey(CUST_ID, KEY_SPACE_GLOBAL, "abcd"); + verify(keymetaAccessor, never()).addKey(any()); + verify(keyDataCache, never()).addEntry(any()); + clearInvocations(keyDataCache, keymetaAccessor); + } + + @Test + public void testGetFromL1() throws Exception { + ManagedKeyData keyData = managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); + when(keyDataCache.getEntry(any())).thenReturn(keyData); + + ManagedKeyData result = + managedKeyAccessor.getKey(CUST_ID, KEY_SPACE_GLOBAL, keyData.getKeyMetadata()); + + assertEquals(keyData, result); + verify(keyDataCache).getEntry(keyData.getKeyMetadata()); + verify(keymetaAccessor, never()).getKey(any(), any(), any()); + verify(keymetaAccessor, never()).addKey(any()); + verify(keyDataCache, never()).addEntry(keyData); + } + + @Test + public void testGetFromL2() throws Exception { + ManagedKeyData keyData = managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); + when(keymetaAccessor.getKey(any(), any(), any())).thenReturn(keyData); + + ManagedKeyData result = + managedKeyAccessor.getKey(CUST_ID, KEY_SPACE_GLOBAL, keyData.getKeyMetadata()); + + assertEquals(keyData, result); + verify(keyDataCache).getEntry(keyData.getKeyMetadata()); + verify(keymetaAccessor).getKey(CUST_ID, KEY_SPACE_GLOBAL, keyData.getKeyMetadata()); + verify(keymetaAccessor, never()).addKey(any()); + verify(keyDataCache).addEntry(keyData); + } + + @Test + public void testGetFromProvider() throws Exception { + ManagedKeyData keyData = managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); + + ManagedKeyData result = + managedKeyAccessor.getKey(CUST_ID, KEY_SPACE_GLOBAL, keyData.getKeyMetadata()); + + assertEquals(keyData, result); + verify(keyDataCache).getEntry(keyData.getKeyMetadata()); + verify(keymetaAccessor).getKey(CUST_ID, KEY_SPACE_GLOBAL, keyData.getKeyMetadata()); + verify(keymetaAccessor).addKey(any()); + verify(keyDataCache).addEntry(keyData); + } + + @Test + public void testGetActiveKeyWhenMissing() throws Exception { + ManagedKeyData result = managedKeyAccessor.getAnActiveKey(CUST_ID, KEY_SPACE_GLOBAL); + + assertNull(result); + verify(keyDataCache).getRandomEntryForPrefix(CUST_ID, KEY_SPACE_GLOBAL); + verify(keymetaAccessor).getActiveKeys(CUST_ID, KEY_SPACE_GLOBAL); + } + + @Test + public void testGetActiveKeyFromL1() throws Exception { + ManagedKeyData keyData = managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); + when(keyDataCache.getRandomEntryForPrefix(any(), any())).thenReturn(keyData); + + ManagedKeyData result = managedKeyAccessor.getAnActiveKey(CUST_ID, KEY_SPACE_GLOBAL); + + assertEquals(keyData, result); + verify(keyDataCache).getRandomEntryForPrefix(CUST_ID, KEY_SPACE_GLOBAL); + verify(keymetaAccessor, never()).getActiveKeys(any(), any()); + } + + @Test + public void testGetActiveKeyFromL2WithNoResults() throws Exception { + when(keymetaAccessor.getActiveKeys(any(), any())).thenReturn(new ArrayList<>()); + + ManagedKeyData result = managedKeyAccessor.getAnActiveKey(CUST_ID, KEY_SPACE_GLOBAL); + + assertNull(result); + verify(keyDataCache).getRandomEntryForPrefix(CUST_ID, KEY_SPACE_GLOBAL); + verify(keymetaAccessor).getActiveKeys(CUST_ID, KEY_SPACE_GLOBAL); + } + + @Test + public void testGetActiveKeyFromL2WithSingleResult() throws Exception { + ManagedKeyData keyData = managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); + when(keyDataCache.getRandomEntryForPrefix(any(), any())).thenReturn(null, keyData); + when(keymetaAccessor.getActiveKeys(any(), any())).thenReturn(Arrays.asList(keyData)); + + ManagedKeyData result = managedKeyAccessor.getAnActiveKey(CUST_ID, KEY_SPACE_GLOBAL); + + assertEquals(keyData, result); + verify(keyDataCache, times(2)).getRandomEntryForPrefix(CUST_ID, KEY_SPACE_GLOBAL); + verify(keymetaAccessor).getActiveKeys(CUST_ID, KEY_SPACE_GLOBAL); + verify(keyDataCache).addEntry(keyData); + } + + @Test + public void testGetActiveKeyFromL2WithMultipleResults() throws Exception { + managedKeyProvider.setMultikeyGenMode(true); + ManagedKeyData keyData1 = managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); + ManagedKeyData keyData2 = managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); + when(keyDataCache.getRandomEntryForPrefix(any(), any())).thenReturn(null, keyData1); + when(keymetaAccessor.getActiveKeys(any(), any())).thenReturn(Arrays.asList(keyData1, keyData2)); + + ManagedKeyData result = managedKeyAccessor.getAnActiveKey(CUST_ID, KEY_SPACE_GLOBAL); + + assertEquals(keyData1, result); + verify(keyDataCache, times(2)).getRandomEntryForPrefix(CUST_ID, KEY_SPACE_GLOBAL); + verify(keymetaAccessor).getActiveKeys(CUST_ID, KEY_SPACE_GLOBAL); + verify(keyDataCache, times(2)).addEntry(any()); + } +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java index 9c2036403efa..1d5c33774efd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java @@ -48,7 +48,7 @@ public class TestManagedKeyDataCache { private static final String ALIAS = "cust1"; private static final byte[] CUST_ID = ALIAS.getBytes(); - private MockManagedKeyProvider managedKeyProvider; + private final MockManagedKeyProvider managedKeyProvider = new MockManagedKeyProvider(); private ManagedKeyDataCache cache; protected Configuration conf = HBaseConfiguration.create(); @@ -58,7 +58,6 @@ public void setUp() { conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockManagedKeyProvider.class.getName()); cache = new ManagedKeyDataCache(); - managedKeyProvider = new MockManagedKeyProvider(); managedKeyProvider.initConfig(conf); managedKeyProvider.setMultikeyGenMode(true); } From dfa316e2ab921c7d1b21672ba76c8f92f44e5564 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Mon, 28 Apr 2025 11:48:01 +0530 Subject: [PATCH 27/70] Some parameterization --- .../hbase/master/TestKeymetaAdminImpl.java | 98 ++++++++++++------- 1 file changed, 63 insertions(+), 35 deletions(-) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java index 6122f963c300..03e5d3d90bbb 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java @@ -37,6 +37,7 @@ import java.util.Collection; import java.util.List; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyData.KEY_SPACE_GLOBAL; import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus.*; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; @@ -44,7 +45,6 @@ import static org.junit.Assert.assertThrows; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.argThat; -import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -71,7 +71,7 @@ public class TestKeymetaAdminImpl { protected FileSystem mockFileSystem = mock(FileSystem.class); protected Server mockServer = mock(Server.class); protected KeymetaAdminImpl keymetaAdmin; - KeymetaTableAccessor mockAccessor = mock(KeymetaTableAccessor.class); + KeymetaTableAccessor keymetaAccessor = mock(KeymetaTableAccessor.class); @Before public void setUp() throws Exception { @@ -84,7 +84,7 @@ public void setUp() throws Exception { when(mockServer.getFileSystem()).thenReturn(mockFileSystem); when(mockServer.getConfiguration()).thenReturn(conf); - keymetaAdmin = new DummyKeymetaAdminImpl(mockServer, mockAccessor); + keymetaAdmin = new DummyKeymetaAdminImpl(mockServer, keymetaAccessor); } @RunWith(BlockJUnit4ClassRunner.class) @@ -104,14 +104,13 @@ public void setUp() throws Exception { public void testDisabled() throws Exception { assertThrows(IOException.class, () -> keymetaAdmin.enableKeyManagement(ManagedKeyData.KEY_GLOBAL_CUSTODIAN, - ManagedKeyData.KEY_SPACE_GLOBAL)); + KEY_SPACE_GLOBAL)); assertThrows(IOException.class, () -> keymetaAdmin.getManagedKeys(ManagedKeyData.KEY_GLOBAL_CUSTODIAN, - ManagedKeyData.KEY_SPACE_GLOBAL)); + KEY_SPACE_GLOBAL)); } } - // TODO: Need to add test cases for multiple key spaces. @RunWith(Parameterized.class) @Category({ MasterTests.class, SmallTests.class }) public static class TestAdminImpl extends TestKeymetaAdminImpl { @@ -122,19 +121,22 @@ public static class TestAdminImpl extends TestKeymetaAdminImpl { @Parameter(0) public int nKeys; @Parameter(1) - public ManagedKeyStatus keyStatus; + public String keySpace; @Parameter(2) + public ManagedKeyStatus keyStatus; + @Parameter(3) public boolean isNullKey; - @Parameters(name = "{index},nKeys={0},keyStatus={1}") + @Parameters(name = "{index},nKeys={0},keySpace={1},keyStatus={2}") public static Collection data() { return Arrays.asList( new Object[][] { - { 1, ACTIVE, false }, - { 1, FAILED, true }, - { 1, INACTIVE, false }, - { 1, DISABLED, true }, - { 2, ACTIVE, false }, + { 1, KEY_SPACE_GLOBAL, ACTIVE, false }, + { 1, "ns1", ACTIVE, false }, + { 1, KEY_SPACE_GLOBAL, FAILED, true }, + { 1, KEY_SPACE_GLOBAL, INACTIVE, false }, + { 1, KEY_SPACE_GLOBAL, DISABLED, true }, + { 2, KEY_SPACE_GLOBAL, ACTIVE, false }, }); } @@ -146,36 +148,46 @@ public void setUp() throws Exception { } @Test - public void testEnable() throws Exception { + public void testEnableAndGet() throws Exception { MockManagedKeyProvider managedKeyProvider = (MockManagedKeyProvider) Encryption.getKeyProvider(conf); String cust = "cust1"; managedKeyProvider.setMockedKeyStatus(cust, keyStatus); String encodedCust = ManagedKeyProvider.encodeToStr(cust.getBytes()); List managedKeyStatuses = - keymetaAdmin.enableKeyManagement(encodedCust, ManagedKeyData.KEY_SPACE_GLOBAL); + keymetaAdmin.enableKeyManagement(encodedCust, keySpace); assertNotNull(managedKeyStatuses); assertEquals(1, managedKeyStatuses.size()); assertEquals(keyStatus, managedKeyStatuses.get(0).getKeyStatus()); - verify(mockAccessor).addKey(argThat( + verify(keymetaAccessor).addKey(argThat( (ManagedKeyData keyData) -> assertKeyData(keyData, keyStatus, isNullKey ? null : managedKeyProvider.getMockedKey(cust, - ManagedKeyData.KEY_SPACE_GLOBAL)))); + keySpace)))); - keymetaAdmin.getManagedKeys(encodedCust, ManagedKeyData.KEY_SPACE_GLOBAL); - verify(mockAccessor).getAllKeys( - argThat((arr) -> Bytes.compareTo(cust.getBytes(), arr) == 0), - eq(ManagedKeyData.KEY_SPACE_GLOBAL)); + keymetaAdmin.getManagedKeys(encodedCust, keySpace); + verify(keymetaAccessor).getAllKeys(cust.getBytes(), keySpace); } } - @RunWith(BlockJUnit4ClassRunner.class) + @RunWith(Parameterized.class) @Category({ MasterTests.class, SmallTests.class }) public static class TestMultiKeyGen extends TestKeymetaAdminImpl { @ClassRule public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestKeymetaAdminImpl.TestMultiKeyGen.class); + @Parameter(0) + public String keySpace; + + @Parameters(name = "{index},keySpace={0}") + public static Collection data() { + return Arrays.asList( + new Object[][] { + { KEY_SPACE_GLOBAL }, + { "ns1" }, + }); + } + @Override public void setUp() throws Exception { super.setUp(); @@ -190,31 +202,43 @@ public void testEnable() throws Exception { String cust = "cust1"; String encodedCust = ManagedKeyProvider.encodeToStr(cust.getBytes()); List managedKeyStatuses = - keymetaAdmin.enableKeyManagement(encodedCust, ManagedKeyData.KEY_SPACE_GLOBAL); + keymetaAdmin.enableKeyManagement(encodedCust, keySpace); assertNotNull(managedKeyStatuses); assertEquals(3, managedKeyStatuses.size()); assertEquals(ACTIVE, managedKeyStatuses.get(0).getKeyStatus()); assertEquals(ACTIVE, managedKeyStatuses.get(1).getKeyStatus()); - verify(mockAccessor, times(3)).addKey(any()); + verify(keymetaAccessor, times(3)).addKey(any()); } } - @RunWith(BlockJUnit4ClassRunner.class) + @RunWith(Parameterized.class) @Category({ MasterTests.class, SmallTests.class }) public static class TestForKeyProviderNullReturn extends TestKeymetaAdminImpl { @ClassRule public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestForKeyProviderNullReturn.class); + @Parameter(0) + public String keySpace; + + @Parameters(name = "{index},keySpace={0}") + public static Collection data() { + return Arrays.asList( + new Object[][] { + { KEY_SPACE_GLOBAL }, + { "ns1" }, + }); + } + @Test public void test() throws Exception { MockManagedKeyProvider managedKeyProvider = (MockManagedKeyProvider) Encryption.getKeyProvider(conf); String cust = "invalidcust1"; String encodedCust = ManagedKeyProvider.encodeToStr(cust.getBytes()); - managedKeyProvider.setMockedKey(cust, null, ManagedKeyData.KEY_SPACE_GLOBAL); + managedKeyProvider.setMockedKey(cust, null, keySpace); IOException ex = assertThrows(IOException.class, - () -> keymetaAdmin.enableKeyManagement(encodedCust, ManagedKeyData.KEY_SPACE_GLOBAL)); + () -> keymetaAdmin.enableKeyManagement(encodedCust, keySpace)); assertEquals("Invalid null managed key received from key provider", ex.getMessage()); } } @@ -228,13 +252,18 @@ public static class TestForInvalidKeyCountConfig extends TestKeymetaAdminImpl { @Parameter(0) public String keyCount;; @Parameter(1) + public String keySpace; + @Parameter(2) public Class expectedExType; - @Parameters(name = "{index},keyCount={0},expectedExType={1}") + @Parameters(name = "{index},keyCount={0},keySpace={1}expectedExType={2}") public static Collection data() { return Arrays.asList(new Object[][] { - { "0", IOException.class }, - { "-1", IOException.class }, - { "abc", NumberFormatException.class }, + { "0", KEY_SPACE_GLOBAL, IOException.class }, + { "-1", KEY_SPACE_GLOBAL, IOException.class }, + { "abc", KEY_SPACE_GLOBAL, NumberFormatException.class }, + { "0", "ns1", IOException.class }, + { "-1", "ns1", IOException.class }, + { "abc", "ns1", NumberFormatException.class }, }); } @@ -244,8 +273,7 @@ public void test() throws Exception { String cust = "cust1"; String encodedCust = ManagedKeyProvider.encodeToStr(cust.getBytes()); assertThrows(expectedExType, () -> - keymetaAdmin.enableKeyManagement(encodedCust, - ManagedKeyData.KEY_SPACE_GLOBAL)); + keymetaAdmin.enableKeyManagement(encodedCust, keySpace)); } } @@ -256,13 +284,13 @@ public DummyKeymetaAdminImpl(Server mockServer, KeymetaTableAccessor mockAccesso @Override public void addKey(ManagedKeyData keyData) throws IOException { - mockAccessor.addKey(keyData); + keymetaAccessor.addKey(keyData); } @Override public List getAllKeys(byte[] key_cust, String keyNamespace) throws IOException, KeyException { - return mockAccessor.getAllKeys(key_cust, keyNamespace); + return keymetaAccessor.getAllKeys(key_cust, keyNamespace); } } From 5645c8aa28c468a8aec3947248e75c93cc124a43 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Mon, 28 Apr 2025 15:57:54 +0530 Subject: [PATCH 28/70] More test coverage for system key --- .../hbase/keymeta/SystemKeyAccessor.java | 9 +- .../hadoop/hbase/master/SystemKeyManager.java | 15 +- .../hadoop/hbase/master/TestSystemKey.java | 414 ++++++++++++++---- .../hbase/master/TestSystemKeyAccessor.java | 182 -------- .../hbase/master/TestSystemKeyManager.java | 116 +++++ 5 files changed, 454 insertions(+), 282 deletions(-) delete mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessor.java create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyManager.java diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java index b03d15dfd2e1..ec1c37c585db 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.hbase.keymeta; +import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -51,7 +52,7 @@ public Path getLatestSystemKeyFile() throws IOException { if (allClusterKeyFiles.isEmpty()) { throw new RuntimeException("No cluster key initialized yet"); } - int currentMaxSeqNum = extractKeySequence(allClusterKeyFiles.get(0)); + int currentMaxSeqNum = SystemKeyAccessor.extractKeySequence(allClusterKeyFiles.get(0)); return new Path(systemKeyDir, SYSTEM_KEY_FILE_PREFIX + currentMaxSeqNum); } @@ -84,7 +85,8 @@ public ManagedKeyData loadSystemKey(Path keyPath) throws IOException { return provider.unwrapKey(loadKeyMetadata(keyPath)); } - public int extractSystemKeySeqNum(Path keyPath) throws IOException { + @VisibleForTesting + public static int extractSystemKeySeqNum(Path keyPath) throws IOException { if (keyPath.getName().startsWith(SYSTEM_KEY_FILE_PREFIX)) { try { return Integer.valueOf(keyPath.getName().substring(SYSTEM_KEY_FILE_PREFIX.length())); @@ -102,7 +104,8 @@ public int extractSystemKeySeqNum(Path keyPath) throws IOException { * @return The sequence or {@code -1} if not a valid sequence file. * @throws IOException */ - protected int extractKeySequence(Path clusterKeyFile) throws IOException { + @VisibleForTesting + public static int extractKeySequence(Path clusterKeyFile) throws IOException { int keySeq = -1; if (clusterKeyFile.getName().startsWith(SYSTEM_KEY_FILE_PREFIX)) { keySeq = Integer.valueOf(clusterKeyFile.getName().substring(SYSTEM_KEY_FILE_PREFIX.length())); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java index f91fb22c49d4..f109e1195b91 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java @@ -68,18 +68,21 @@ public ManagedKeyData rotateSystemKeyIfChanged() throws IOException { } private ManagedKeyData rotateSystemKey(String currentKeyMetadata) throws IOException { - if (! isKeyManagementEnabled()) { - return null; - } ManagedKeyProvider provider = getKeyProvider(); ManagedKeyData clusterKey = provider.getSystemKey( master.getMasterFileSystem().getClusterId().toString().getBytes()); + if (clusterKey == null) { + throw new IOException("Failed to get system key for cluster id: " + + master.getMasterFileSystem().getClusterId().toString()); + } if (clusterKey.getKeyStatus() != ManagedKeyStatus.ACTIVE) { throw new IOException("System key is expected to be ACTIVE but it is: " + clusterKey.getKeyStatus() + " for metadata: " + clusterKey.getKeyMetadata()); } - if (clusterKey != null && clusterKey.getKeyMetadata() != null && - ! clusterKey.getKeyMetadata().equals(currentKeyMetadata) && + if (clusterKey.getKeyMetadata() == null) { + throw new IOException("System key is expected to have metadata but it is null"); + } + if (! clusterKey.getKeyMetadata().equals(currentKeyMetadata) && saveLatestSystemKey(clusterKey.getKeyMetadata())) { return clusterKey; } @@ -89,7 +92,7 @@ private ManagedKeyData rotateSystemKey(String currentKeyMetadata) throws IOExcep private boolean saveLatestSystemKey(String keyMetadata) throws IOException { List allSystemKeyFiles = getAllSystemKeyFiles(); int nextSystemKeySeq = (allSystemKeyFiles.isEmpty() ? -1 - : extractKeySequence(allSystemKeyFiles.get(0))) + 1; + : SystemKeyAccessor.extractKeySequence(allSystemKeyFiles.get(0))) + 1; LOG.info("Trying to save a new cluster key at seq: {}", nextSystemKeySeq); MasterFileSystem masterFS = master.getMasterFileSystem(); Path nextSystemKeyPath = new Path(systemKeyDir, diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java index cc5926ec1c01..e724415fca16 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java @@ -1,114 +1,346 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ package org.apache.hadoop.hbase.master; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.ClusterId; import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HBaseTestingUtil; +import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.io.crypto.Encryption; -import org.apache.hadoop.hbase.io.crypto.KeyProvider; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; import org.apache.hadoop.hbase.io.crypto.MockManagedKeyProvider; -import org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus; import org.apache.hadoop.hbase.keymeta.SystemKeyAccessor; -import org.apache.hadoop.hbase.keymeta.SystemKeyCache; -import org.apache.hadoop.hbase.keymeta.ManagedKeyTestBase; import org.apache.hadoop.hbase.testclassification.MasterTests; -import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.apache.hadoop.hbase.util.CommonFSUtils; +import org.junit.After; +import org.junit.Before; import org.junit.ClassRule; +import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.junit.rules.TestName; +import org.junit.runner.RunWith; +import org.junit.runners.BlockJUnit4ClassRunner; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameter; +import org.junit.runners.Parameterized.Parameters; +import org.junit.runners.Suite; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; import java.io.IOException; -import java.security.Key; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.stream.IntStream; +import static org.apache.hadoop.hbase.HConstants.SYSTEM_KEY_FILE_PREFIX; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyData.KEY_SPACE_GLOBAL; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus.ACTIVE; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus.INACTIVE; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThrows; -import static org.junit.Assert.assertTrue; - -@Category({ MasterTests.class, MediumTests.class }) -public class TestSystemKey extends ManagedKeyTestBase { - - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestSystemKey.class); - - @Test - public void testSystemKeyInitializationAndRotation() throws Exception { - HMaster master = TEST_UTIL.getHBaseCluster().getMaster(); - KeyProvider keyProvider = Encryption.getKeyProvider(master.getConfiguration()); - assertNotNull(keyProvider); - assertTrue(keyProvider instanceof ManagedKeyProvider); - assertTrue(keyProvider instanceof MockManagedKeyProvider); - MockManagedKeyProvider pbeKeyProvider = (MockManagedKeyProvider) keyProvider; - ManagedKeyData initialSystemKey = validateInitialState(master, pbeKeyProvider); - - restartSystem(); - master = TEST_UTIL.getHBaseCluster().getMaster(); - validateInitialState(master, pbeKeyProvider); - - // Test rotation of cluster key by changing the key that the key provider provides and restart master. - String newAlias = "new_cluster_key"; - pbeKeyProvider.setClusterKeyAlias(newAlias); - Key newCluterKey = MockManagedKeyProvider.generateSecretKey(); - pbeKeyProvider.setMockedKey(newAlias, newCluterKey, ManagedKeyData.KEY_SPACE_GLOBAL); - restartSystem(); - master = TEST_UTIL.getHBaseCluster().getMaster(); - SystemKeyAccessor systemKeyAccessor = new SystemKeyAccessor(master); - assertEquals(2, systemKeyAccessor.getAllSystemKeyFiles().size()); - SystemKeyCache systemKeyCache = master.getSystemKeyCache(); - assertEquals(0, Bytes.compareTo(newCluterKey.getEncoded(), - systemKeyCache.getLatestSystemKey().getTheKey().getEncoded())); - assertEquals(initialSystemKey, - systemKeyAccessor.loadSystemKey(systemKeyAccessor.getAllSystemKeyFiles().get(1))); - assertEquals(initialSystemKey, - systemKeyCache.getSystemKeyByChecksum(initialSystemKey.getKeyChecksum())); +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +@RunWith(Suite.class) +@Suite.SuiteClasses({ + TestSystemKey.TestAccessorWhenDisabled.class, + TestSystemKey.TestManagerWhenDisabled.class, + TestSystemKey.TestAccessor.class, + TestSystemKey.TestForInvalidFilenames.class, + TestSystemKey.TestManagerForErrors.class +}) +@Category({ MasterTests.class, SmallTests.class }) +public class TestSystemKey { + private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); + + @Rule + public TestName name = new TestName(); + + protected Configuration conf; + protected Path testRootDir; + protected FileSystem fs; + + protected FileSystem mockFileSystem = mock(FileSystem.class); + protected MasterServices mockMaster = mock(MasterServices.class); + protected SystemKeyManager systemKeyManager; + + @Before + public void setUp() throws Exception { + conf = TEST_UTIL.getConfiguration(); + testRootDir = TEST_UTIL.getDataTestDir(name.getMethodName()); + fs = testRootDir.getFileSystem(conf); + + conf.set(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, "true"); + + when(mockMaster.getFileSystem()).thenReturn(mockFileSystem); + when(mockMaster.getConfiguration()).thenReturn(conf); + systemKeyManager = new SystemKeyManager(mockMaster); + } + + private static FileStatus createMockFile(String fileName) { + Path mockPath = mock(Path.class); + when(mockPath.getName()).thenReturn(fileName); + FileStatus mockFileStatus = mock(FileStatus.class); + when(mockFileStatus.getPath()).thenReturn(mockPath); + return mockFileStatus; + } + + @RunWith(BlockJUnit4ClassRunner.class) + @Category({ MasterTests.class, SmallTests.class }) + public static class TestAccessorWhenDisabled extends TestSystemKey { + @ClassRule public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestAccessorWhenDisabled.class); + + @Override public void setUp() throws Exception { + super.setUp(); + conf.set(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, "false"); + } + + @Test public void test() throws Exception { + assertNull(systemKeyManager.getAllSystemKeyFiles()); + assertNull(systemKeyManager.getLatestSystemKeyFile()); + } + } + + @RunWith(BlockJUnit4ClassRunner.class) + @Category({ MasterTests.class, SmallTests.class }) + public static class TestManagerWhenDisabled extends TestSystemKey { + @ClassRule public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestManagerWhenDisabled.class); + + @Override public void setUp() throws Exception { + super.setUp(); + conf.set(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, "false"); + } + + @Test public void test() throws Exception { + systemKeyManager.ensureSystemKeyInitialized(); + assertNull(systemKeyManager.rotateSystemKeyIfChanged()); + } + } + + @RunWith(BlockJUnit4ClassRunner.class) + @Category({ MasterTests.class, SmallTests.class }) + public static class TestAccessor extends TestSystemKey { + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestAccessor.class); + + @Test + public void testGetLatestWithNone() throws Exception { + when(mockFileSystem.globStatus(any())).thenReturn(new FileStatus[0]); + + RuntimeException ex = assertThrows(RuntimeException.class, + () -> systemKeyManager.getLatestSystemKeyFile()); + assertEquals("No cluster key initialized yet", ex.getMessage()); + } + + @Test + public void testGetWithSingle() throws Exception { + String fileName = SYSTEM_KEY_FILE_PREFIX + "1"; + FileStatus mockFileStatus = createMockFile(fileName); + + Path systemKeyDir = CommonFSUtils.getSystemKeyDir(conf); + when(mockFileSystem.globStatus(eq(new Path(systemKeyDir, SYSTEM_KEY_FILE_PREFIX+"*")))) + .thenReturn(new FileStatus[] { mockFileStatus }); + + List files = systemKeyManager.getAllSystemKeyFiles(); + assertEquals(1, files.size()); + assertEquals(fileName, files.get(0).getName()); + + Path latestSystemKeyFile = systemKeyManager.getLatestSystemKeyFile(); + assertEquals(fileName, latestSystemKeyFile.getName()); + + assertEquals(1, SystemKeyAccessor.extractSystemKeySeqNum(latestSystemKeyFile)); + } + + @Test + public void testGetWithMultiple() throws Exception { + FileStatus[] mockFileStatuses = IntStream.rangeClosed(1, 3) + .mapToObj(i -> createMockFile(SYSTEM_KEY_FILE_PREFIX + i)) + .toArray(FileStatus[]::new); + + Path systemKeyDir = CommonFSUtils.getSystemKeyDir(conf); + when(mockFileSystem.globStatus(eq(new Path(systemKeyDir, SYSTEM_KEY_FILE_PREFIX+"*")))) + .thenReturn( mockFileStatuses ); + + List files = systemKeyManager.getAllSystemKeyFiles(); + assertEquals(3, files.size()); + + Path latestSystemKeyFile = systemKeyManager.getLatestSystemKeyFile(); + assertEquals(3, SystemKeyAccessor.extractSystemKeySeqNum(latestSystemKeyFile)); + } + + @Test + public void testExtractKeySequenceForInvalidFilename() throws Exception { + assertEquals(-1, SystemKeyAccessor.extractKeySequence( + createMockFile("abcd").getPath())); + } } - @Test - public void testWithInvalidSystemKey() throws Exception { - HMaster master = TEST_UTIL.getHBaseCluster().getMaster(); - KeyProvider keyProvider = Encryption.getKeyProvider(master.getConfiguration()); - MockManagedKeyProvider pbeKeyProvider = (MockManagedKeyProvider) keyProvider; - - // Test startup failure when the cluster key is INACTIVE - SystemKeyManager tmpCKM = new SystemKeyManager(master); - tmpCKM.ensureSystemKeyInitialized(); - pbeKeyProvider.setMockedKeyStatus(pbeKeyProvider.getSystemKeyAlias(), ManagedKeyStatus.INACTIVE); - assertThrows(IOException.class, tmpCKM::ensureSystemKeyInitialized); + @RunWith(Parameterized.class) + @Category({ MasterTests.class, SmallTests.class }) + public static class TestForInvalidFilenames extends TestSystemKey { + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestForInvalidFilenames.class); + + @Parameter(0) + public String fileName; + @Parameter(1) + public String expectedErrorMessage; + + @Parameters(name = "{index},fileName={0}") + public static Collection data() { + return Arrays.asList(new Object[][] { + { "abcd", "Couldn't parse key file name: abcd" }, + {SYSTEM_KEY_FILE_PREFIX+"abcd", "Couldn't parse key file name: "+ + SYSTEM_KEY_FILE_PREFIX+"abcd"}, + // Add more test cases here + }); + } + + @Test + public void test() throws Exception { + FileStatus mockFileStatus = createMockFile(fileName); + + IOException ex = assertThrows(IOException.class, + () -> SystemKeyAccessor.extractSystemKeySeqNum(mockFileStatus.getPath())); + assertEquals(expectedErrorMessage, ex.getMessage()); + } } - private ManagedKeyData validateInitialState(HMaster master, MockManagedKeyProvider pbeKeyProvider ) - throws IOException { - SystemKeyAccessor systemKeyAccessor = new SystemKeyAccessor(master); - assertEquals(1, systemKeyAccessor.getAllSystemKeyFiles().size()); - SystemKeyCache systemKeyCache = master.getSystemKeyCache(); - assertNotNull(systemKeyCache); - ManagedKeyData clusterKey = systemKeyCache.getLatestSystemKey(); - assertEquals(pbeKeyProvider.getSystemKey(master.getClusterId().getBytes()), clusterKey); - assertEquals(clusterKey, - systemKeyCache.getSystemKeyByChecksum(clusterKey.getKeyChecksum())); - return clusterKey; + @RunWith(BlockJUnit4ClassRunner.class) + @Category({ MasterTests.class, SmallTests.class }) + public static class TestManagerForErrors extends TestSystemKey { + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestManagerForErrors.class); + + private static final String CLUSTER_ID = "clusterId"; + + @Mock + ManagedKeyProvider mockKeyProvide; + @Mock + MasterFileSystem masterFS; + + private MockSystemKeyManager manager; + private AutoCloseable closeableMocks; + + @Before + public void setUp() throws Exception { + super.setUp(); + closeableMocks = MockitoAnnotations.openMocks(this); + + when(mockFileSystem.globStatus(any())).thenReturn(new FileStatus[0]); + ClusterId clusterId = mock(ClusterId.class); + when(mockMaster.getMasterFileSystem()).thenReturn(masterFS); + when(masterFS.getClusterId()).thenReturn(clusterId); + when(clusterId.toString()).thenReturn(CLUSTER_ID); + when(masterFS.getFileSystem()).thenReturn(mockFileSystem); + + manager = new MockSystemKeyManager(mockMaster, mockKeyProvide); + } + + @After + public void tearDown() throws Exception { + closeableMocks.close();; + } + + @Test + public void testEnsureSystemKeyInitialized_WithNoSystemKeys() throws Exception { + when(mockKeyProvide.getSystemKey(any())).thenReturn(null); + + IOException ex = assertThrows(IOException.class, manager::ensureSystemKeyInitialized); + assertEquals("Failed to get system key for cluster id: " + CLUSTER_ID, ex.getMessage()); + } + + @Test + public void testEnsureSystemKeyInitialized_WithNoNonActiveKey() throws Exception { + String metadata = "key-metadata"; + ManagedKeyData keyData = mock(ManagedKeyData.class); + when(keyData.getKeyStatus()).thenReturn(INACTIVE); + when(keyData.getKeyMetadata()).thenReturn(metadata); + when(mockKeyProvide.getSystemKey(any())).thenReturn(keyData); + + IOException ex = assertThrows(IOException.class, manager::ensureSystemKeyInitialized); + assertEquals("System key is expected to be ACTIVE but it is: INACTIVE for metadata: " + + metadata, ex.getMessage()); + } + + @Test + public void testEnsureSystemKeyInitialized_WithInvalidMetadata() throws Exception { + ManagedKeyData keyData = mock(ManagedKeyData.class); + when(keyData.getKeyStatus()).thenReturn(ACTIVE); + when(mockKeyProvide.getSystemKey(any())).thenReturn(keyData); + + IOException ex = assertThrows(IOException.class, manager::ensureSystemKeyInitialized); + assertEquals("System key is expected to have metadata but it is null", ex.getMessage()); + } + + @Test + public void testEnsureSystemKeyInitialized_WithSaveFailure() throws Exception { + String metadata = "key-metadata"; + ManagedKeyData keyData = mock(ManagedKeyData.class); + when(keyData.getKeyStatus()).thenReturn(ACTIVE); + when(mockKeyProvide.getSystemKey(any())).thenReturn(keyData); + when(keyData.getKeyMetadata()).thenReturn(metadata); + when(mockFileSystem.globStatus(any())).thenReturn(new FileStatus[0]); + Path rootDir = CommonFSUtils.getRootDir(conf); + when(masterFS.getTempDir()).thenReturn(rootDir); + FSDataOutputStream mockStream = mock(FSDataOutputStream.class); + when(mockFileSystem.create(any())).thenReturn(mockStream); + when(mockFileSystem.rename(any(), any())).thenReturn(false); + + RuntimeException ex = assertThrows(RuntimeException.class, manager::ensureSystemKeyInitialized); + assertEquals("Failed to generate or save System Key", ex.getMessage()); + } + + @Test + public void testEnsureSystemKeyInitialized_RaceCondition() throws Exception { + String metadata = "key-metadata"; + ManagedKeyData keyData = mock(ManagedKeyData.class); + when(keyData.getKeyStatus()).thenReturn(ACTIVE); + when(mockKeyProvide.getSystemKey(any())).thenReturn(keyData); + when(keyData.getKeyMetadata()).thenReturn(metadata); + when(mockFileSystem.globStatus(any())).thenReturn(new FileStatus[0]); + Path rootDir = CommonFSUtils.getRootDir(conf); + when(masterFS.getTempDir()).thenReturn(rootDir); + FSDataOutputStream mockStream = mock(FSDataOutputStream.class); + when(mockFileSystem.create(any())).thenReturn(mockStream); + when(mockFileSystem.rename(any(), any())).thenReturn(false); + String fileName = SYSTEM_KEY_FILE_PREFIX + "1"; + FileStatus mockFileStatus = createMockFile(fileName); + when(mockFileSystem.globStatus(any())).thenReturn( + new FileStatus[0], + new FileStatus[0], + new FileStatus[] { mockFileStatus } + ); + + manager.ensureSystemKeyInitialized(); + } } - private void restartSystem() throws Exception { - TEST_UTIL.shutdownMiniHBaseCluster(); - Thread.sleep(2000); - TEST_UTIL.restartHBaseCluster(1); - TEST_UTIL.waitFor(60000, () -> TEST_UTIL.getMiniHBaseCluster().getMaster().isInitialized()); + private static class MockSystemKeyManager extends SystemKeyManager { + private final ManagedKeyProvider keyProvider; + + public MockSystemKeyManager(MasterServices master, ManagedKeyProvider keyProvider) throws IOException { + super(master); + this.keyProvider = keyProvider; + //systemKeyDir = mock(Path.class); + } + + @Override + protected ManagedKeyProvider getKeyProvider() { + return keyProvider; + } } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessor.java deleted file mode 100644 index 4421879f511d..000000000000 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessor.java +++ /dev/null @@ -1,182 +0,0 @@ -package org.apache.hadoop.hbase.master; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; -import org.apache.hadoop.hbase.HBaseTestingUtil; -import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.Server; -import org.apache.hadoop.hbase.keymeta.SystemKeyAccessor; -import org.apache.hadoop.hbase.testclassification.MasterTests; -import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.apache.hadoop.hbase.util.CommonFSUtils; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; -import org.junit.runner.RunWith; -import org.junit.runners.BlockJUnit4ClassRunner; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameter; -import org.junit.runners.Parameterized.Parameters; -import org.junit.runners.Suite; -import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; -import java.util.List; -import java.util.stream.IntStream; -import static org.apache.hadoop.hbase.HConstants.SYSTEM_KEY_FILE_PREFIX; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThrows; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -@RunWith(Suite.class) -@Suite.SuiteClasses({ TestSystemKeyAccessor.TestWhenDisabled.class, - TestSystemKeyAccessor.TestAccessor.class, - TestSystemKeyAccessor.TestForInvalidFileNames.class }) -@Category({ MasterTests.class, SmallTests.class }) -public class TestSystemKeyAccessor { - private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); - - @Rule - public TestName name = new TestName(); - - protected Configuration conf; - protected Path testRootDir; - protected FileSystem fs; - - protected FileSystem mockFileSystem = mock(FileSystem.class); - protected Server mockServer = mock(Server.class); - protected SystemKeyAccessor systemKeyAccessor; - - @Before - public void setUp() throws IOException { - conf = TEST_UTIL.getConfiguration(); - testRootDir = TEST_UTIL.getDataTestDir(name.getMethodName()); - fs = testRootDir.getFileSystem(conf); - - conf.set(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, "true"); - - when(mockServer.getFileSystem()).thenReturn(mockFileSystem); - when(mockServer.getConfiguration()).thenReturn(conf); - systemKeyAccessor = new SystemKeyAccessor(mockServer); - } - - private static FileStatus createMockFile(String fileName) { - Path mockPath = mock(Path.class); - when(mockPath.getName()).thenReturn(fileName); - FileStatus mockFileStatus = mock(FileStatus.class); - when(mockFileStatus.getPath()).thenReturn(mockPath); - return mockFileStatus; - } - - @RunWith(BlockJUnit4ClassRunner.class) - @Category({ MasterTests.class, SmallTests.class }) - public static class TestWhenDisabled extends TestSystemKeyAccessor { - @ClassRule public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestWhenDisabled.class); - - @Override public void setUp() throws IOException { - super.setUp(); - conf.set(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, "false"); - } - - @Test public void testDisabled() throws Exception { - assertNull(systemKeyAccessor.getAllSystemKeyFiles()); - assertNull(systemKeyAccessor.getLatestSystemKeyFile()); - } - } - - @RunWith(BlockJUnit4ClassRunner.class) - @Category({ MasterTests.class, SmallTests.class }) - public static class TestAccessor extends TestSystemKeyAccessor { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestAccessor.class); - - @Test - public void testGetLatestWithNone() throws Exception { - when(mockFileSystem.globStatus(any())).thenReturn(new FileStatus[0]); - - RuntimeException ex = assertThrows(RuntimeException.class, - () -> systemKeyAccessor.getLatestSystemKeyFile()); - assertEquals("No cluster key initialized yet", ex.getMessage()); - } - - @Test - public void testGetWithSingle() throws Exception { - String fileName = SYSTEM_KEY_FILE_PREFIX + "1"; - FileStatus mockFileStatus = createMockFile(fileName); - - Path systemKeyDir = CommonFSUtils.getSystemKeyDir(conf); - when(mockFileSystem.globStatus(eq(new Path(systemKeyDir, SYSTEM_KEY_FILE_PREFIX+"*")))) - .thenReturn(new FileStatus[] { mockFileStatus }); - - List files = systemKeyAccessor.getAllSystemKeyFiles(); - assertEquals(1, files.size()); - assertEquals(fileName, files.get(0).getName()); - - Path latestSystemKeyFile = systemKeyAccessor.getLatestSystemKeyFile(); - assertEquals(fileName, latestSystemKeyFile.getName()); - - assertEquals(1, systemKeyAccessor.extractSystemKeySeqNum(latestSystemKeyFile)); - } - - @Test - public void testGetWithMultiple() throws Exception { - FileStatus[] mockFileStatuses = IntStream.rangeClosed(1, 3) - .mapToObj(i -> createMockFile(SYSTEM_KEY_FILE_PREFIX + i)) - .toArray(FileStatus[]::new); - - Path systemKeyDir = CommonFSUtils.getSystemKeyDir(conf); - when(mockFileSystem.globStatus(eq(new Path(systemKeyDir, SYSTEM_KEY_FILE_PREFIX+"*")))) - .thenReturn( mockFileStatuses ); - - List files = systemKeyAccessor.getAllSystemKeyFiles(); - assertEquals(3, files.size()); - - Path latestSystemKeyFile = systemKeyAccessor.getLatestSystemKeyFile(); - assertEquals(3, systemKeyAccessor.extractSystemKeySeqNum(latestSystemKeyFile)); - } - } - - @RunWith(Parameterized.class) - @Category({ MasterTests.class, SmallTests.class }) - public static class TestForInvalidFileNames extends TestSystemKeyAccessor { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestForInvalidFileNames.class); - - @Parameter(0) - public String fileName; - @Parameter(1) - public String expectedErrorMessage; - - @Parameters(name = "{index},fileName={0}") - public static Collection data() { - return Arrays.asList(new Object[][] { - { "abcd", "Couldn't parse key file name: abcd" }, - {SYSTEM_KEY_FILE_PREFIX+"abcd", "Couldn't parse key file name: "+ - SYSTEM_KEY_FILE_PREFIX+"abcd"}, - // Add more test cases here - }); - } - - @Test - public void testForInvalid() throws Exception { - FileStatus mockFileStatus = createMockFile(fileName); - - IOException ex = assertThrows(IOException.class, - () -> systemKeyAccessor.extractSystemKeySeqNum(mockFileStatus.getPath())); - assertEquals(expectedErrorMessage, ex.getMessage()); - } - } -} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyManager.java new file mode 100644 index 000000000000..ec211f1dd02a --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyManager.java @@ -0,0 +1,116 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.master; + +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.io.crypto.Encryption; +import org.apache.hadoop.hbase.io.crypto.KeyProvider; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; +import org.apache.hadoop.hbase.io.crypto.MockManagedKeyProvider; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus; +import org.apache.hadoop.hbase.keymeta.SystemKeyAccessor; +import org.apache.hadoop.hbase.keymeta.SystemKeyCache; +import org.apache.hadoop.hbase.keymeta.ManagedKeyTestBase; +import org.apache.hadoop.hbase.testclassification.MasterTests; +import org.apache.hadoop.hbase.testclassification.MediumTests; +import org.apache.hadoop.hbase.util.Bytes; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import java.io.IOException; +import java.security.Key; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; + +@Category({ MasterTests.class, MediumTests.class }) +public class TestSystemKeyManager extends ManagedKeyTestBase { + + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestSystemKeyManager.class); + + @Test + public void testSystemKeyInitializationAndRotation() throws Exception { + HMaster master = TEST_UTIL.getHBaseCluster().getMaster(); + KeyProvider keyProvider = Encryption.getKeyProvider(master.getConfiguration()); + assertNotNull(keyProvider); + assertTrue(keyProvider instanceof ManagedKeyProvider); + assertTrue(keyProvider instanceof MockManagedKeyProvider); + MockManagedKeyProvider pbeKeyProvider = (MockManagedKeyProvider) keyProvider; + ManagedKeyData initialSystemKey = validateInitialState(master, pbeKeyProvider); + + restartSystem(); + master = TEST_UTIL.getHBaseCluster().getMaster(); + validateInitialState(master, pbeKeyProvider); + + // Test rotation of cluster key by changing the key that the key provider provides and restart + // master. + String newAlias = "new_cluster_key"; + pbeKeyProvider.setClusterKeyAlias(newAlias); + Key newCluterKey = MockManagedKeyProvider.generateSecretKey(); + pbeKeyProvider.setMockedKey(newAlias, newCluterKey, ManagedKeyData.KEY_SPACE_GLOBAL); + + restartSystem(); + master = TEST_UTIL.getHBaseCluster().getMaster(); + SystemKeyAccessor systemKeyAccessor = new SystemKeyAccessor(master); + assertEquals(2, systemKeyAccessor.getAllSystemKeyFiles().size()); + SystemKeyCache systemKeyCache = master.getSystemKeyCache(); + assertEquals(0, Bytes.compareTo(newCluterKey.getEncoded(), + systemKeyCache.getLatestSystemKey().getTheKey().getEncoded())); + assertEquals(initialSystemKey, + systemKeyAccessor.loadSystemKey(systemKeyAccessor.getAllSystemKeyFiles().get(1))); + assertEquals(initialSystemKey, + systemKeyCache.getSystemKeyByChecksum(initialSystemKey.getKeyChecksum())); + } + + @Test + public void testWithInvalidSystemKey() throws Exception { + HMaster master = TEST_UTIL.getHBaseCluster().getMaster(); + KeyProvider keyProvider = Encryption.getKeyProvider(master.getConfiguration()); + MockManagedKeyProvider pbeKeyProvider = (MockManagedKeyProvider) keyProvider; + + // Test startup failure when the cluster key is INACTIVE + SystemKeyManager tmpCKM = new SystemKeyManager(master); + tmpCKM.ensureSystemKeyInitialized(); + pbeKeyProvider.setMockedKeyStatus(pbeKeyProvider.getSystemKeyAlias(), ManagedKeyStatus.INACTIVE); + assertThrows(IOException.class, tmpCKM::ensureSystemKeyInitialized); + } + + private ManagedKeyData validateInitialState(HMaster master, MockManagedKeyProvider pbeKeyProvider ) + throws IOException { + SystemKeyAccessor systemKeyAccessor = new SystemKeyAccessor(master); + assertEquals(1, systemKeyAccessor.getAllSystemKeyFiles().size()); + SystemKeyCache systemKeyCache = master.getSystemKeyCache(); + assertNotNull(systemKeyCache); + ManagedKeyData clusterKey = systemKeyCache.getLatestSystemKey(); + assertEquals(pbeKeyProvider.getSystemKey(master.getClusterId().getBytes()), clusterKey); + assertEquals(clusterKey, + systemKeyCache.getSystemKeyByChecksum(clusterKey.getKeyChecksum())); + return clusterKey; + } + + private void restartSystem() throws Exception { + TEST_UTIL.shutdownMiniHBaseCluster(); + Thread.sleep(2000); + TEST_UTIL.restartHBaseCluster(1); + TEST_UTIL.waitFor(60000, () -> TEST_UTIL.getMiniHBaseCluster().getMaster().isInitialized()); + } +} From 0ad777a408e77a5ceb1dc9ca73adb1699d734f37 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Mon, 28 Apr 2025 18:25:52 +0530 Subject: [PATCH 29/70] Small optimizaion --- .../hbase/keymeta/SystemKeyAccessor.java | 8 +++++--- .../hadoop/hbase/master/SystemKeyManager.java | 16 +++++++++------- .../hadoop/hbase/master/TestSystemKey.java | 19 +++++++++---------- 3 files changed, 23 insertions(+), 20 deletions(-) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java index ec1c37c585db..e9a9e64baaaa 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java @@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; import org.apache.hadoop.hbase.util.CommonFSUtils; +import org.apache.hadoop.hbase.util.Pair; import org.apache.yetus.audience.InterfaceAudience; import java.io.IOException; import java.util.ArrayList; @@ -44,16 +45,17 @@ public SystemKeyAccessor(Server server) throws IOException { this.systemKeyDir = CommonFSUtils.getSystemKeyDir(server.getConfiguration()); } - public Path getLatestSystemKeyFile() throws IOException { + public Pair> getLatestSystemKeyFile() throws IOException { if (! isKeyManagementEnabled()) { - return null; + return new Pair<>(null, null); } List allClusterKeyFiles = getAllSystemKeyFiles(); if (allClusterKeyFiles.isEmpty()) { throw new RuntimeException("No cluster key initialized yet"); } int currentMaxSeqNum = SystemKeyAccessor.extractKeySequence(allClusterKeyFiles.get(0)); - return new Path(systemKeyDir, SYSTEM_KEY_FILE_PREFIX + currentMaxSeqNum); + return new Pair<>(new Path(systemKeyDir, SYSTEM_KEY_FILE_PREFIX + currentMaxSeqNum), + allClusterKeyFiles); } /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java index f109e1195b91..29a405939d78 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java @@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; import org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus; import org.apache.hadoop.hbase.keymeta.SystemKeyAccessor; +import org.apache.hadoop.hbase.util.Pair; import org.apache.yetus.audience.InterfaceAudience; import static org.apache.hadoop.hbase.HConstants.SYSTEM_KEY_FILE_PREFIX; @@ -46,7 +47,8 @@ public void ensureSystemKeyInitialized() throws IOException { if (clusterKeys.isEmpty()) { LOG.info("Initializing System Key for the first time"); // Double check for cluster key as another HMaster might have succeeded. - if (rotateSystemKey(null) == null && getAllSystemKeyFiles().isEmpty()) { + if (rotateSystemKey(null, clusterKeys) == null && + getAllSystemKeyFiles().isEmpty()) { throw new RuntimeException("Failed to generate or save System Key"); } } @@ -62,12 +64,13 @@ public ManagedKeyData rotateSystemKeyIfChanged() throws IOException { if (! isKeyManagementEnabled()) { return null; } - Path latestFile = getLatestSystemKeyFile(); + Pair> latestFileResult = getLatestSystemKeyFile(); + Path latestFile = getLatestSystemKeyFile().getFirst(); String latestKeyMetadata = loadKeyMetadata(latestFile); - return rotateSystemKey(latestKeyMetadata); + return rotateSystemKey(latestKeyMetadata, latestFileResult.getSecond()); } - private ManagedKeyData rotateSystemKey(String currentKeyMetadata) throws IOException { + private ManagedKeyData rotateSystemKey(String currentKeyMetadata, List allSystemKeyFiles) throws IOException { ManagedKeyProvider provider = getKeyProvider(); ManagedKeyData clusterKey = provider.getSystemKey( master.getMasterFileSystem().getClusterId().toString().getBytes()); @@ -83,14 +86,13 @@ private ManagedKeyData rotateSystemKey(String currentKeyMetadata) throws IOExcep throw new IOException("System key is expected to have metadata but it is null"); } if (! clusterKey.getKeyMetadata().equals(currentKeyMetadata) && - saveLatestSystemKey(clusterKey.getKeyMetadata())) { + saveLatestSystemKey(clusterKey.getKeyMetadata(), allSystemKeyFiles)) { return clusterKey; } return null; } - private boolean saveLatestSystemKey(String keyMetadata) throws IOException { - List allSystemKeyFiles = getAllSystemKeyFiles(); + private boolean saveLatestSystemKey(String keyMetadata, List allSystemKeyFiles) throws IOException { int nextSystemKeySeq = (allSystemKeyFiles.isEmpty() ? -1 : SystemKeyAccessor.extractKeySequence(allSystemKeyFiles.get(0))) + 1; LOG.info("Trying to save a new cluster key at seq: {}", nextSystemKeySeq); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java index e724415fca16..9ab2a154c49b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java @@ -9,14 +9,13 @@ import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.io.crypto.Encryption; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; -import org.apache.hadoop.hbase.io.crypto.MockManagedKeyProvider; import org.apache.hadoop.hbase.keymeta.SystemKeyAccessor; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.CommonFSUtils; +import org.apache.hadoop.hbase.util.Pair; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; @@ -38,7 +37,6 @@ import java.util.List; import java.util.stream.IntStream; import static org.apache.hadoop.hbase.HConstants.SYSTEM_KEY_FILE_PREFIX; -import static org.apache.hadoop.hbase.io.crypto.ManagedKeyData.KEY_SPACE_GLOBAL; import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus.ACTIVE; import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus.INACTIVE; import static org.junit.Assert.assertEquals; @@ -106,7 +104,7 @@ public static class TestAccessorWhenDisabled extends TestSystemKey { @Test public void test() throws Exception { assertNull(systemKeyManager.getAllSystemKeyFiles()); - assertNull(systemKeyManager.getLatestSystemKeyFile()); + assertNull(systemKeyManager.getLatestSystemKeyFile().getFirst()); } } @@ -156,10 +154,11 @@ public void testGetWithSingle() throws Exception { assertEquals(1, files.size()); assertEquals(fileName, files.get(0).getName()); - Path latestSystemKeyFile = systemKeyManager.getLatestSystemKeyFile(); - assertEquals(fileName, latestSystemKeyFile.getName()); + Pair> latestSystemKeyFileResult = systemKeyManager.getLatestSystemKeyFile(); + assertEquals(fileName, latestSystemKeyFileResult.getFirst().getName()); - assertEquals(1, SystemKeyAccessor.extractSystemKeySeqNum(latestSystemKeyFile)); + assertEquals(1, SystemKeyAccessor.extractSystemKeySeqNum( + latestSystemKeyFileResult.getFirst())); } @Test @@ -175,8 +174,9 @@ public void testGetWithMultiple() throws Exception { List files = systemKeyManager.getAllSystemKeyFiles(); assertEquals(3, files.size()); - Path latestSystemKeyFile = systemKeyManager.getLatestSystemKeyFile(); - assertEquals(3, SystemKeyAccessor.extractSystemKeySeqNum(latestSystemKeyFile)); + Pair> latestSystemKeyFileResult = systemKeyManager.getLatestSystemKeyFile(); + assertEquals(3, + SystemKeyAccessor.extractSystemKeySeqNum(latestSystemKeyFileResult.getFirst())); } @Test @@ -320,7 +320,6 @@ public void testEnsureSystemKeyInitialized_RaceCondition() throws Exception { String fileName = SYSTEM_KEY_FILE_PREFIX + "1"; FileStatus mockFileStatus = createMockFile(fileName); when(mockFileSystem.globStatus(any())).thenReturn( - new FileStatus[0], new FileStatus[0], new FileStatus[] { mockFileStatus } ); From 80739b208ce751880f4605b3f5990dd78c18a521 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Mon, 19 May 2025 17:44:08 +0530 Subject: [PATCH 30/70] Filled some gaps and added test coverage --- .../org/apache/hadoop/hbase/HConstants.java | 6 +- .../io/crypto/ManagedKeyStoreKeyProvider.java | 13 ++-- .../hadoop/hbase/keymeta/KeymetaAdmin.java | 3 +- .../io/crypto/TestManagedKeyProvider.java | 39 +++++++--- .../hbase/keymeta/KeyManagementBase.java | 20 ++--- .../hbase/keymeta/KeymetaAdminImpl.java | 42 ++++++++--- .../hbase/keymeta/KeymetaServiceEndpoint.java | 2 + .../hbase/keymeta/KeymetaTableAccessor.java | 4 +- .../hbase/master/TestKeymetaAdminImpl.java | 75 ++++++++++++++++--- 9 files changed, 149 insertions(+), 55 deletions(-) diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java index 8cdd6bf03645..262a0779b4ca 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java @@ -1318,9 +1318,9 @@ public enum OperationStatusCode { "hbase.crypto.managed_keys.enabled"; public static final boolean CRYPTO_MANAGED_KEYS_DEFAULT_ENABLED = false; - public static final String CRYPTO_MANAGED_KEYS_PER_CUST_ACTIVE_KEY_COUNT = - "hbase.crypto.managed_keys.per_cust.active_count"; - public static final int CRYPTO_MANAGED_KEYS_PER_CUST_ACTIVE_KEY_DEFAULT_COUNT = 1; + public static final String CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_COUNT = + "hbase.crypto.managed_keys.per_cust_namespace.active_count"; + public static final int CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_DEFAULT_COUNT = 1; /** Configuration key for setting RPC codec class name */ public static final String RPC_CODEC_CONF_KEY = "hbase.client.rpc.codec"; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java index 8c42ab0b99f8..3268e812eb66 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java @@ -24,16 +24,17 @@ public void initConfig(Configuration conf) { @Override public ManagedKeyData getSystemKey(byte[] clusterId) { checkConfig(); - String masterKeyAlias = conf.get(HConstants.CRYPTO_MANAGED_KEY_STORE_SYSTEM_KEY_NAME_CONF_KEY, null); - if (masterKeyAlias == null) { - throw new RuntimeException("No alias configured for master key"); + String systemKeyAlias = conf.get(HConstants.CRYPTO_MANAGED_KEY_STORE_SYSTEM_KEY_NAME_CONF_KEY, + null); + if (systemKeyAlias == null) { + throw new RuntimeException("No alias configured for system key"); } - Key key = getKey(masterKeyAlias); + Key key = getKey(systemKeyAlias); if (key == null) { - throw new RuntimeException("Unable to find cluster key with alias: " + masterKeyAlias); + throw new RuntimeException("Unable to find system key with alias: " + systemKeyAlias); } // Encode clusterId too for consistency with that of key custodian. - String keyMetadata = generateKeyMetadata(masterKeyAlias, + String keyMetadata = generateKeyMetadata(systemKeyAlias, ManagedKeyProvider.encodeToStr(clusterId)); return new ManagedKeyData(clusterId, ManagedKeyData.KEY_SPACE_GLOBAL, key, ManagedKeyStatus.ACTIVE, keyMetadata); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java index cd662232a24c..ad15874df971 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java @@ -39,7 +39,8 @@ public interface KeymetaAdmin { * @return The list of {@link ManagedKeyData} objects each identifying the key and its current status. * @throws IOException if an error occurs while enabling key management. */ - List enableKeyManagement(String keyCust, String keyNamespace) throws IOException; + List enableKeyManagement(String keyCust, String keyNamespace) + throws IOException, KeyException; /** * Get the status of all the keys for the specified custodian. diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyProvider.java index 1918ae78d31d..a96c94ed4d63 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyProvider.java @@ -47,6 +47,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; @RunWith(Suite.class) @@ -61,7 +62,7 @@ public static class TestManagedKeyStoreKeyProvider extends TestKeyStoreKeyProvid public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestManagedKeyStoreKeyProvider.class); - private static final String MASTER_KEY_ALIAS = "master-alias"; + private static final String SYSTEM_KEY_ALIAS = "system-alias"; private Configuration conf = HBaseConfiguration.create(); private int nPrefixes = 2; @@ -69,7 +70,7 @@ public static class TestManagedKeyStoreKeyProvider extends TestKeyStoreKeyProvid private Map prefix2key = new HashMap<>(); private Map prefix2alias = new HashMap<>(); private String clusterId; - private byte[] masterKey; + private byte[] systemKey; @Before public void setUp() throws Exception { @@ -102,16 +103,16 @@ protected void addCustomEntries(KeyStore store, Properties passwdProps) throws E passwdProps.setProperty(alias, PASSWORD); clusterId = UUID.randomUUID().toString(); - masterKey = MessageDigest.getInstance("SHA-256").digest( - Bytes.toBytes(MASTER_KEY_ALIAS)); - store.setEntry(MASTER_KEY_ALIAS, new KeyStore.SecretKeyEntry( - new SecretKeySpec(masterKey, "AES")), + systemKey = MessageDigest.getInstance("SHA-256").digest( + Bytes.toBytes(SYSTEM_KEY_ALIAS)); + store.setEntry(SYSTEM_KEY_ALIAS, new KeyStore.SecretKeyEntry( + new SecretKeySpec(systemKey, "AES")), new KeyStore.PasswordProtection(withPasswordOnAlias ? PASSWORD.toCharArray() : new char[0])); - conf.set(HConstants.CRYPTO_MANAGED_KEY_STORE_SYSTEM_KEY_NAME_CONF_KEY, MASTER_KEY_ALIAS); + conf.set(HConstants.CRYPTO_MANAGED_KEY_STORE_SYSTEM_KEY_NAME_CONF_KEY, SYSTEM_KEY_ALIAS); - passwdProps.setProperty(MASTER_KEY_ALIAS, PASSWORD); + passwdProps.setProperty(SYSTEM_KEY_ALIAS, PASSWORD); } } @@ -122,6 +123,14 @@ private void addEntry(String alias, String prefix) { conf.set(confKey, alias); } + @Test + public void testMissingConfig() throws Exception { + managedKeyProvider.initConfig(null); + RuntimeException ex = assertThrows(RuntimeException.class, + () -> managedKeyProvider.getSystemKey(null)); + assertEquals("initConfig is not called or config is null", ex.getMessage()); + } + @Test public void testGetManagedKey() throws Exception { for (Bytes prefix : prefix2key.keySet()) { @@ -170,8 +179,16 @@ public void testGetDisabledKey() throws Exception { @Test public void testGetSystemKey() throws Exception { ManagedKeyData clusterKeyData = managedKeyProvider.getSystemKey(clusterId.getBytes()); - assertKeyData(clusterKeyData, ManagedKeyStatus.ACTIVE, masterKey, clusterId.getBytes(), - MASTER_KEY_ALIAS); + assertKeyData(clusterKeyData, ManagedKeyStatus.ACTIVE, systemKey, clusterId.getBytes(), + SYSTEM_KEY_ALIAS); + conf.unset(HConstants.CRYPTO_MANAGED_KEY_STORE_SYSTEM_KEY_NAME_CONF_KEY); + RuntimeException ex = assertThrows(RuntimeException.class, + () -> managedKeyProvider.getSystemKey(null)); + assertEquals("No alias configured for system key", ex.getMessage()); + conf.set(HConstants.CRYPTO_MANAGED_KEY_STORE_SYSTEM_KEY_NAME_CONF_KEY, "non_existing_alias"); + ex = assertThrows(RuntimeException.class, + () -> managedKeyProvider.getSystemKey(null)); + assertTrue(ex.getMessage().startsWith("Unable to find system key with alias:")); } @Test @@ -229,7 +246,7 @@ private void assertKeyData(ManagedKeyData keyData, ManagedKeyStatus expKeyStatus public static class TestManagedKeyProviderDefault { @ClassRule public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestManagedKeyProvider.class); + HBaseClassTestRule.forClass(TestManagedKeyProviderDefault.class); @Test public void testEncodeToStr() { byte[] input = { 72, 101, 108, 108, 111 }; // "Hello" in ASCII diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java index 5bec294fe92f..10aac70a71c7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java @@ -37,7 +37,7 @@ public abstract class KeyManagementBase { private final Server server; private Boolean keyManagementEnabled; - private Integer perPrefixActiveKeyCount; + private Integer perCustNamespaceActiveKeyCount; public KeyManagementBase(Server server) { this.server = server; @@ -83,16 +83,16 @@ protected void assertKeyManagementEnabled() throws IOException { } } - protected int getPerPrefixActiveKeyConfCount() throws IOException { - if (perPrefixActiveKeyCount == null) { - perPrefixActiveKeyCount = getServer().getConfiguration().getInt( - HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_ACTIVE_KEY_COUNT, - HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_ACTIVE_KEY_DEFAULT_COUNT); + protected int getPerCustodianNamespaceActiveKeyConfCount() throws IOException { + if (perCustNamespaceActiveKeyCount == null) { + perCustNamespaceActiveKeyCount = getServer().getConfiguration().getInt( + HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_COUNT, + HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_DEFAULT_COUNT); } - if (perPrefixActiveKeyCount <= 0) { - throw new IOException("Invalid value: " + perPrefixActiveKeyCount + " configured for: " + - HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_ACTIVE_KEY_COUNT); + if (perCustNamespaceActiveKeyCount <= 0) { + throw new IOException("Invalid value: " + perCustNamespaceActiveKeyCount + " configured for: " + + HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_COUNT); } - return perPrefixActiveKeyCount; + return perCustNamespaceActiveKeyCount; } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java index 04e917da03f8..6015eff11a1d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java @@ -20,6 +20,7 @@ import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -39,30 +40,51 @@ public KeymetaAdminImpl(Server server) { @Override public List enableKeyManagement(String keyCust, String keyNamespace) - throws IOException { + throws IOException, KeyException { assertKeyManagementEnabled(); LOG.info("Trying to enable key management on custodian: {} under namespace: {}", keyCust, keyNamespace); byte[] key_cust = ManagedKeyProvider.decodeToBytes(keyCust); + int perCustNamespaceActiveKeyConfCount = getPerCustodianNamespaceActiveKeyConfCount(); + + // Check if (cust, namespace) pair is already enabled and if there are enough number of + // active keys. + List activeKeys = getActiveKeys(key_cust, keyNamespace); + if (activeKeys.size() >= perCustNamespaceActiveKeyConfCount) { + LOG.info("enableManagedKeys: specified (custodian: {}, namespace: {}) already has " + + " {} number of managed keys active, which satisfies the configured minimum: {}", + keyCust, keyNamespace, activeKeys.size(), perCustNamespaceActiveKeyConfCount); + return activeKeys; + } + + Set existingKeys = new HashSet<>(activeKeys); + int nKeysToRetrieve = perCustNamespaceActiveKeyConfCount - activeKeys.size(); + Set retrievedKeys = new HashSet<>(nKeysToRetrieve); ManagedKeyProvider provider = getKeyProvider(); - int perPrefixActiveKeyConfCount = getPerPrefixActiveKeyConfCount(); - Set retrievedKeys = new HashSet<>(perPrefixActiveKeyConfCount); - for (int i = 0; i < perPrefixActiveKeyConfCount; ++i) { + for (int i = 0; i < nKeysToRetrieve; ++i) { ManagedKeyData pbeKey = provider.getManagedKey(key_cust, keyNamespace); if (pbeKey == null) { throw new IOException("Invalid null managed key received from key provider"); } - if (retrievedKeys.contains(pbeKey)) { + if (retrievedKeys.contains(pbeKey) || existingKeys.contains(pbeKey)) { // This typically means, the key provider is not capable of producing multiple active keys. - LOG.info("enableManagedKeys: configured key count per prefix: " + - perPrefixActiveKeyConfCount + " but received only: " + retrievedKeys.size() + - " unique keys."); + LOG.info("enableManagedKeys: specified (custodian: {}, namespace: {}) is configured " + + " to have {} active keys, but received only {} unique keys.", + keyCust, keyNamespace, perCustNamespaceActiveKeyConfCount, + activeKeys.size() + retrievedKeys.size()); break; } retrievedKeys.add(pbeKey); - LOG.info("enableManagedKeys: got key data with status: {} and metadata: {} for custodian: {}", - pbeKey.getKeyStatus(), pbeKey.getKeyMetadata(), keyCust); + LOG.info("enableManagedKeys: got managed key with status: {} and metadata: {} for " + + "(custodian: {}, namespace: {})", pbeKey.getKeyStatus(), pbeKey.getKeyMetadata(), + keyCust, keyNamespace); addKey(pbeKey); + if (pbeKey.getKeyStatus() != ManagedKeyStatus.ACTIVE) { + LOG.info("enableManagedKeys: received non-ACTIVE key with status: {} with metadata: {} for " + + "(custodian: {}, namespace: {})", + pbeKey.getKeyStatus(), pbeKey.getKeyMetadata(), keyCust, keyNamespace); + break; + } } return retrievedKeys.stream().toList(); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java index 31ddb74cc121..cab56215555b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java @@ -113,6 +113,8 @@ public void enableKeyManagement(RpcController controller, ManagedKeysRequest req done.run(generateKeyStatusResponse(managedKeyStatuses, builder)); } catch (IOException e) { CoprocessorRpcUtils.setControllerException(controller, e); + } catch (KeyException e) { + CoprocessorRpcUtils.setControllerException(controller, new IOException(e)); } } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java index c447cf00230a..6d7df48ab2a6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java @@ -234,8 +234,8 @@ public static byte[] constructRowKeyForMetadata(ManagedKeyData keyData) { @VisibleForTesting public static byte[] constructRowKeyForMetadata(byte[] key_cust, String keyNamespace, byte[] keyMetadataHash) { - int prefixLength = key_cust.length; - return Bytes.add(Bytes.toBytes(prefixLength), key_cust, Bytes.toBytesBinary(keyNamespace), + int custLength = key_cust.length; + return Bytes.add(Bytes.toBytes(custLength), key_cust, Bytes.toBytesBinary(keyNamespace), keyMetadataHash); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java index 03e5d3d90bbb..2df063c1de7a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java @@ -46,6 +46,7 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.argThat; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.reset; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -70,7 +71,7 @@ public class TestKeymetaAdminImpl { protected FileSystem mockFileSystem = mock(FileSystem.class); protected Server mockServer = mock(Server.class); - protected KeymetaAdminImpl keymetaAdmin; + protected DummyKeymetaAdminImpl keymetaAdmin; KeymetaTableAccessor keymetaAccessor = mock(KeymetaTableAccessor.class); @Before @@ -143,7 +144,7 @@ public static Collection data() { @Override public void setUp() throws Exception { super.setUp(); - conf.set(HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_ACTIVE_KEY_COUNT, + conf.set(HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_COUNT, Integer.toString(nKeys)); } @@ -163,6 +164,8 @@ public void testEnableAndGet() throws Exception { (ManagedKeyData keyData) -> assertKeyData(keyData, keyStatus, isNullKey ? null : managedKeyProvider.getMockedKey(cust, keySpace)))); + verify(keymetaAccessor).getAllKeys(cust.getBytes(), keySpace); + reset(keymetaAccessor); keymetaAdmin.getManagedKeys(encodedCust, keySpace); verify(keymetaAccessor).getAllKeys(cust.getBytes(), keySpace); @@ -179,6 +182,8 @@ public static class TestMultiKeyGen extends TestKeymetaAdminImpl { @Parameter(0) public String keySpace; + private MockManagedKeyProvider managedKeyProvider; + @Parameters(name = "{index},keySpace={0}") public static Collection data() { return Arrays.asList( @@ -191,23 +196,59 @@ public static Collection data() { @Override public void setUp() throws Exception { super.setUp(); - conf.set(HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_ACTIVE_KEY_COUNT, "3"); - MockManagedKeyProvider managedKeyProvider = - (MockManagedKeyProvider) Encryption.getKeyProvider(conf); + conf.set(HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_COUNT, "3"); + managedKeyProvider = (MockManagedKeyProvider) Encryption.getKeyProvider(conf); managedKeyProvider.setMultikeyGenMode(true); } @Test public void testEnable() throws Exception { + List managedKeyStatuses; String cust = "cust1"; String encodedCust = ManagedKeyProvider.encodeToStr(cust.getBytes()); - List managedKeyStatuses = - keymetaAdmin.enableKeyManagement(encodedCust, keySpace); - assertNotNull(managedKeyStatuses); - assertEquals(3, managedKeyStatuses.size()); - assertEquals(ACTIVE, managedKeyStatuses.get(0).getKeyStatus()); - assertEquals(ACTIVE, managedKeyStatuses.get(1).getKeyStatus()); + managedKeyStatuses = keymetaAdmin.enableKeyManagement(encodedCust, keySpace); + assertKeys(managedKeyStatuses, 3); + verify(keymetaAccessor).getAllKeys(cust.getBytes(), keySpace); verify(keymetaAccessor, times(3)).addKey(any()); + + reset(keymetaAccessor); + + when(keymetaAccessor.getAllKeys(cust.getBytes(), keySpace)).thenReturn(managedKeyStatuses); + managedKeyStatuses = keymetaAdmin.enableKeyManagement(encodedCust, keySpace); + assertKeys(managedKeyStatuses, 3); + verify(keymetaAccessor, times(0)).addKey(any()); + + reset(keymetaAccessor); + when(keymetaAccessor.getAllKeys(cust.getBytes(), keySpace)).thenReturn(managedKeyStatuses); + keymetaAdmin.activeKeyCountOverride = 4; + managedKeyStatuses = keymetaAdmin.enableKeyManagement(encodedCust, keySpace); + assertKeys(managedKeyStatuses, 1); + verify(keymetaAccessor, times(1)).addKey(any()); + + reset(keymetaAccessor); + when(keymetaAccessor.getAllKeys(cust.getBytes(), keySpace)).thenReturn(managedKeyStatuses); + managedKeyProvider.setMultikeyGenMode(false); + managedKeyStatuses = keymetaAdmin.enableKeyManagement(encodedCust, keySpace); + assertKeys(managedKeyStatuses, 0); + verify(keymetaAccessor, times(0)).addKey(any()); + + //reset(keymetaAccessor); + managedKeyProvider.setMockedKeyStatus(cust, FAILED); + managedKeyStatuses = keymetaAdmin.enableKeyManagement(encodedCust, keySpace); + assertNotNull(managedKeyStatuses); + assertEquals(1, managedKeyStatuses.size()); + assertEquals(FAILED, managedKeyStatuses.get(0).getKeyStatus()); + verify(keymetaAccessor, times(1)).addKey(any()); + // NOTE: Reset as this instance is shared for more than 1 test. + managedKeyProvider.setMockedKeyStatus(cust, ACTIVE); + } + + private static void assertKeys(List managedKeyStatuses, int expectedCnt) { + assertNotNull(managedKeyStatuses); + assertEquals(expectedCnt, managedKeyStatuses.size()); + for (int i = 0; i < managedKeyStatuses.size(); ++i) { + assertEquals(ACTIVE, managedKeyStatuses.get(i).getKeyStatus()); + } } } @@ -269,7 +310,7 @@ public static Collection data() { @Test public void test() throws Exception { - conf.set(HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_ACTIVE_KEY_COUNT, keyCount); + conf.set(HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_COUNT, keyCount); String cust = "cust1"; String encodedCust = ManagedKeyProvider.encodeToStr(cust.getBytes()); assertThrows(expectedExType, () -> @@ -282,6 +323,16 @@ public DummyKeymetaAdminImpl(Server mockServer, KeymetaTableAccessor mockAccesso super(mockServer); } + public Integer activeKeyCountOverride; + + @Override + protected int getPerCustodianNamespaceActiveKeyConfCount() throws IOException { + if (activeKeyCountOverride != null) { + return activeKeyCountOverride; + } + return super.getPerCustodianNamespaceActiveKeyConfCount(); + } + @Override public void addKey(ManagedKeyData keyData) throws IOException { keymetaAccessor.addKey(keyData); From c8e3f9a958a28b25d7798107e53b385425bff1e7 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Tue, 20 May 2025 21:21:27 +0530 Subject: [PATCH 31/70] Just renamed status to state --- .../hbase/keymeta/KeymetaAdminClient.java | 22 +++--- .../hbase/io/crypto/ManagedKeyData.java | 42 +++++------ ...gedKeyStatus.java => ManagedKeyState.java} | 18 ++--- .../io/crypto/ManagedKeyStoreKeyProvider.java | 6 +- .../hadoop/hbase/keymeta/KeymetaAdmin.java | 1 - .../io/crypto/MockManagedKeyProvider.java | 18 ++--- .../hbase/io/crypto/TestManagedKeyData.java | 34 ++++----- .../io/crypto/TestManagedKeyProvider.java | 18 ++--- .../main/protobuf/server/ManagedKeys.proto | 6 +- .../hbase/keymeta/KeymetaAdminImpl.java | 8 +-- .../hbase/keymeta/KeymetaServiceEndpoint.java | 22 +++--- .../hbase/keymeta/KeymetaTableAccessor.java | 22 +++--- .../hbase/keymeta/ManagedKeyAccessor.java | 2 +- .../hbase/keymeta/ManagedKeyDataCache.java | 4 +- .../hadoop/hbase/master/SystemKeyManager.java | 6 +- .../hbase/keymeta/TestKeymetaEndpoint.java | 55 +++++++-------- .../keymeta/TestKeymetaTableAccessor.java | 32 ++++----- .../keymeta/TestManagedKeyDataCache.java | 4 +- .../hbase/keymeta/TestManagedKeymeta.java | 24 +++---- .../hbase/master/TestKeymetaAdminImpl.java | 70 +++++++++---------- .../hadoop/hbase/master/TestSystemKey.java | 12 ++-- .../hbase/master/TestSystemKeyManager.java | 4 +- 22 files changed, 214 insertions(+), 216 deletions(-) rename hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/{ManagedKeyStatus.java => ManagedKeyState.java} (77%) diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java index cc0b52c13275..7a4f6d58ae4f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java @@ -2,7 +2,7 @@ import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; -import org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos; import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysRequest; import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysResponse; @@ -49,16 +49,16 @@ public List getManagedKeys(String keyCust, String keyNamespace) } } - private static List generateKeyDataList(ManagedKeysProtos.GetManagedKeysResponse statusResponse) { - List keyStatuses = new ArrayList<>(); - for (ManagedKeysResponse status: statusResponse.getStatusList()) { - keyStatuses.add(new ManagedKeyData( - status.getKeyCustBytes().toByteArray(), - status.getKeyNamespace(), null, - ManagedKeyStatus.forValue((byte) status.getKeyStatus().getNumber()), - status.getKeyMetadata(), - status.getRefreshTimestamp(), status.getReadOpCount(), status.getWriteOpCount())); + private static List generateKeyDataList(ManagedKeysProtos.GetManagedKeysResponse stateResponse) { + List keyStates = new ArrayList<>(); + for (ManagedKeysResponse state: stateResponse.getStateList()) { + keyStates.add(new ManagedKeyData( + state.getKeyCustBytes().toByteArray(), + state.getKeyNamespace(), null, + ManagedKeyState.forValue((byte) state.getKeyState().getNumber()), + state.getKeyMetadata(), + state.getRefreshTimestamp(), state.getReadOpCount(), state.getWriteOpCount())); } - return keyStatuses; + return keyStates; } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java index aa0e168297e0..328efa4f5290 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java @@ -31,16 +31,16 @@ import java.util.Base64; /** - * This class represents an encryption key data which includes the key itself, its status, metadata + * This class represents an encryption key data which includes the key itself, its state, metadata * and a prefix. The metadata encodes enough information on the key such that it can be used to - * retrieve the exact same key again in the future. If the key status is {@link ManagedKeyStatus#FAILED} + * retrieve the exact same key again in the future. If the key state is {@link ManagedKeyState#FAILED} * expect the key to be {@code null}. * * The key data is represented by the following fields: *
      *
    • key_cust: The prefix for which this key belongs to
    • *
    • theKey: The key capturing the bytes and encoding
    • - *
    • keyStatus: The status of the key (see {@link ManagedKeyStatus})
    • + *
    • keyState: The state of the key (see {@link ManagedKeyState})
    • *
    • keyMetadata: Metadata that identifies the key
    • *
    * @@ -67,7 +67,7 @@ public class ManagedKeyData { private final byte[] keyCust; private final String keyNamespace; private final Key theKey; - private final ManagedKeyStatus keyStatus; + private final ManagedKeyState keyState; private final String keyMetadata; private final long refreshTimestamp; private final long readOpCount; @@ -80,13 +80,13 @@ public class ManagedKeyData { * * @param key_cust The key custodian. * @param theKey The actual key, can be {@code null}. - * @param keyStatus The status of the key. + * @param keyState The state of the key. * @param keyMetadata The metadata associated with the key. - * @throws NullPointerException if any of key_cust, keyStatus or keyMetadata is null. + * @throws NullPointerException if any of key_cust, keyState or keyMetadata is null. */ - public ManagedKeyData(byte[] key_cust, String key_namespace, Key theKey, ManagedKeyStatus keyStatus, + public ManagedKeyData(byte[] key_cust, String key_namespace, Key theKey, ManagedKeyState keyState, String keyMetadata) { - this(key_cust, key_namespace, theKey, keyStatus, keyMetadata, + this(key_cust, key_namespace, theKey, keyState, keyMetadata, EnvironmentEdgeManager.currentTime(), 0, 0); } @@ -95,18 +95,18 @@ public ManagedKeyData(byte[] key_cust, String key_namespace, Key theKey, Managed * * @param key_cust The key custodian. * @param theKey The actual key, can be {@code null}. - * @param keyStatus The status of the key. + * @param keyState The state of the key. * @param keyMetadata The metadata associated with the key. * @param refreshTimestamp The timestamp when this key was last refreshed. * @param readOpCount The current number of read operations for this key. * @param writeOpCount The current number of write operations for this key. - * @throws NullPointerException if any of key_cust, keyStatus or keyMetadata is null. + * @throws NullPointerException if any of key_cust, keyState or keyMetadata is null. */ - public ManagedKeyData(byte[] key_cust, String key_namespace, Key theKey, ManagedKeyStatus keyStatus, + public ManagedKeyData(byte[] key_cust, String key_namespace, Key theKey, ManagedKeyState keyState, String keyMetadata, long refreshTimestamp, long readOpCount, long writeOpCount) { Preconditions.checkNotNull(key_cust, "key_cust should not be null"); Preconditions.checkNotNull(key_namespace, "key_namespace should not be null"); - Preconditions.checkNotNull(keyStatus, "keyStatus should not be null"); + Preconditions.checkNotNull(keyState, "keyState should not be null"); Preconditions.checkNotNull(keyMetadata, "keyMetadata should not be null"); Preconditions.checkArgument(readOpCount >= 0, "readOpCount: " + readOpCount + " should be >= 0"); @@ -116,7 +116,7 @@ public ManagedKeyData(byte[] key_cust, String key_namespace, Key theKey, Managed this.keyCust = key_cust; this.keyNamespace = key_namespace; this.theKey = theKey; - this.keyStatus = keyStatus; + this.keyState = keyState; this.keyMetadata = keyMetadata; this.refreshTimestamp = refreshTimestamp; this.readOpCount = readOpCount; @@ -125,7 +125,7 @@ public ManagedKeyData(byte[] key_cust, String key_namespace, Key theKey, Managed @VisibleForTesting public ManagedKeyData cloneWithoutKey() { - return new ManagedKeyData(keyCust, keyNamespace, null, keyStatus, keyMetadata, + return new ManagedKeyData(keyCust, keyNamespace, null, keyState, keyMetadata, refreshTimestamp, readOpCount, writeOpCount); } @@ -166,12 +166,12 @@ public Key getTheKey() { } /** - * Returns the status of the key. + * Returns the state of the key. * - * @return The key status as a {@code ManagedKeyStatus} enum value. + * @return The key state as a {@code ManagedKeyState} enum value. */ - public ManagedKeyStatus getKeyStatus() { - return keyStatus; + public ManagedKeyState getKeyState() { + return keyState; } /** @@ -186,7 +186,7 @@ public String getKeyMetadata() { @Override public String toString() { return "ManagedKeyData{" + "keyCustodian=" + Arrays.toString(keyCust) + ", keyNamespace='" - + keyNamespace + '\'' + ", keyStatus=" + keyStatus + ", keyMetadata='" + keyMetadata + '\'' + + keyNamespace + '\'' + ", keyState=" + keyState + ", keyMetadata='" + keyMetadata + '\'' + ", refreshTimestamp=" + refreshTimestamp + ", keyChecksum=" + getKeyChecksum() + '}'; } @@ -279,7 +279,7 @@ public boolean equals(Object o) { .append(keyCust, that.keyCust) .append(keyNamespace, that.keyNamespace) .append(theKey, that.theKey) - .append(keyStatus, that.keyStatus) + .append(keyState, that.keyState) .append(keyMetadata, that.keyMetadata) .isEquals(); } @@ -290,7 +290,7 @@ public int hashCode() { .append(keyCust) .append(keyNamespace) .append(theKey) - .append(keyStatus) + .append(keyState) .append(keyMetadata) .toHashCode(); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStatus.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyState.java similarity index 77% rename from hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStatus.java rename to hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyState.java index 768927ba037b..0cc31dd2c19b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStatus.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyState.java @@ -25,7 +25,7 @@ * Enum of Managed key status. It is used to indicate the status of managed custodian keys. */ @InterfaceAudience.Public -public enum ManagedKeyStatus { +public enum ManagedKeyState { /** Represents the active status of a managed key. */ ACTIVE((byte) 1), /** Represents the inactive status of a managed key. */ @@ -36,11 +36,11 @@ public enum ManagedKeyStatus { DISABLED((byte) 4), ; - private static Map lookupByVal; + private static Map lookupByVal; private final byte val; - private ManagedKeyStatus(byte val) { + private ManagedKeyState(byte val) { this.val = val; } @@ -53,14 +53,14 @@ public byte getVal() { } /** - * Returns the ManagedKeyStatus for the given numeric value. - * @param val The numeric value of the desired ManagedKeyStatus - * @return The ManagedKeyStatus corresponding to the given value + * Returns the ManagedKeyState for the given numeric value. + * @param val The numeric value of the desired ManagedKeyState + * @return The ManagedKeyState corresponding to the given value */ - public static ManagedKeyStatus forValue(byte val) { + public static ManagedKeyState forValue(byte val) { if (lookupByVal == null) { - Map tbl = new HashMap<>(); - for (ManagedKeyStatus e: ManagedKeyStatus.values()) { + Map tbl = new HashMap<>(); + for (ManagedKeyState e: ManagedKeyState.values()) { tbl.put(e.getVal(), e); } lookupByVal = tbl; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java index 3268e812eb66..cb764a0a7f6e 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java @@ -36,7 +36,7 @@ public ManagedKeyData getSystemKey(byte[] clusterId) { // Encode clusterId too for consistency with that of key custodian. String keyMetadata = generateKeyMetadata(systemKeyAlias, ManagedKeyProvider.encodeToStr(clusterId)); - return new ManagedKeyData(clusterId, ManagedKeyData.KEY_SPACE_GLOBAL, key, ManagedKeyStatus.ACTIVE, + return new ManagedKeyData(clusterId, ManagedKeyData.KEY_SPACE_GLOBAL, key, ManagedKeyState.ACTIVE, keyMetadata); } @@ -63,10 +63,10 @@ public ManagedKeyData unwrapKey(String keyMetadataStr) throws IOException { Key key = alias != null ? getKey(alias) : null; if (key != null) { return new ManagedKeyData(key_cust, ManagedKeyData.KEY_SPACE_GLOBAL, key, - isActive ? ManagedKeyStatus.ACTIVE : ManagedKeyStatus.INACTIVE, keyMetadataStr); + isActive ? ManagedKeyState.ACTIVE : ManagedKeyState.INACTIVE, keyMetadataStr); } return new ManagedKeyData(key_cust, ManagedKeyData.KEY_SPACE_GLOBAL, null, - isActive ? ManagedKeyStatus.FAILED : ManagedKeyStatus.DISABLED, keyMetadataStr); + isActive ? ManagedKeyState.FAILED : ManagedKeyState.DISABLED, keyMetadataStr); } private void checkConfig() { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java index ad15874df971..f90aa6317552 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.keymeta; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; -import org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus; import org.apache.yetus.audience.InterfaceAudience; import java.io.IOException; import java.security.KeyException; diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java index 27f1e6c1d093..6c692ebebcf2 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java @@ -31,7 +31,7 @@ /** * A simple implementation of ManagedKeyProvider for testing. It generates a key on demand given a - * prefix. One can control the state of a key by calling setKeyStatus and can rotate a key by + * prefix. One can control the state of a key by calling setKeyState and can rotate a key by * calling setKey. */ public class MockManagedKeyProvider extends MockAesKeyProvider implements ManagedKeyProvider { @@ -42,7 +42,7 @@ public class MockManagedKeyProvider extends MockAesKeyProvider implements Manage private Map> lastGenKeyData = new HashMap<>(); // Keep references of all generated keys by their full and partial metadata. private Map allGeneratedKeys = new HashMap<>(); - private Map keyStatus = new HashMap<>(); + private Map keyState = new HashMap<>(); private String systemKeyAlias = "default_system_key_alias"; @Override @@ -66,11 +66,11 @@ public ManagedKeyData getManagedKey(byte[] key_cust, String key_namespace) public ManagedKeyData unwrapKey(String keyMetadata) throws IOException { if (allGeneratedKeys.containsKey(keyMetadata)) { String[] meta_toks = keyMetadata.split(":"); - ManagedKeyStatus keyStatus = this.keyStatus.get(meta_toks[1]); + ManagedKeyState keyState = this.keyState.get(meta_toks[1]); ManagedKeyData managedKeyData = new ManagedKeyData(meta_toks[0].getBytes(), ManagedKeyData.KEY_SPACE_GLOBAL, allGeneratedKeys.get(keyMetadata), - keyStatus == null ? ManagedKeyStatus.ACTIVE : keyStatus, keyMetadata); + keyState == null ? ManagedKeyState.ACTIVE : keyState, keyMetadata); return registerKeyData(meta_toks[1], managedKeyData); } return null; @@ -96,8 +96,8 @@ public void setMultikeyGenMode(boolean multikeyGenMode) { this.multikeyGenMode = multikeyGenMode; } - public void setMockedKeyStatus(String alias, ManagedKeyStatus status) { - keyStatus.put(alias, status); + public void setMockedKeyState(String alias, ManagedKeyState status) { + keyState.put(alias, status); } public void setMockedKey(String alias, Key key, String keyNamespace) { @@ -137,13 +137,13 @@ public static Key generateSecretKey() { } private ManagedKeyData getKey(byte[] key_cust, String alias, String key_namespace) { - ManagedKeyStatus keyStatus = this.keyStatus.get(alias); + ManagedKeyState keyState = this.keyState.get(alias); if (! keys.containsKey(key_namespace)) { keys.put(key_namespace, new HashMap<>()); } Map keySpace = keys.get(key_namespace); Key key = null; - if (keyStatus != ManagedKeyStatus.FAILED && keyStatus != ManagedKeyStatus.DISABLED) { + if (keyState != ManagedKeyState.FAILED && keyState != ManagedKeyState.DISABLED) { if (multikeyGenMode || ! keySpace.containsKey(alias)) { key = generateSecretKey(); keySpace.put(alias, key); @@ -160,7 +160,7 @@ private ManagedKeyData getKey(byte[] key_cust, String alias, String key_namespac allGeneratedKeys.put(keyMetadata, key); ManagedKeyData managedKeyData = new ManagedKeyData(key_cust, ManagedKeyData.KEY_SPACE_GLOBAL, key, - keyStatus == null ? ManagedKeyStatus.ACTIVE : keyStatus, keyMetadata); + keyState == null ? ManagedKeyState.ACTIVE : keyState, keyMetadata); return registerKeyData(alias, managedKeyData); } } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyData.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyData.java index 98349eee8e9d..04ef4a70720e 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyData.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyData.java @@ -45,7 +45,7 @@ public class TestManagedKeyData { private byte[] keyCust; private String keyNamespace; private Key theKey; - private ManagedKeyStatus keyStatus; + private ManagedKeyState keyState; private String keyMetadata; private ManagedKeyData managedKeyData; @@ -56,9 +56,9 @@ public void setUp() throws NoSuchAlgorithmException { KeyGenerator keyGen = KeyGenerator.getInstance("AES"); keyGen.init(256); theKey = keyGen.generateKey(); - keyStatus = ManagedKeyStatus.ACTIVE; + keyState = ManagedKeyState.ACTIVE; keyMetadata = "testMetadata"; - managedKeyData = new ManagedKeyData(keyCust, keyNamespace, theKey, keyStatus, keyMetadata); + managedKeyData = new ManagedKeyData(keyCust, keyNamespace, theKey, keyState, keyMetadata); } @Test @@ -67,7 +67,7 @@ public void testConstructor() { assertEquals(keyNamespace, managedKeyData.getKeyNamespace()); assertArrayEquals(keyCust, managedKeyData.getKeyCustodian()); assertEquals(theKey, managedKeyData.getTheKey()); - assertEquals(keyStatus, managedKeyData.getKeyStatus()); + assertEquals(keyState, managedKeyData.getKeyState()); assertEquals(keyMetadata, managedKeyData.getKeyMetadata()); } @@ -77,7 +77,7 @@ public void testConstructorWithCounts() { long readOpCount = 10; long writeOpCount = 5; ManagedKeyData keyDataWithCounts = - new ManagedKeyData(keyCust, keyNamespace, theKey, keyStatus, keyMetadata, refreshTimestamp, + new ManagedKeyData(keyCust, keyNamespace, theKey, keyState, keyMetadata, refreshTimestamp, readOpCount, writeOpCount); assertEquals(refreshTimestamp, keyDataWithCounts.getRefreshTimestamp()); @@ -88,21 +88,21 @@ public void testConstructorWithCounts() { @Test public void testConstructorNullChecks() { assertThrows(NullPointerException.class, - () -> new ManagedKeyData(null, keyNamespace, theKey, keyStatus, keyMetadata)); + () -> new ManagedKeyData(null, keyNamespace, theKey, keyState, keyMetadata)); assertThrows(NullPointerException.class, - () -> new ManagedKeyData(keyCust, null, theKey, keyStatus, keyMetadata)); + () -> new ManagedKeyData(keyCust, null, theKey, keyState, keyMetadata)); assertThrows(NullPointerException.class, () -> new ManagedKeyData(keyCust, keyNamespace, theKey, null, keyMetadata)); assertThrows(NullPointerException.class, - () -> new ManagedKeyData(keyCust, keyNamespace, theKey, keyStatus, null)); + () -> new ManagedKeyData(keyCust, keyNamespace, theKey, keyState, null)); } @Test public void testConstructorNegativeCountChecks() { assertThrows(IllegalArgumentException.class, - () -> new ManagedKeyData(keyCust, keyNamespace, theKey, keyStatus, keyMetadata, 0, -1, 0)); + () -> new ManagedKeyData(keyCust, keyNamespace, theKey, keyState, keyMetadata, 0, -1, 0)); assertThrows(IllegalArgumentException.class, - () -> new ManagedKeyData(keyCust, keyNamespace, theKey, keyStatus, keyMetadata, 0, 0, -1)); + () -> new ManagedKeyData(keyCust, keyNamespace, theKey, keyState, keyMetadata, 0, 0, -1)); } @Test @@ -111,7 +111,7 @@ public void testCloneWithoutKey() { assertNull(cloned.getTheKey()); assertEquals(managedKeyData.getKeyCustodian(), cloned.getKeyCustodian()); assertEquals(managedKeyData.getKeyNamespace(), cloned.getKeyNamespace()); - assertEquals(managedKeyData.getKeyStatus(), cloned.getKeyStatus()); + assertEquals(managedKeyData.getKeyState(), cloned.getKeyState()); assertEquals(managedKeyData.getKeyMetadata(), cloned.getKeyMetadata()); } @@ -129,7 +129,7 @@ public void testGetKeyChecksum() { // Test with null key ManagedKeyData nullKeyData = - new ManagedKeyData(keyCust, keyNamespace, null, keyStatus, keyMetadata); + new ManagedKeyData(keyCust, keyNamespace, null, keyState, keyMetadata); assertEquals(0, nullKeyData.getKeyChecksum()); } @@ -166,7 +166,7 @@ public void testToString() { String toString = managedKeyData.toString(); assertTrue(toString.contains("keyCustodian")); assertTrue(toString.contains("keyNamespace")); - assertTrue(toString.contains("keyStatus")); + assertTrue(toString.contains("keyState")); assertTrue(toString.contains("keyMetadata")); assertTrue(toString.contains("refreshTimestamp")); assertTrue(toString.contains("keyChecksum")); @@ -174,21 +174,21 @@ public void testToString() { @Test public void testEquals() { - ManagedKeyData same = new ManagedKeyData(keyCust, keyNamespace, theKey, keyStatus, keyMetadata); + ManagedKeyData same = new ManagedKeyData(keyCust, keyNamespace, theKey, keyState, keyMetadata); assertEquals(managedKeyData, same); ManagedKeyData different = - new ManagedKeyData("differentCust".getBytes(), keyNamespace, theKey, keyStatus, keyMetadata); + new ManagedKeyData("differentCust".getBytes(), keyNamespace, theKey, keyState, keyMetadata); assertNotEquals(managedKeyData, different); } @Test public void testHashCode() { - ManagedKeyData same = new ManagedKeyData(keyCust, keyNamespace, theKey, keyStatus, keyMetadata); + ManagedKeyData same = new ManagedKeyData(keyCust, keyNamespace, theKey, keyState, keyMetadata); assertEquals(managedKeyData.hashCode(), same.hashCode()); ManagedKeyData different = - new ManagedKeyData("differentCust".getBytes(), keyNamespace, theKey, keyStatus, keyMetadata); + new ManagedKeyData("differentCust".getBytes(), keyNamespace, theKey, keyState, keyMetadata); assertNotEquals(managedKeyData.hashCode(), different.hashCode()); } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyProvider.java index a96c94ed4d63..b2a7608fa6a2 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyProvider.java @@ -136,7 +136,7 @@ public void testGetManagedKey() throws Exception { for (Bytes prefix : prefix2key.keySet()) { ManagedKeyData keyData = managedKeyProvider.getManagedKey(prefix.get(), ManagedKeyData.KEY_SPACE_GLOBAL); - assertKeyData(keyData, ManagedKeyStatus.ACTIVE, prefix2key.get(prefix).get(), prefix.get(), + assertKeyData(keyData, ManagedKeyState.ACTIVE, prefix2key.get(prefix).get(), prefix.get(), prefix2alias.get(prefix)); } } @@ -150,7 +150,7 @@ public void testGetInactiveKey() throws Exception { ManagedKeyData keyData = managedKeyProvider.getManagedKey(firstPrefix.get(), ManagedKeyData.KEY_SPACE_GLOBAL); assertNotNull(keyData); - assertKeyData(keyData, ManagedKeyStatus.INACTIVE, prefix2key.get(firstPrefix).get(), + assertKeyData(keyData, ManagedKeyState.INACTIVE, prefix2key.get(firstPrefix).get(), firstPrefix.get(), prefix2alias.get(firstPrefix)); } @@ -160,7 +160,7 @@ public void testGetInvalidKey() throws Exception { ManagedKeyData keyData = managedKeyProvider.getManagedKey(invalidPrefixBytes, ManagedKeyData.KEY_SPACE_GLOBAL); assertNotNull(keyData); - assertKeyData(keyData, ManagedKeyStatus.FAILED, null, invalidPrefixBytes, null); + assertKeyData(keyData, ManagedKeyState.FAILED, null, invalidPrefixBytes, null); } @Test @@ -172,14 +172,14 @@ public void testGetDisabledKey() throws Exception { ManagedKeyData keyData = managedKeyProvider.getManagedKey(invalidPrefix, ManagedKeyData.KEY_SPACE_GLOBAL); assertNotNull(keyData); - assertKeyData(keyData, ManagedKeyStatus.DISABLED, null, + assertKeyData(keyData, ManagedKeyState.DISABLED, null, invalidPrefix, null); } @Test public void testGetSystemKey() throws Exception { ManagedKeyData clusterKeyData = managedKeyProvider.getSystemKey(clusterId.getBytes()); - assertKeyData(clusterKeyData, ManagedKeyStatus.ACTIVE, systemKey, clusterId.getBytes(), + assertKeyData(clusterKeyData, ManagedKeyState.ACTIVE, systemKey, clusterId.getBytes(), SYSTEM_KEY_ALIAS); conf.unset(HConstants.CRYPTO_MANAGED_KEY_STORE_SYSTEM_KEY_NAME_CONF_KEY); RuntimeException ex = assertThrows(RuntimeException.class, @@ -200,7 +200,7 @@ public void testUnwrapInvalidKey() throws Exception { invalidPrefixEnc); ManagedKeyData keyData = managedKeyProvider.unwrapKey(invalidMetadata); assertNotNull(keyData); - assertKeyData(keyData, ManagedKeyStatus.FAILED, null, invalidPrefix, + assertKeyData(keyData, ManagedKeyState.FAILED, null, invalidPrefix, invalidAlias); } @@ -215,13 +215,13 @@ public void testUnwrapDisabledKey() throws Exception { invalidPrefixEnc); ManagedKeyData keyData = managedKeyProvider.unwrapKey(invalidMetadata); assertNotNull(keyData); - assertKeyData(keyData, ManagedKeyStatus.DISABLED, null, invalidPrefix, invalidAlias); + assertKeyData(keyData, ManagedKeyState.DISABLED, null, invalidPrefix, invalidAlias); } - private void assertKeyData(ManagedKeyData keyData, ManagedKeyStatus expKeyStatus, byte[] key, + private void assertKeyData(ManagedKeyData keyData, ManagedKeyState expKeyState, byte[] key, byte[] prefixBytes, String alias) throws Exception { assertNotNull(keyData); - assertEquals(expKeyStatus, keyData.getKeyStatus()); + assertEquals(expKeyState, keyData.getKeyState()); if (key == null) { assertNull(keyData.getTheKey()); } diff --git a/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto b/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto index 4f6bc7abac7e..452151b98628 100644 --- a/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto +++ b/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto @@ -29,7 +29,7 @@ message ManagedKeysRequest { required string key_namespace = 2; } -enum ManagedKeyStatus { +enum ManagedKeyState { KEY_ACTIVE = 1; KEY_INACTIVE = 2; KEY_FAILED = 3; @@ -39,7 +39,7 @@ enum ManagedKeyStatus { message ManagedKeysResponse { required string key_cust = 1; required string key_namespace = 2; - required ManagedKeyStatus key_status = 3; + required ManagedKeyState key_state = 3; optional string key_metadata = 4; optional int64 refresh_timestamp = 5; optional int64 read_op_count = 6; @@ -47,7 +47,7 @@ message ManagedKeysResponse { } message GetManagedKeysResponse { - repeated ManagedKeysResponse status = 1; + repeated ManagedKeysResponse state = 1; } service ManagedKeysService { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java index 6015eff11a1d..e15c7102d4d9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java @@ -20,7 +20,7 @@ import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; -import org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -76,13 +76,13 @@ public List enableKeyManagement(String keyCust, String keyNamesp } retrievedKeys.add(pbeKey); LOG.info("enableManagedKeys: got managed key with status: {} and metadata: {} for " - + "(custodian: {}, namespace: {})", pbeKey.getKeyStatus(), pbeKey.getKeyMetadata(), + + "(custodian: {}, namespace: {})", pbeKey.getKeyState(), pbeKey.getKeyMetadata(), keyCust, keyNamespace); addKey(pbeKey); - if (pbeKey.getKeyStatus() != ManagedKeyStatus.ACTIVE) { + if (pbeKey.getKeyState() != ManagedKeyState.ACTIVE) { LOG.info("enableManagedKeys: received non-ACTIVE key with status: {} with metadata: {} for " + "(custodian: {}, namespace: {})", - pbeKey.getKeyStatus(), pbeKey.getKeyMetadata(), keyCust, keyNamespace); + pbeKey.getKeyState(), pbeKey.getKeyMetadata(), keyCust, keyNamespace); break; } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java index cab56215555b..d956d2e2a572 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java @@ -108,9 +108,9 @@ public void enableKeyManagement(RpcController controller, ManagedKeysRequest req ManagedKeysResponse.Builder builder = getResponseBuilder(controller, request); if (builder.getKeyCust() != null && ! builder.getKeyCust().isEmpty()) { try { - List managedKeyStatuses = master.getKeymetaAdmin() + List managedKeyStates = master.getKeymetaAdmin() .enableKeyManagement(request.getKeyCust(), request.getKeyNamespace()); - done.run(generateKeyStatusResponse(managedKeyStatuses, builder)); + done.run(generateKeyStateResponse(managedKeyStates, builder)); } catch (IOException e) { CoprocessorRpcUtils.setControllerException(controller, e); } catch (KeyException e) { @@ -125,9 +125,9 @@ public void getManagedKeys(RpcController controller, ManagedKeysRequest request, ManagedKeysResponse.Builder builder = getResponseBuilder(controller, request); if (builder.getKeyCust() != null && ! builder.getKeyCust().isEmpty()) { try { - List managedKeyStatuses = master.getKeymetaAdmin() + List managedKeyStates = master.getKeymetaAdmin() .getManagedKeys(request.getKeyCust(), request.getKeyNamespace()); - done.run(generateKeyStatusResponse(managedKeyStatuses, builder)); + done.run(generateKeyStateResponse(managedKeyStates, builder)); } catch (IOException e) { CoprocessorRpcUtils.setControllerException(controller, e); } catch (KeyException e) { @@ -151,18 +151,18 @@ public static ManagedKeysResponse.Builder getResponseBuilder(RpcController contr // Assumes that all ManagedKeyData objects belong to the same custodian and namespace. @VisibleForTesting - public static GetManagedKeysResponse generateKeyStatusResponse( - List managedKeyStatuses, ManagedKeysResponse.Builder builder) { + public static GetManagedKeysResponse generateKeyStateResponse( + List managedKeyStates, ManagedKeysResponse.Builder builder) { GetManagedKeysResponse.Builder responseBuilder = GetManagedKeysResponse.newBuilder(); - for (ManagedKeyData keyData: managedKeyStatuses) { - builder.setKeyStatus(ManagedKeysProtos.ManagedKeyStatus.valueOf( - keyData.getKeyStatus().getVal())) + for (ManagedKeyData keyData: managedKeyStates) { + builder.setKeyState(ManagedKeysProtos.ManagedKeyState.valueOf( + keyData.getKeyState().getVal())) .setKeyMetadata(keyData.getKeyMetadata()) .setRefreshTimestamp(keyData.getRefreshTimestamp()) .setReadOpCount(keyData.getReadOpCount()) .setWriteOpCount(keyData.getWriteOpCount()) ; - responseBuilder.addStatus(builder.build()); + responseBuilder.addState(builder.build()); } return responseBuilder.build(); } @@ -174,7 +174,7 @@ public static byte[] convertToKeyCustBytes(RpcController controller, ManagedKeys try { key_cust = Base64.getDecoder().decode(request.getKeyCust()); } catch (IllegalArgumentException e) { - builder.setKeyStatus(ManagedKeysProtos.ManagedKeyStatus.KEY_FAILED); + builder.setKeyState(ManagedKeysProtos.ManagedKeyState.KEY_FAILED); CoprocessorRpcUtils.setControllerException(controller, new IOException( "Failed to decode specified prefix as Base64 string: " + request.getKeyCust(), e)); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java index 6d7df48ab2a6..0ef01613c3fd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java @@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.filter.PrefixFilter; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; -import org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; import org.apache.hadoop.hbase.security.EncryptionUtil; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; @@ -71,8 +71,8 @@ public class KeymetaTableAccessor extends KeyManagementBase { public static final String REFRESHED_TIMESTAMP_QUAL_NAME = "refreshed_timestamp"; public static final byte[] REFRESHED_TIMESTAMP_QUAL_BYTES = Bytes.toBytes(REFRESHED_TIMESTAMP_QUAL_NAME); - public static final String KEY_STATUS_QUAL_NAME = "key_status"; - public static final byte[] KEY_STATUS_QUAL_BYTES = Bytes.toBytes(KEY_STATUS_QUAL_NAME); + public static final String KEY_STATE_QUAL_NAME = "key_state"; + public static final byte[] KEY_STATE_QUAL_BYTES = Bytes.toBytes(KEY_STATE_QUAL_NAME); public static final String READ_OP_COUNT_QUAL_NAME = "read_op_count"; public static final byte[] READ_OP_COUNT_QUAL_BYTES = Bytes.toBytes(READ_OP_COUNT_QUAL_NAME); @@ -147,7 +147,7 @@ public List getActiveKeys(byte[] key_cust, String keyNamespace) assertKeyManagementEnabled(); List activeKeys = new ArrayList<>(); for (ManagedKeyData keyData : getAllKeys(key_cust, keyNamespace)) { - if (keyData.getKeyStatus() == ManagedKeyStatus.ACTIVE) { + if (keyData.getKeyState() == ManagedKeyState.ACTIVE) { activeKeys.add(keyData); } } @@ -220,8 +220,8 @@ private Put addMutationColumns(Put put, ManagedKeyData keyData) throws IOExcepti .addColumn(KEY_META_INFO_FAMILY, DEK_METADATA_QUAL_BYTES, keyData.getKeyMetadata().getBytes()) .addColumn(KEY_META_INFO_FAMILY, REFRESHED_TIMESTAMP_QUAL_BYTES, Bytes.toBytes(keyData.getRefreshTimestamp())) - .addColumn(KEY_META_INFO_FAMILY, KEY_STATUS_QUAL_BYTES, - new byte[] { keyData.getKeyStatus().getVal() }) + .addColumn(KEY_META_INFO_FAMILY, KEY_STATE_QUAL_BYTES, + new byte[] { keyData.getKeyState().getVal() }) ; } @@ -245,14 +245,14 @@ public static ManagedKeyData parseFromResult(Server server, byte[] key_cust, Str if (result == null || result.isEmpty()) { return null; } - ManagedKeyStatus keyStatus = ManagedKeyStatus.forValue( - result.getValue(KEY_META_INFO_FAMILY, KEY_STATUS_QUAL_BYTES)[0]); + ManagedKeyState keyState = ManagedKeyState.forValue( + result.getValue(KEY_META_INFO_FAMILY, KEY_STATE_QUAL_BYTES)[0]); String dekMetadata = Bytes.toString(result.getValue(KEY_META_INFO_FAMILY, DEK_METADATA_QUAL_BYTES)); byte[] dekWrappedByStk = result.getValue(KEY_META_INFO_FAMILY, DEK_WRAPPED_BY_STK_QUAL_BYTES); - if ((keyStatus == ManagedKeyStatus.ACTIVE || keyStatus == ManagedKeyStatus.INACTIVE) + if ((keyState == ManagedKeyState.ACTIVE || keyState == ManagedKeyState.INACTIVE) && dekWrappedByStk == null) { - throw new IOException(keyStatus + " key must have a wrapped key"); + throw new IOException(keyState + " key must have a wrapped key"); } Key dek = null; if (dekWrappedByStk != null) { @@ -274,7 +274,7 @@ public static ManagedKeyData parseFromResult(Server server, byte[] key_cust, Str byte[] writeOpValue = result.getValue(KEY_META_INFO_FAMILY, WRITE_OP_COUNT_QUAL_BYTES); long writeOpCount = writeOpValue != null ? Bytes.toLong(writeOpValue) : 0; ManagedKeyData - dekKeyData = new ManagedKeyData(key_cust, keyNamespace, dek, keyStatus, dekMetadata, + dekKeyData = new ManagedKeyData(key_cust, keyNamespace, dek, keyState, dekMetadata, refreshedTimestamp, readOpCount, writeOpCount); if (dek != null) { long dekChecksum = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java index fe1d1c6d8b61..124700afa6c9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java @@ -64,7 +64,7 @@ public ManagedKeyData getKey(byte[] key_cust, String keyNamespace, String keyMet keyData = provider.unwrapKey(keyMetadata); if (keyData != null) { LOG.info("Got key data with status: {} and metadata: {} for prefix: {}", - keyData.getKeyStatus(), keyData.getKeyMetadata(), + keyData.getKeyState(), keyData.getKeyMetadata(), ManagedKeyProvider.encodeToStr(key_cust)); keymetaAccessor.addKey(keyData); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java index 0cb131c5a494..5fa98af7f41b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.keymeta; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; -import org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; @@ -142,7 +142,7 @@ public ManagedKeyData getRandomEntryForPrefix(byte[] key_cust, String keyNamespa Map keyMap = nsCache != null ? nsCache.get(keyCust) : null; if (keyMap != null) { for (ManagedKeyData entry : keyMap.values()) { - if (entry.getKeyStatus() == ManagedKeyStatus.ACTIVE) { + if (entry.getKeyState() == ManagedKeyState.ACTIVE) { activeEntries.add(entry); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java index 29a405939d78..096e48b3d975 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java @@ -24,7 +24,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; -import org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; import org.apache.hadoop.hbase.keymeta.SystemKeyAccessor; import org.apache.hadoop.hbase.util.Pair; import org.apache.yetus.audience.InterfaceAudience; @@ -78,9 +78,9 @@ private ManagedKeyData rotateSystemKey(String currentKeyMetadata, List all throw new IOException("Failed to get system key for cluster id: " + master.getMasterFileSystem().getClusterId().toString()); } - if (clusterKey.getKeyStatus() != ManagedKeyStatus.ACTIVE) { + if (clusterKey.getKeyState() != ManagedKeyState.ACTIVE) { throw new IOException("System key is expected to be ACTIVE but it is: " + - clusterKey.getKeyStatus() + " for metadata: " + clusterKey.getKeyMetadata()); + clusterKey.getKeyState() + " for metadata: " + clusterKey.getKeyMetadata()); } if (clusterKey.getKeyMetadata() == null) { throw new IOException("System key is expected to have metadata but it is null"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java index ebdce1eff6fd..9808e061f8a7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java @@ -24,7 +24,6 @@ import org.apache.hadoop.hbase.keymeta.KeymetaServiceEndpoint.KeyMetaAdminServiceImpl; import org.apache.hadoop.hbase.master.MasterServices; import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.GetManagedKeysResponse; -import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeyStatus; import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysRequest; import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysResponse; import org.apache.hadoop.hbase.testclassification.MasterTests; @@ -46,9 +45,9 @@ import java.util.Arrays; import java.util.Base64; import java.util.List; -import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus.ACTIVE; -import static org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeyStatus.KEY_ACTIVE; -import static org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeyStatus.KEY_FAILED; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.ACTIVE; +import static org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeyState.KEY_ACTIVE; +import static org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeyState.KEY_FAILED; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; @@ -101,7 +100,7 @@ public void setUp() throws Exception { keymetaServiceEndpoint.start(env); keyMetaAdminService = (KeyMetaAdminServiceImpl) keymetaServiceEndpoint.getServices() .iterator().next(); - responseBuilder = ManagedKeysResponse.newBuilder().setKeyStatus(KEY_ACTIVE); + responseBuilder = ManagedKeysResponse.newBuilder().setKeyState(KEY_ACTIVE); requestBuilder = ManagedKeysRequest.newBuilder() .setKeyNamespace(ManagedKeyData.KEY_SPACE_GLOBAL); keyData1 = new ManagedKeyData(KEY_CUST.getBytes(), KEY_NAMESPACE, @@ -124,7 +123,7 @@ public void testConvertToKeyCustBytesValid() { // Assert assertNotNull(result); assertArrayEquals("testKey".getBytes(), result); - assertEquals(KEY_ACTIVE, responseBuilder.getKeyStatus()); + assertEquals(KEY_ACTIVE, responseBuilder.getKeyState()); verify(controller, never()).setFailed(anyString()); } @@ -140,7 +139,7 @@ public void testConvertToKeyCustBytesInvalid() { // Assert assertNull(result); - assertEquals(KEY_FAILED, responseBuilder.getKeyStatus()); + assertEquals(KEY_FAILED, responseBuilder.getKeyState()); verify(controller).setFailed(anyString()); } @@ -179,56 +178,56 @@ public void testGetResponseBuilderWithInvalidBase64() { // Assert assertNotNull(result); assertEquals(keyNamespace, result.getKeyNamespace()); - assertEquals(ManagedKeyStatus.KEY_FAILED, result.getKeyStatus()); + assertEquals(KEY_FAILED, result.getKeyState()); verify(controller).setFailed(contains("Failed to decode specified prefix as Base64 string")); } @Test - public void testGenerateKeyStatusResponse() throws Exception { + public void testGenerateKeyStateResponse() throws Exception { // Arrange ManagedKeysResponse response = responseBuilder.setKeyCustBytes(ByteString.copyFrom( keyData1.getKeyCustodian())) .setKeyNamespace(keyData1.getKeyNamespace()) .build(); - List managedKeyStatuses = Arrays.asList(keyData1, keyData2); + List managedKeyStates = Arrays.asList(keyData1, keyData2); // Act - GetManagedKeysResponse result = KeymetaServiceEndpoint.generateKeyStatusResponse( - managedKeyStatuses, responseBuilder); + GetManagedKeysResponse result = KeymetaServiceEndpoint.generateKeyStateResponse( + managedKeyStates, responseBuilder); // Assert assertNotNull(response); - assertNotNull(result.getStatusList()); - assertEquals(2, result.getStatusList().size()); - assertEquals(ManagedKeyStatus.KEY_ACTIVE, result.getStatusList().get(0).getKeyStatus()); + assertNotNull(result.getStateList()); + assertEquals(2, result.getStateList().size()); + assertEquals(KEY_ACTIVE, result.getStateList().get(0).getKeyState()); assertEquals(0, Bytes.compareTo(keyData1.getKeyCustodian(), - result.getStatusList().get(0).getKeyCustBytes().toByteArray())); - assertEquals(keyData1.getKeyNamespace(), result.getStatusList().get(0).getKeyNamespace()); + result.getStateList().get(0).getKeyCustBytes().toByteArray())); + assertEquals(keyData1.getKeyNamespace(), result.getStateList().get(0).getKeyNamespace()); verify(controller, never()).setFailed(anyString()); } @Test - public void testGenerateKeyStatusResponse_Empty() throws Exception { + public void testGenerateKeyStateResponse_Empty() throws Exception { // Arrange ManagedKeysResponse response = responseBuilder.setKeyCustBytes(ByteString.copyFrom( keyData1.getKeyCustodian())) .setKeyNamespace(keyData1.getKeyNamespace()) .build(); - List managedKeyStatuses = new ArrayList<>(); + List managedKeyStates = new ArrayList<>(); // Act - GetManagedKeysResponse result = KeymetaServiceEndpoint.generateKeyStatusResponse( - managedKeyStatuses, responseBuilder); + GetManagedKeysResponse result = KeymetaServiceEndpoint.generateKeyStateResponse( + managedKeyStates, responseBuilder); // Assert assertNotNull(response); - assertNotNull(result.getStatusList()); - assertEquals(0, result.getStatusList().size()); + assertNotNull(result.getStateList()); + assertEquals(0, result.getStateList().size()); verify(controller, never()).setFailed(anyString()); } @Test - public void testGenerateKeyStatusResponse_Success() throws Exception { + public void testGenerateKeyStatResponse_Success() throws Exception { doTestServiceCallForSuccess( (controller, request, done) -> keyMetaAdminService.enableKeyManagement(controller, request, done)); @@ -244,8 +243,8 @@ public void testGetManagedKeys_Success() throws Exception { private void doTestServiceCallForSuccess(ServiceCall svc) throws Exception { // Arrange ManagedKeysRequest request = requestBuilder.setKeyCust(KEY_CUST).build(); - List managedKeyStatuses = Arrays.asList(keyData1); - when(keymetaAdmin.enableKeyManagement(any(), any())).thenReturn(managedKeyStatuses); + List managedKeyStates = Arrays.asList(keyData1); + when(keymetaAdmin.enableKeyManagement(any(), any())).thenReturn(managedKeyStates); // Act svc.call(controller, request, done); @@ -261,7 +260,7 @@ void call(RpcController controller, ManagedKeysRequest request, } @Test - public void testGenerateKeyStatusResponse_InvalidCust() throws Exception { + public void testGenerateKeyStateResponse_InvalidCust() throws Exception { // Arrange String invalidBase64 = "invalid!Base64@String"; ManagedKeysRequest request = requestBuilder.setKeyCust(invalidBase64).build(); @@ -276,7 +275,7 @@ public void testGenerateKeyStatusResponse_InvalidCust() throws Exception { } @Test - public void testGenerateKeyStatusResponse_IOException() throws Exception { + public void testGenerateKeyStateResponse_IOException() throws Exception { // Arrange when(keymetaAdmin.enableKeyManagement(any(), any())).thenThrow(IOException.class); ManagedKeysRequest request = requestBuilder.setKeyCust(KEY_CUST).build(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java index e8cdf6f935a8..388692dfbf0b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java @@ -18,15 +18,15 @@ package org.apache.hadoop.hbase.keymeta; import static org.apache.hadoop.hbase.io.crypto.ManagedKeyData.KEY_SPACE_GLOBAL; -import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus.ACTIVE; -import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus.DISABLED; -import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus.FAILED; -import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus.INACTIVE; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.ACTIVE; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.DISABLED; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.FAILED; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.INACTIVE; import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.DEK_CHECKSUM_QUAL_BYTES; import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.DEK_METADATA_QUAL_BYTES; import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.DEK_WRAPPED_BY_STK_QUAL_BYTES; import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.KEY_META_INFO_FAMILY; -import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.KEY_STATUS_QUAL_BYTES; +import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.KEY_STATE_QUAL_BYTES; import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.READ_OP_COUNT_QUAL_BYTES; import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.REFRESHED_TIMESTAMP_QUAL_BYTES; import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.STK_CHECKSUM_QUAL_BYTES; @@ -52,7 +52,7 @@ import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.client.*; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; -import org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; import org.apache.hadoop.hbase.io.crypto.MockManagedKeyProvider; import org.apache.hadoop.hbase.security.EncryptionUtil; import org.apache.hadoop.hbase.testclassification.MasterTests; @@ -137,9 +137,9 @@ public static class TestAdd extends TestKeymetaTableAccessor { HBaseClassTestRule.forClass(TestAdd.class); @Parameter(0) - public ManagedKeyStatus keyStatus; + public ManagedKeyState keyState; - @Parameterized.Parameters(name = "{index},keyStatus={0}") + @Parameterized.Parameters(name = "{index},keyState={0}") public static Collection data() { return Arrays.asList( new Object[][] { { ACTIVE }, { FAILED }, { INACTIVE }, { DISABLED }, }); @@ -147,7 +147,7 @@ public static Collection data() { @Test public void testAddActiveKey() throws Exception { - managedKeyProvider.setMockedKeyStatus(ALIAS, keyStatus); + managedKeyProvider.setMockedKeyState(ALIAS, keyState); ManagedKeyData keyData = managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); @@ -178,9 +178,9 @@ public static class TestGet extends TestKeymetaTableAccessor { public void setUp() throws Exception { super.setUp(); - when(result1.getValue(eq(KEY_META_INFO_FAMILY), eq(KEY_STATUS_QUAL_BYTES))) + when(result1.getValue(eq(KEY_META_INFO_FAMILY), eq(KEY_STATE_QUAL_BYTES))) .thenReturn(new byte[] { ACTIVE.getVal() }); - when(result2.getValue(eq(KEY_META_INFO_FAMILY), eq(KEY_STATUS_QUAL_BYTES))) + when(result2.getValue(eq(KEY_META_INFO_FAMILY), eq(KEY_STATE_QUAL_BYTES))) .thenReturn(new byte[] { FAILED.getVal() }); for (Result result : Arrays.asList(result1, result2)) { when(result.getValue(eq(KEY_META_INFO_FAMILY), eq(REFRESHED_TIMESTAMP_QUAL_BYTES))) @@ -207,7 +207,7 @@ public void testParseEmptyResult() throws Exception { public void testGetActiveKeyMissingWrappedKey() throws Exception { Result result = mock(Result.class); when(table.get(any(Get.class))).thenReturn(result); - when(result.getValue(eq(KEY_META_INFO_FAMILY), eq(KEY_STATUS_QUAL_BYTES))) + when(result.getValue(eq(KEY_META_INFO_FAMILY), eq(KEY_STATE_QUAL_BYTES))) .thenReturn(new byte[] { ACTIVE.getVal() }, new byte[] { INACTIVE.getVal() }); IOException ex; @@ -244,7 +244,7 @@ public void testGetKeyWithWrappedKey() throws Exception { assertEquals(KEY_METADATA, result.getKeyMetadata()); assertEquals(0, Bytes.compareTo(keyData.getTheKey().getEncoded(), result.getTheKey().getEncoded())); - assertEquals(ACTIVE, result.getKeyStatus()); + assertEquals(ACTIVE, result.getKeyState()); // When DEK checksum doesn't match, we expect a null value. result = accessor.getKey(CUST_ID, KEY_NAMESPACE, KEY_METADATA); @@ -263,7 +263,7 @@ public void testGetKeyWithoutWrappedKey() throws Exception { assertEquals(KEY_NAMESPACE, result.getKeyNamespace()); assertEquals(keyMetadata2, result.getKeyMetadata()); assertNull(result.getTheKey()); - assertEquals(FAILED, result.getKeyStatus()); + assertEquals(FAILED, result.getKeyState()); } @Test @@ -392,7 +392,7 @@ protected void assertPut(ManagedKeyData keyData, Put put) { assertEquals(new Bytes(keyData.getKeyMetadata().getBytes()), valueMap.get(new Bytes(DEK_METADATA_QUAL_BYTES))); assertNotNull(valueMap.get(new Bytes(REFRESHED_TIMESTAMP_QUAL_BYTES))); - assertEquals(new Bytes(new byte[] { keyData.getKeyStatus().getVal() }), - valueMap.get(new Bytes(KEY_STATUS_QUAL_BYTES))); + assertEquals(new Bytes(new byte[] { keyData.getKeyState().getVal() }), + valueMap.get(new Bytes(KEY_STATE_QUAL_BYTES))); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java index 1d5c33774efd..6449dc06d495 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.keymeta; import static org.apache.hadoop.hbase.io.crypto.ManagedKeyData.KEY_SPACE_GLOBAL; -import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus.FAILED; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.FAILED; import static org.junit.Assert.*; import org.apache.hadoop.conf.Configuration; @@ -141,7 +141,7 @@ public void testRandomKeyGet() throws Exception{ @Test public void testRandomKeyGetNoActive() throws Exception { - managedKeyProvider.setMockedKeyStatus(ALIAS, FAILED); + managedKeyProvider.setMockedKeyState(ALIAS, FAILED); for (int i = 0; i < 20; ++i) { cache.addEntry(managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL)); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java index b3692f0896d7..6eb4a998236b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java @@ -21,8 +21,8 @@ import org.apache.hadoop.hbase.io.crypto.Encryption; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; import org.apache.hadoop.hbase.io.crypto.MockManagedKeyProvider; -import org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus; import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; @@ -62,9 +62,9 @@ private void doTestEnable(KeymetaAdmin adminClient) throws IOException, KeyExcep ; String cust = "cust1"; String encodedCust = ManagedKeyProvider.encodeToStr(cust.getBytes()); - List managedKeyStatuses = + List managedKeyStates = adminClient.enableKeyManagement(encodedCust, ManagedKeyData.KEY_SPACE_GLOBAL); - assertKeyDataListSingleKey(managedKeyStatuses, ManagedKeyStatus.ACTIVE); + assertKeyDataListSingleKey(managedKeyStates, ManagedKeyState.ACTIVE); List managedKeys = adminClient.getManagedKeys(encodedCust, ManagedKeyData.KEY_SPACE_GLOBAL); @@ -73,24 +73,24 @@ private void doTestEnable(KeymetaAdmin adminClient) throws IOException, KeyExcep ManagedKeyData.KEY_SPACE_GLOBAL).cloneWithoutKey(), managedKeys.get(0).cloneWithoutKey()); String nonExistentCust = "nonExistentCust"; - managedKeyProvider.setMockedKeyStatus(nonExistentCust, ManagedKeyStatus.FAILED); + managedKeyProvider.setMockedKeyState(nonExistentCust, ManagedKeyState.FAILED); List keyDataList1 = adminClient.enableKeyManagement(ManagedKeyProvider.encodeToStr(nonExistentCust.getBytes()), ManagedKeyData.KEY_SPACE_GLOBAL); - assertKeyDataListSingleKey(keyDataList1, ManagedKeyStatus.FAILED); + assertKeyDataListSingleKey(keyDataList1, ManagedKeyState.FAILED); String disabledCust = "disabledCust"; - managedKeyProvider.setMockedKeyStatus(disabledCust, ManagedKeyStatus.DISABLED); + managedKeyProvider.setMockedKeyState(disabledCust, ManagedKeyState.DISABLED); List keyDataList2 = adminClient.enableKeyManagement(ManagedKeyProvider.encodeToStr(disabledCust.getBytes()), ManagedKeyData.KEY_SPACE_GLOBAL); - assertKeyDataListSingleKey(keyDataList2, ManagedKeyStatus.DISABLED); + assertKeyDataListSingleKey(keyDataList2, ManagedKeyState.DISABLED); } - private static void assertKeyDataListSingleKey(List managedKeyStatuses, - ManagedKeyStatus keyStatus) { - assertNotNull(managedKeyStatuses); - assertEquals(1, managedKeyStatuses.size()); - assertEquals(keyStatus, managedKeyStatuses.get(0).getKeyStatus()); + private static void assertKeyDataListSingleKey(List managedKeyStates, + ManagedKeyState keyState) { + assertNotNull(managedKeyStates); + assertEquals(1, managedKeyStates.size()); + assertEquals(keyState, managedKeyStates.get(0).getKeyState()); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java index 2df063c1de7a..582dd9b6351a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java @@ -10,7 +10,7 @@ import org.apache.hadoop.hbase.io.crypto.Encryption; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; -import org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; import org.apache.hadoop.hbase.io.crypto.MockManagedKeyProvider; import org.apache.hadoop.hbase.keymeta.KeymetaAdminImpl; import org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor; @@ -38,7 +38,7 @@ import java.util.List; import static org.apache.hadoop.hbase.io.crypto.ManagedKeyData.KEY_SPACE_GLOBAL; -import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus.*; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.*; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; @@ -124,11 +124,11 @@ public static class TestAdminImpl extends TestKeymetaAdminImpl { @Parameter(1) public String keySpace; @Parameter(2) - public ManagedKeyStatus keyStatus; + public ManagedKeyState keyState; @Parameter(3) public boolean isNullKey; - @Parameters(name = "{index},nKeys={0},keySpace={1},keyStatus={2}") + @Parameters(name = "{index},nKeys={0},keySpace={1},keyState={2}") public static Collection data() { return Arrays.asList( new Object[][] { @@ -153,15 +153,15 @@ public void testEnableAndGet() throws Exception { MockManagedKeyProvider managedKeyProvider = (MockManagedKeyProvider) Encryption.getKeyProvider(conf); String cust = "cust1"; - managedKeyProvider.setMockedKeyStatus(cust, keyStatus); + managedKeyProvider.setMockedKeyState(cust, keyState); String encodedCust = ManagedKeyProvider.encodeToStr(cust.getBytes()); - List managedKeyStatuses = + List managedKeyStates = keymetaAdmin.enableKeyManagement(encodedCust, keySpace); - assertNotNull(managedKeyStatuses); - assertEquals(1, managedKeyStatuses.size()); - assertEquals(keyStatus, managedKeyStatuses.get(0).getKeyStatus()); + assertNotNull(managedKeyStates); + assertEquals(1, managedKeyStates.size()); + assertEquals(keyState, managedKeyStates.get(0).getKeyState()); verify(keymetaAccessor).addKey(argThat( - (ManagedKeyData keyData) -> assertKeyData(keyData, keyStatus, + (ManagedKeyData keyData) -> assertKeyData(keyData, keyState, isNullKey ? null : managedKeyProvider.getMockedKey(cust, keySpace)))); verify(keymetaAccessor).getAllKeys(cust.getBytes(), keySpace); @@ -203,51 +203,51 @@ public void setUp() throws Exception { @Test public void testEnable() throws Exception { - List managedKeyStatuses; + List managedKeyStates; String cust = "cust1"; String encodedCust = ManagedKeyProvider.encodeToStr(cust.getBytes()); - managedKeyStatuses = keymetaAdmin.enableKeyManagement(encodedCust, keySpace); - assertKeys(managedKeyStatuses, 3); + managedKeyStates = keymetaAdmin.enableKeyManagement(encodedCust, keySpace); + assertKeys(managedKeyStates, 3); verify(keymetaAccessor).getAllKeys(cust.getBytes(), keySpace); verify(keymetaAccessor, times(3)).addKey(any()); reset(keymetaAccessor); - when(keymetaAccessor.getAllKeys(cust.getBytes(), keySpace)).thenReturn(managedKeyStatuses); - managedKeyStatuses = keymetaAdmin.enableKeyManagement(encodedCust, keySpace); - assertKeys(managedKeyStatuses, 3); + when(keymetaAccessor.getAllKeys(cust.getBytes(), keySpace)).thenReturn(managedKeyStates); + managedKeyStates = keymetaAdmin.enableKeyManagement(encodedCust, keySpace); + assertKeys(managedKeyStates, 3); verify(keymetaAccessor, times(0)).addKey(any()); reset(keymetaAccessor); - when(keymetaAccessor.getAllKeys(cust.getBytes(), keySpace)).thenReturn(managedKeyStatuses); + when(keymetaAccessor.getAllKeys(cust.getBytes(), keySpace)).thenReturn(managedKeyStates); keymetaAdmin.activeKeyCountOverride = 4; - managedKeyStatuses = keymetaAdmin.enableKeyManagement(encodedCust, keySpace); - assertKeys(managedKeyStatuses, 1); + managedKeyStates = keymetaAdmin.enableKeyManagement(encodedCust, keySpace); + assertKeys(managedKeyStates, 1); verify(keymetaAccessor, times(1)).addKey(any()); reset(keymetaAccessor); - when(keymetaAccessor.getAllKeys(cust.getBytes(), keySpace)).thenReturn(managedKeyStatuses); + when(keymetaAccessor.getAllKeys(cust.getBytes(), keySpace)).thenReturn(managedKeyStates); managedKeyProvider.setMultikeyGenMode(false); - managedKeyStatuses = keymetaAdmin.enableKeyManagement(encodedCust, keySpace); - assertKeys(managedKeyStatuses, 0); + managedKeyStates = keymetaAdmin.enableKeyManagement(encodedCust, keySpace); + assertKeys(managedKeyStates, 0); verify(keymetaAccessor, times(0)).addKey(any()); //reset(keymetaAccessor); - managedKeyProvider.setMockedKeyStatus(cust, FAILED); - managedKeyStatuses = keymetaAdmin.enableKeyManagement(encodedCust, keySpace); - assertNotNull(managedKeyStatuses); - assertEquals(1, managedKeyStatuses.size()); - assertEquals(FAILED, managedKeyStatuses.get(0).getKeyStatus()); + managedKeyProvider.setMockedKeyState(cust, FAILED); + managedKeyStates = keymetaAdmin.enableKeyManagement(encodedCust, keySpace); + assertNotNull(managedKeyStates); + assertEquals(1, managedKeyStates.size()); + assertEquals(FAILED, managedKeyStates.get(0).getKeyState()); verify(keymetaAccessor, times(1)).addKey(any()); // NOTE: Reset as this instance is shared for more than 1 test. - managedKeyProvider.setMockedKeyStatus(cust, ACTIVE); + managedKeyProvider.setMockedKeyState(cust, ACTIVE); } - private static void assertKeys(List managedKeyStatuses, int expectedCnt) { - assertNotNull(managedKeyStatuses); - assertEquals(expectedCnt, managedKeyStatuses.size()); - for (int i = 0; i < managedKeyStatuses.size(); ++i) { - assertEquals(ACTIVE, managedKeyStatuses.get(i).getKeyStatus()); + private static void assertKeys(List managedKeyStates, int expectedCnt) { + assertNotNull(managedKeyStates); + assertEquals(expectedCnt, managedKeyStates.size()); + for (int i = 0; i < managedKeyStates.size(); ++i) { + assertEquals(ACTIVE, managedKeyStates.get(i).getKeyState()); } } } @@ -345,10 +345,10 @@ public List getAllKeys(byte[] key_cust, String keyNamespace) } } - protected boolean assertKeyData(ManagedKeyData keyData, ManagedKeyStatus expKeyStatus, + protected boolean assertKeyData(ManagedKeyData keyData, ManagedKeyState expKeyState, Key expectedKey) { assertNotNull(keyData); - assertEquals(expKeyStatus, keyData.getKeyStatus()); + assertEquals(expKeyState, keyData.getKeyState()); if (expectedKey == null) { assertNull(keyData.getTheKey()); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java index 9ab2a154c49b..c16d832d8a17 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java @@ -37,8 +37,8 @@ import java.util.List; import java.util.stream.IntStream; import static org.apache.hadoop.hbase.HConstants.SYSTEM_KEY_FILE_PREFIX; -import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus.ACTIVE; -import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus.INACTIVE; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.ACTIVE; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.INACTIVE; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThrows; @@ -267,7 +267,7 @@ public void testEnsureSystemKeyInitialized_WithNoSystemKeys() throws Exception { public void testEnsureSystemKeyInitialized_WithNoNonActiveKey() throws Exception { String metadata = "key-metadata"; ManagedKeyData keyData = mock(ManagedKeyData.class); - when(keyData.getKeyStatus()).thenReturn(INACTIVE); + when(keyData.getKeyState()).thenReturn(INACTIVE); when(keyData.getKeyMetadata()).thenReturn(metadata); when(mockKeyProvide.getSystemKey(any())).thenReturn(keyData); @@ -279,7 +279,7 @@ public void testEnsureSystemKeyInitialized_WithNoNonActiveKey() throws Exception @Test public void testEnsureSystemKeyInitialized_WithInvalidMetadata() throws Exception { ManagedKeyData keyData = mock(ManagedKeyData.class); - when(keyData.getKeyStatus()).thenReturn(ACTIVE); + when(keyData.getKeyState()).thenReturn(ACTIVE); when(mockKeyProvide.getSystemKey(any())).thenReturn(keyData); IOException ex = assertThrows(IOException.class, manager::ensureSystemKeyInitialized); @@ -290,7 +290,7 @@ public void testEnsureSystemKeyInitialized_WithInvalidMetadata() throws Exceptio public void testEnsureSystemKeyInitialized_WithSaveFailure() throws Exception { String metadata = "key-metadata"; ManagedKeyData keyData = mock(ManagedKeyData.class); - when(keyData.getKeyStatus()).thenReturn(ACTIVE); + when(keyData.getKeyState()).thenReturn(ACTIVE); when(mockKeyProvide.getSystemKey(any())).thenReturn(keyData); when(keyData.getKeyMetadata()).thenReturn(metadata); when(mockFileSystem.globStatus(any())).thenReturn(new FileStatus[0]); @@ -308,7 +308,7 @@ public void testEnsureSystemKeyInitialized_WithSaveFailure() throws Exception { public void testEnsureSystemKeyInitialized_RaceCondition() throws Exception { String metadata = "key-metadata"; ManagedKeyData keyData = mock(ManagedKeyData.class); - when(keyData.getKeyStatus()).thenReturn(ACTIVE); + when(keyData.getKeyState()).thenReturn(ACTIVE); when(mockKeyProvide.getSystemKey(any())).thenReturn(keyData); when(keyData.getKeyMetadata()).thenReturn(metadata); when(mockFileSystem.globStatus(any())).thenReturn(new FileStatus[0]); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyManager.java index ec211f1dd02a..e184add8f695 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyManager.java @@ -23,7 +23,7 @@ import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; import org.apache.hadoop.hbase.io.crypto.MockManagedKeyProvider; -import org.apache.hadoop.hbase.io.crypto.ManagedKeyStatus; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; import org.apache.hadoop.hbase.keymeta.SystemKeyAccessor; import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.keymeta.ManagedKeyTestBase; @@ -90,7 +90,7 @@ public void testWithInvalidSystemKey() throws Exception { // Test startup failure when the cluster key is INACTIVE SystemKeyManager tmpCKM = new SystemKeyManager(master); tmpCKM.ensureSystemKeyInitialized(); - pbeKeyProvider.setMockedKeyStatus(pbeKeyProvider.getSystemKeyAlias(), ManagedKeyStatus.INACTIVE); + pbeKeyProvider.setMockedKeyState(pbeKeyProvider.getSystemKeyAlias(), ManagedKeyState.INACTIVE); assertThrows(IOException.class, tmpCKM::ensureSystemKeyInitialized); } From a2d7f8e4a055c704a0a8437edb2bf534d45074d4 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Tue, 27 May 2025 11:03:05 +0530 Subject: [PATCH 32/70] Address a couple of gaps - unwrapKey() should be able to take in a wrapped key as well. - Store FAILED state without metadata for scenarios when enable fails. --- .gitignore | 1 + .../hbase/io/crypto/ManagedKeyData.java | 5 +- .../hbase/io/crypto/ManagedKeyProvider.java | 10 ++- .../io/crypto/ManagedKeyStoreKeyProvider.java | 4 +- .../io/crypto/MockManagedKeyProvider.java | 2 +- .../hbase/io/crypto/TestManagedKeyData.java | 11 ++- .../io/crypto/TestManagedKeyProvider.java | 6 +- .../hbase/keymeta/KeymetaTableAccessor.java | 53 ++++++++++-- .../hbase/keymeta/ManagedKeyAccessor.java | 8 +- .../hbase/keymeta/SystemKeyAccessor.java | 2 +- .../keymeta/TestKeymetaTableAccessor.java | 86 +++++++++++++++++-- .../hbase/keymeta/TestManagedKeyAccessor.java | 13 +-- 12 files changed, 165 insertions(+), 36 deletions(-) diff --git a/.gitignore b/.gitignore index efe9f99ae703..3ad423ef1106 100644 --- a/.gitignore +++ b/.gitignore @@ -30,3 +30,4 @@ ID filenametags tags .codegenie +.vscode diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java index 328efa4f5290..96b5844930bb 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java @@ -107,7 +107,10 @@ public ManagedKeyData(byte[] key_cust, String key_namespace, Key theKey, Managed Preconditions.checkNotNull(key_cust, "key_cust should not be null"); Preconditions.checkNotNull(key_namespace, "key_namespace should not be null"); Preconditions.checkNotNull(keyState, "keyState should not be null"); - Preconditions.checkNotNull(keyMetadata, "keyMetadata should not be null"); + // Only check for null metadata if state is not FAILED + if (keyState != ManagedKeyState.FAILED) { + Preconditions.checkNotNull(keyMetadata, "keyMetadata should not be null"); + } Preconditions.checkArgument(readOpCount >= 0, "readOpCount: " + readOpCount + " should be >= 0"); Preconditions.checkArgument(writeOpCount >= 0, "writeOpCount: " + writeOpCount + diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java index 2c99235354eb..4140ee2d1713 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java @@ -60,14 +60,16 @@ public interface ManagedKeyProvider extends KeyProvider { /** * Retrieve a key identified by the key metadata. The key metadata is typically generated by the * same key provider via the {@link #getSystemKey(byte[])} or - * {@link #getManagedKey(byte[], String)} methods. + * {@link #getManagedKey(byte[], String)} methods. If key couldn't be retrieved using metadata and + * the wrappedKey is provided, the implementation may try to decrypt it as a fallback operation. * - * @param keyMetaData Key metadata + * @param keyMetaData Key metadata, must not be {@code null}. + * @param wrappedKey The DEK key material encrypted with the corresponding KEK, if available. * @return ManagedKeyData for the key represented by the metadata and is expected to be not - * {@code null} + * {@code null} * @throws IOException if an error occurs while generating the key */ - ManagedKeyData unwrapKey(String keyMetaData) throws IOException; + ManagedKeyData unwrapKey(String keyMetaData, byte[] wrappedKey) throws IOException; /** * Decode the given key custodian which is encoded as Base64 string. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java index cb764a0a7f6e..f3f0d3d407b1 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java @@ -47,11 +47,11 @@ public ManagedKeyData getManagedKey(byte[] key_cust, String key_namespace) throw String aliasConfKey = HConstants.CRYPTO_MANAGED_KEY_STORE_CONF_KEY_PREFIX + encodedCust + "." + "alias"; String keyMetadata = generateKeyMetadata(conf.get(aliasConfKey, null), encodedCust); - return unwrapKey(keyMetadata); + return unwrapKey(keyMetadata, null); } @Override - public ManagedKeyData unwrapKey(String keyMetadataStr) throws IOException { + public ManagedKeyData unwrapKey(String keyMetadataStr, byte[] wrappedKey) throws IOException { Map keyMetadata = GsonUtil.getDefaultInstance().fromJson(keyMetadataStr, HashMap.class); String encodedCust = keyMetadata.get(KEY_METADATA_CUST); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java index 6c692ebebcf2..561c66a559e3 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java @@ -63,7 +63,7 @@ public ManagedKeyData getManagedKey(byte[] key_cust, String key_namespace) } @Override - public ManagedKeyData unwrapKey(String keyMetadata) throws IOException { + public ManagedKeyData unwrapKey(String keyMetadata, byte[] wrappedKey) throws IOException { if (allGeneratedKeys.containsKey(keyMetadata)) { String[] meta_toks = keyMetadata.split(":"); ManagedKeyState keyState = this.keyState.get(meta_toks[1]); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyData.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyData.java index 04ef4a70720e..5d3410a131b6 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyData.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyData.java @@ -94,7 +94,16 @@ public void testConstructorNullChecks() { assertThrows(NullPointerException.class, () -> new ManagedKeyData(keyCust, keyNamespace, theKey, null, keyMetadata)); assertThrows(NullPointerException.class, - () -> new ManagedKeyData(keyCust, keyNamespace, theKey, keyState, null)); + () -> new ManagedKeyData(keyCust, keyNamespace, theKey, ManagedKeyState.ACTIVE, null)); + } + + @Test + public void testConstructorWithFailedStateAndNullMetadata() { + ManagedKeyData keyData = new ManagedKeyData(keyCust, keyNamespace, null, ManagedKeyState.FAILED, null); + assertNotNull(keyData); + assertEquals(ManagedKeyState.FAILED, keyData.getKeyState()); + assertNull(keyData.getKeyMetadata()); + assertNull(keyData.getTheKey()); } @Test diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyProvider.java index b2a7608fa6a2..9bce1e5c3c9b 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyProvider.java @@ -198,7 +198,7 @@ public void testUnwrapInvalidKey() throws Exception { String invalidPrefixEnc = ManagedKeyProvider.encodeToStr(invalidPrefix); String invalidMetadata = ManagedKeyStoreKeyProvider.generateKeyMetadata(invalidAlias, invalidPrefixEnc); - ManagedKeyData keyData = managedKeyProvider.unwrapKey(invalidMetadata); + ManagedKeyData keyData = managedKeyProvider.unwrapKey(invalidMetadata, null); assertNotNull(keyData); assertKeyData(keyData, ManagedKeyState.FAILED, null, invalidPrefix, invalidAlias); @@ -213,7 +213,7 @@ public void testUnwrapDisabledKey() throws Exception { "false"); String invalidMetadata = ManagedKeyStoreKeyProvider.generateKeyMetadata(invalidAlias, invalidPrefixEnc); - ManagedKeyData keyData = managedKeyProvider.unwrapKey(invalidMetadata); + ManagedKeyData keyData = managedKeyProvider.unwrapKey(invalidMetadata, null); assertNotNull(keyData); assertKeyData(keyData, ManagedKeyState.DISABLED, null, invalidPrefix, invalidAlias); } @@ -237,7 +237,7 @@ private void assertKeyData(ManagedKeyData keyData, ManagedKeyState expKeyState, assertEquals(alias, keyMetadata.get(KEY_METADATA_ALIAS)); assertEquals(Base64.getEncoder().encodeToString(prefixBytes), keyMetadata.get(KEY_METADATA_CUST)); - assertEquals(keyData, managedKeyProvider.unwrapKey(keyData.getKeyMetadata())); + assertEquals(keyData, managedKeyProvider.unwrapKey(keyData.getKeyMetadata(), null)); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java index 0ef01613c3fd..6ac6d09bb445 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java @@ -154,6 +154,21 @@ public List getActiveKeys(byte[] key_cust, String keyNamespace) return activeKeys; } + /** + * Get the specific key identified by key_cust, keyNamespace and keyState. + * + * @param key_cust The prefix. + * @param keyNamespace The namespace. + * @param keyState The state of the key. + * @return the key or {@code null} + * @throws IOException when there is an underlying IOException. + * @throws KeyException when there is an underlying KeyException. + */ + public ManagedKeyData getKey(byte[] key_cust, String keyNamespace, ManagedKeyState keyState) + throws IOException, KeyException { + return getKeyInternal(key_cust, keyNamespace, new byte[] { keyState.getVal() }); + } + /** * Get the specific key identified by key_cust, keyNamespace and keyMetadata. * @@ -165,12 +180,26 @@ public List getActiveKeys(byte[] key_cust, String keyNamespace) * @throws KeyException when there is an underlying KeyException. */ public ManagedKeyData getKey(byte[] key_cust, String keyNamespace, String keyMetadata) + throws IOException, KeyException { + return getKeyInternal(key_cust, keyNamespace, ManagedKeyData.constructMetadataHash(keyMetadata)); + } + + /** + * Internal helper method to get a key using the provided metadata hash. + * + * @param key_cust The prefix. + * @param keyNamespace The namespace. + * @param keyMetadataHash The metadata hash or state value. + * @return the key or {@code null} + * @throws IOException when there is an underlying IOException. + * @throws KeyException when there is an underlying KeyException. + */ + private ManagedKeyData getKeyInternal(byte[] key_cust, String keyNamespace, byte[] keyMetadataHash) throws IOException, KeyException { assertKeyManagementEnabled(); Connection connection = getServer().getConnection(); try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { - byte[] rowKey = constructRowKeyForMetadata(key_cust, keyNamespace, - ManagedKeyData.constructMetadataHash(keyMetadata)); + byte[] rowKey = constructRowKeyForMetadata(key_cust, keyNamespace, keyMetadataHash); Result result = table.get(new Get(rowKey)); return parseFromResult(getServer(), key_cust, keyNamespace, result); } @@ -215,20 +244,34 @@ private Put addMutationColumns(Put put, ManagedKeyData keyData) throws IOExcepti Bytes.toBytes(latestSystemKey.getKeyChecksum())) ; } - return put.setDurability(Durability.SKIP_WAL) + Put result = put.setDurability(Durability.SKIP_WAL) .setPriority(HConstants.SYSTEMTABLE_QOS) - .addColumn(KEY_META_INFO_FAMILY, DEK_METADATA_QUAL_BYTES, keyData.getKeyMetadata().getBytes()) .addColumn(KEY_META_INFO_FAMILY, REFRESHED_TIMESTAMP_QUAL_BYTES, Bytes.toBytes(keyData.getRefreshTimestamp())) .addColumn(KEY_META_INFO_FAMILY, KEY_STATE_QUAL_BYTES, new byte[] { keyData.getKeyState().getVal() }) ; + + // Only add metadata column if metadata is not null + String metadata = keyData.getKeyMetadata(); + if (metadata != null) { + result.addColumn(KEY_META_INFO_FAMILY, DEK_METADATA_QUAL_BYTES, metadata.getBytes()); + } + + return result; } @VisibleForTesting public static byte[] constructRowKeyForMetadata(ManagedKeyData keyData) { + byte[] keyMetadataHash; + if (keyData.getKeyState() == ManagedKeyState.FAILED && keyData.getKeyMetadata() == null) { + // For FAILED state with null metadata, use state as metadata + keyMetadataHash = new byte[] { keyData.getKeyState().getVal() }; + } else { + keyMetadataHash = keyData.getKeyMetadataHash(); + } return constructRowKeyForMetadata(keyData.getKeyCustodian(), keyData.getKeyNamespace(), - keyData.getKeyMetadataHash()); + keyMetadataHash); } @VisibleForTesting diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java index 124700afa6c9..cf4972727977 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java @@ -46,11 +46,13 @@ public ManagedKeyAccessor(KeymetaTableAccessor keymetaAccessor, * * @param key_cust The key custodian. * @param keyNamespace The namespace of the key - * @param keyMetadata The metadata of the key + * @param keyMetadata The metadata of the key + * @param wrappedKey The DEK key material encrypted with the corresponding KEK, if available. * @return The key data or {@code null} * @throws IOException if an error occurs while retrieving the key */ - public ManagedKeyData getKey(byte[] key_cust, String keyNamespace, String keyMetadata) + public ManagedKeyData getKey(byte[] key_cust, String keyNamespace, String keyMetadata, + byte[] wrappedKey) throws IOException, KeyException { assertKeyManagementEnabled(); // 1. Check L1 cache. @@ -61,7 +63,7 @@ public ManagedKeyData getKey(byte[] key_cust, String keyNamespace, String keyMet if (keyData == null) { // 3. Check with Key Provider. ManagedKeyProvider provider = getKeyProvider(); - keyData = provider.unwrapKey(keyMetadata); + keyData = provider.unwrapKey(keyMetadata, wrappedKey); if (keyData != null) { LOG.info("Got key data with status: {} and metadata: {} for prefix: {}", keyData.getKeyState(), keyData.getKeyMetadata(), diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java index e9a9e64baaaa..c207dc251d1f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java @@ -84,7 +84,7 @@ public List getAllSystemKeyFiles() throws IOException { public ManagedKeyData loadSystemKey(Path keyPath) throws IOException { ManagedKeyProvider provider = getKeyProvider(); - return provider.unwrapKey(loadKeyMetadata(keyPath)); + return provider.unwrapKey(loadKeyMetadata(keyPath), null); } @VisibleForTesting diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java index 388692dfbf0b..cada943b64ee 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java @@ -75,6 +75,7 @@ @RunWith(Suite.class) @Suite.SuiteClasses({ TestKeymetaTableAccessor.TestAdd.class, + TestKeymetaTableAccessor.TestAddWithNullableFields.class, TestKeymetaTableAccessor.TestGet.class, TestKeymetaTableAccessor.TestOps.class, }) @@ -146,7 +147,7 @@ public static Collection data() { } @Test - public void testAddActiveKey() throws Exception { + public void testAddKey() throws Exception { managedKeyProvider.setMockedKeyState(ALIAS, keyState); ManagedKeyData keyData = managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); @@ -160,6 +161,42 @@ public void testAddActiveKey() throws Exception { } } + @RunWith(BlockJUnit4ClassRunner.class) + @Category({ MasterTests.class, SmallTests.class }) + public static class TestAddWithNullableFields extends TestKeymetaTableAccessor { + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestAddWithNullableFields.class); + + @Test + public void testAddKeyWithFailedStateAndNullMetadata() throws Exception { + managedKeyProvider.setMockedKeyState(ALIAS, FAILED); + ManagedKeyData keyData = new ManagedKeyData(CUST_ID, KEY_SPACE_GLOBAL, null, FAILED, null); + + accessor.addKey(keyData); + + ArgumentCaptor putCaptor = ArgumentCaptor.forClass(Put.class); + verify(table).put(putCaptor.capture()); + Put put = putCaptor.getValue(); + + // Verify the row key uses state value for metadata hash + byte[] expectedRowKey = constructRowKeyForMetadata(CUST_ID, KEY_SPACE_GLOBAL, + new byte[] { FAILED.getVal() }); + assertEquals(0, Bytes.compareTo(expectedRowKey, put.getRow())); + + Map valueMap = getValueMap(put); + + // Verify key-related columns are not present + assertNull(valueMap.get(new Bytes(DEK_CHECKSUM_QUAL_BYTES))); + assertNull(valueMap.get(new Bytes(DEK_WRAPPED_BY_STK_QUAL_BYTES))); + assertNull(valueMap.get(new Bytes(STK_CHECKSUM_QUAL_BYTES))); + + // Verify state is set correctly + assertEquals(new Bytes(new byte[] { FAILED.getVal() }), + valueMap.get(new Bytes(KEY_STATE_QUAL_BYTES))); + } + } + @RunWith(BlockJUnit4ClassRunner.class) @Category({ MasterTests.class, SmallTests.class }) public static class TestGet extends TestKeymetaTableAccessor { @@ -251,6 +288,32 @@ public void testGetKeyWithWrappedKey() throws Exception { assertNull(result); } + @Test + public void testGetKeyWithFailedState() throws Exception { + // Test with FAILED state and null metadata + Result failedResult = mock(Result.class); + when(failedResult.getValue(eq(KEY_META_INFO_FAMILY), eq(KEY_STATE_QUAL_BYTES))) + .thenReturn(new byte[] { FAILED.getVal() }); + when(failedResult.getValue(eq(KEY_META_INFO_FAMILY), eq(REFRESHED_TIMESTAMP_QUAL_BYTES))) + .thenReturn(Bytes.toBytes(0L)); + when(failedResult.getValue(eq(KEY_META_INFO_FAMILY), eq(STK_CHECKSUM_QUAL_BYTES))) + .thenReturn(Bytes.toBytes(0L)); + // Explicitly return null for metadata to simulate FAILED state with null metadata + when(failedResult.getValue(eq(KEY_META_INFO_FAMILY), eq(DEK_METADATA_QUAL_BYTES))) + .thenReturn(null); + + when(table.get(any(Get.class))).thenReturn(failedResult); + ManagedKeyData result = accessor.getKey(CUST_ID, KEY_NAMESPACE, FAILED); + + verify(table).get(any(Get.class)); + assertNotNull(result); + assertEquals(0, Bytes.compareTo(CUST_ID, result.getKeyCustodian())); + assertEquals(KEY_NAMESPACE, result.getKeyNamespace()); + assertNull(result.getKeyMetadata()); + assertNull(result.getTheKey()); + assertEquals(FAILED, result.getKeyState()); + } + @Test public void testGetKeyWithoutWrappedKey() throws Exception { when(table.get(any(Get.class))).thenReturn(result2); @@ -369,14 +432,7 @@ protected void assertPut(ManagedKeyData keyData, Put put) { assertTrue(Bytes.compareTo(constructRowKeyForMetadata(keyData), put.getRow()) == 0); - NavigableMap> familyCellMap = put.getFamilyCellMap(); - List cells = familyCellMap.get(KEY_META_INFO_FAMILY); - Map valueMap = new HashMap<>(); - for (Cell cell : cells) { - valueMap.put( - new Bytes(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()), - new Bytes(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength())); - } + Map valueMap = getValueMap(put); if (keyData.getTheKey() != null) { assertNotNull(valueMap.get(new Bytes(DEK_CHECKSUM_QUAL_BYTES))); @@ -395,4 +451,16 @@ protected void assertPut(ManagedKeyData keyData, Put put) { assertEquals(new Bytes(new byte[] { keyData.getKeyState().getVal() }), valueMap.get(new Bytes(KEY_STATE_QUAL_BYTES))); } + + private static Map getValueMap(Put put) { + NavigableMap> familyCellMap = put.getFamilyCellMap(); + List cells = familyCellMap.get(KEY_META_INFO_FAMILY); + Map valueMap = new HashMap<>(); + for (Cell cell : cells) { + valueMap.put( + new Bytes(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()), + new Bytes(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength())); + } + return valueMap; + } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyAccessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyAccessor.java index 75b77be027df..d8f4c2e43f12 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyAccessor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyAccessor.java @@ -88,7 +88,8 @@ public void tearDown() throws Exception { @Test public void testGetKeyNonExisting() throws Exception { for (int i = 0; i < 2; ++i) { - ManagedKeyData keyData = managedKeyAccessor.getKey(CUST_ID, KEY_SPACE_GLOBAL, "abcd"); + ManagedKeyData keyData = managedKeyAccessor.getKey(CUST_ID, KEY_SPACE_GLOBAL, "abcd", + null); verifyNonExisting(keyData); } } @@ -108,11 +109,11 @@ public void testGetFromL1() throws Exception { when(keyDataCache.getEntry(any())).thenReturn(keyData); ManagedKeyData result = - managedKeyAccessor.getKey(CUST_ID, KEY_SPACE_GLOBAL, keyData.getKeyMetadata()); + managedKeyAccessor.getKey(CUST_ID, KEY_SPACE_GLOBAL, keyData.getKeyMetadata(), null); assertEquals(keyData, result); verify(keyDataCache).getEntry(keyData.getKeyMetadata()); - verify(keymetaAccessor, never()).getKey(any(), any(), any()); + verify(keymetaAccessor, never()).getKey(any(), any(), any(String.class)); verify(keymetaAccessor, never()).addKey(any()); verify(keyDataCache, never()).addEntry(keyData); } @@ -120,10 +121,10 @@ public void testGetFromL1() throws Exception { @Test public void testGetFromL2() throws Exception { ManagedKeyData keyData = managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); - when(keymetaAccessor.getKey(any(), any(), any())).thenReturn(keyData); + when(keymetaAccessor.getKey(any(), any(), any(String.class))).thenReturn(keyData); ManagedKeyData result = - managedKeyAccessor.getKey(CUST_ID, KEY_SPACE_GLOBAL, keyData.getKeyMetadata()); + managedKeyAccessor.getKey(CUST_ID, KEY_SPACE_GLOBAL, keyData.getKeyMetadata(), null); assertEquals(keyData, result); verify(keyDataCache).getEntry(keyData.getKeyMetadata()); @@ -137,7 +138,7 @@ public void testGetFromProvider() throws Exception { ManagedKeyData keyData = managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); ManagedKeyData result = - managedKeyAccessor.getKey(CUST_ID, KEY_SPACE_GLOBAL, keyData.getKeyMetadata()); + managedKeyAccessor.getKey(CUST_ID, KEY_SPACE_GLOBAL, keyData.getKeyMetadata(), null); assertEquals(keyData, result); verify(keyDataCache).getEntry(keyData.getKeyMetadata()); From 49c58df1ebbb647ba977f1034be89ba57c1db3de Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Wed, 28 May 2025 12:57:47 +0530 Subject: [PATCH 33/70] Fix some naming --- .../hadoop/hbase/keymeta/ManagedKeyAccessor.java | 4 ++-- .../hbase/keymeta/ManagedKeyDataCache.java | 12 ++++++------ .../hbase/keymeta/TestManagedKeyAccessor.java | 16 ++++++++-------- .../hbase/keymeta/TestManagedKeyDataCache.java | 8 ++++---- 4 files changed, 20 insertions(+), 20 deletions(-) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java index cf4972727977..d01e70f1ecc8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java @@ -93,14 +93,14 @@ public ManagedKeyData getKey(byte[] key_cust, String keyNamespace, String keyMet public ManagedKeyData getAnActiveKey(byte[] key_cust, String keyNamespace) throws IOException, KeyException { assertKeyManagementEnabled(); - ManagedKeyData keyData = keyDataCache.getRandomEntryForPrefix(key_cust, keyNamespace); + ManagedKeyData keyData = keyDataCache.getRandomEntry(key_cust, keyNamespace); if (keyData == null) { List activeKeys = keymetaAccessor.getActiveKeys(key_cust, keyNamespace); if (! activeKeys.isEmpty()) { for (ManagedKeyData kd : activeKeys) { keyDataCache.addEntry(kd); } - keyData = keyDataCache.getRandomEntryForPrefix(key_cust, keyNamespace); + keyData = keyDataCache.getRandomEntry(key_cust, keyNamespace); } } return keyData; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java index 5fa98af7f41b..fa1e1421a28c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java @@ -34,12 +34,12 @@ @InterfaceAudience.Private public class ManagedKeyDataCache { private final Map cache; - private final Map>> prefixCache; + private final Map>> cacheByNS; private final ReentrantLock lock; private int nEntries; public ManagedKeyDataCache() { - this.prefixCache = new HashMap<>(); + this.cacheByNS = new HashMap<>(); this.cache = new HashMap<>(); this.lock = new ReentrantLock(); } @@ -57,7 +57,7 @@ public void addEntry(ManagedKeyData keyData) { cache.put(keyData.getKeyMetadata(), keyData); - Map> nsCache = prefixCache.computeIfAbsent(keyNamespace, + Map> nsCache = cacheByNS.computeIfAbsent(keyNamespace, k -> new HashMap<>()); Map keyMap = nsCache.computeIfAbsent(keyCust, k -> new HashMap<>()); @@ -100,7 +100,7 @@ public ManagedKeyData removeEntry(String keyMetadata) { if (removedEntry != null) { Bytes keyCust = new Bytes(removedEntry.getKeyCustodian()); String keyNamespace = removedEntry.getKeyNamespace(); - Map> nsCache = prefixCache.get(keyNamespace); + Map> nsCache = cacheByNS.get(keyNamespace); Map keyMap = nsCache.get(keyCust); keyMap.remove(removedEntry.getKeyMetadata()); if (keyMap.isEmpty()) { @@ -132,13 +132,13 @@ public int getEntryCount() { * @return a random ManagedKeyData entry with the given custodian and ACTIVE status, or null if * not found */ - public ManagedKeyData getRandomEntryForPrefix(byte[] key_cust, String keyNamespace) { + public ManagedKeyData getRandomEntry(byte[] key_cust, String keyNamespace) { lock.lock(); try { List activeEntries = new ArrayList<>(); Bytes keyCust = new Bytes(key_cust); - Map> nsCache = prefixCache.get(keyNamespace); + Map> nsCache = cacheByNS.get(keyNamespace); Map keyMap = nsCache != null ? nsCache.get(keyCust) : null; if (keyMap != null) { for (ManagedKeyData entry : keyMap.values()) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyAccessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyAccessor.java index d8f4c2e43f12..dd4e5393926c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyAccessor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyAccessor.java @@ -152,19 +152,19 @@ public void testGetActiveKeyWhenMissing() throws Exception { ManagedKeyData result = managedKeyAccessor.getAnActiveKey(CUST_ID, KEY_SPACE_GLOBAL); assertNull(result); - verify(keyDataCache).getRandomEntryForPrefix(CUST_ID, KEY_SPACE_GLOBAL); + verify(keyDataCache).getRandomEntry(CUST_ID, KEY_SPACE_GLOBAL); verify(keymetaAccessor).getActiveKeys(CUST_ID, KEY_SPACE_GLOBAL); } @Test public void testGetActiveKeyFromL1() throws Exception { ManagedKeyData keyData = managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); - when(keyDataCache.getRandomEntryForPrefix(any(), any())).thenReturn(keyData); + when(keyDataCache.getRandomEntry(any(), any())).thenReturn(keyData); ManagedKeyData result = managedKeyAccessor.getAnActiveKey(CUST_ID, KEY_SPACE_GLOBAL); assertEquals(keyData, result); - verify(keyDataCache).getRandomEntryForPrefix(CUST_ID, KEY_SPACE_GLOBAL); + verify(keyDataCache).getRandomEntry(CUST_ID, KEY_SPACE_GLOBAL); verify(keymetaAccessor, never()).getActiveKeys(any(), any()); } @@ -175,20 +175,20 @@ public void testGetActiveKeyFromL2WithNoResults() throws Exception { ManagedKeyData result = managedKeyAccessor.getAnActiveKey(CUST_ID, KEY_SPACE_GLOBAL); assertNull(result); - verify(keyDataCache).getRandomEntryForPrefix(CUST_ID, KEY_SPACE_GLOBAL); + verify(keyDataCache).getRandomEntry(CUST_ID, KEY_SPACE_GLOBAL); verify(keymetaAccessor).getActiveKeys(CUST_ID, KEY_SPACE_GLOBAL); } @Test public void testGetActiveKeyFromL2WithSingleResult() throws Exception { ManagedKeyData keyData = managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); - when(keyDataCache.getRandomEntryForPrefix(any(), any())).thenReturn(null, keyData); + when(keyDataCache.getRandomEntry(any(), any())).thenReturn(null, keyData); when(keymetaAccessor.getActiveKeys(any(), any())).thenReturn(Arrays.asList(keyData)); ManagedKeyData result = managedKeyAccessor.getAnActiveKey(CUST_ID, KEY_SPACE_GLOBAL); assertEquals(keyData, result); - verify(keyDataCache, times(2)).getRandomEntryForPrefix(CUST_ID, KEY_SPACE_GLOBAL); + verify(keyDataCache, times(2)).getRandomEntry(CUST_ID, KEY_SPACE_GLOBAL); verify(keymetaAccessor).getActiveKeys(CUST_ID, KEY_SPACE_GLOBAL); verify(keyDataCache).addEntry(keyData); } @@ -198,13 +198,13 @@ public void testGetActiveKeyFromL2WithMultipleResults() throws Exception { managedKeyProvider.setMultikeyGenMode(true); ManagedKeyData keyData1 = managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); ManagedKeyData keyData2 = managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); - when(keyDataCache.getRandomEntryForPrefix(any(), any())).thenReturn(null, keyData1); + when(keyDataCache.getRandomEntry(any(), any())).thenReturn(null, keyData1); when(keymetaAccessor.getActiveKeys(any(), any())).thenReturn(Arrays.asList(keyData1, keyData2)); ManagedKeyData result = managedKeyAccessor.getAnActiveKey(CUST_ID, KEY_SPACE_GLOBAL); assertEquals(keyData1, result); - verify(keyDataCache, times(2)).getRandomEntryForPrefix(CUST_ID, KEY_SPACE_GLOBAL); + verify(keyDataCache, times(2)).getRandomEntry(CUST_ID, KEY_SPACE_GLOBAL); verify(keymetaAccessor).getActiveKeys(CUST_ID, KEY_SPACE_GLOBAL); verify(keyDataCache, times(2)).addEntry(any()); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java index 6449dc06d495..00c702b17098 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java @@ -112,7 +112,7 @@ public void testOperations() throws Exception { @Test public void testRandomKeyGet() throws Exception{ - assertNull(cache.getRandomEntryForPrefix(CUST_ID, KEY_SPACE_GLOBAL)); + assertNull(cache.getRandomEntry(CUST_ID, KEY_SPACE_GLOBAL)); List allKeys = new ArrayList<>(); for (int i = 0; i < 20; ++i) { ManagedKeyData keyData; @@ -125,7 +125,7 @@ public void testRandomKeyGet() throws Exception{ } Set keys = new HashSet<>(); for (int i = 0; i < 10; ++i) { - keys.add(cache.getRandomEntryForPrefix(CUST_ID, KEY_SPACE_GLOBAL)); + keys.add(cache.getRandomEntry(CUST_ID, KEY_SPACE_GLOBAL)); } assertTrue(keys.size() > 1); assertTrue(keys.size() <= 10); @@ -136,7 +136,7 @@ public void testRandomKeyGet() throws Exception{ for(ManagedKeyData key: allKeys) { assertEquals(key, cache.removeEntry(key.getKeyMetadata())); } - assertNull(cache.getRandomEntryForPrefix(CUST_ID, KEY_SPACE_GLOBAL)); + assertNull(cache.getRandomEntry(CUST_ID, KEY_SPACE_GLOBAL)); } @Test @@ -145,7 +145,7 @@ public void testRandomKeyGetNoActive() throws Exception { for (int i = 0; i < 20; ++i) { cache.addEntry(managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL)); } - assertNull(cache.getRandomEntryForPrefix(CUST_ID, KEY_SPACE_GLOBAL)); + assertNull(cache.getRandomEntry(CUST_ID, KEY_SPACE_GLOBAL)); } private void assertEntries(ManagedKeyData... keys) { From d3a72fcc9ae35c25f21b453e95c2b3eba0cd0105 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Wed, 28 May 2025 18:22:55 +0530 Subject: [PATCH 34/70] rename --- .../org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java | 2 +- .../apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java | 6 +++--- .../apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java | 6 +++--- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java index 96b5844930bb..f88299d3c61f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java @@ -109,7 +109,7 @@ public ManagedKeyData(byte[] key_cust, String key_namespace, Key theKey, Managed Preconditions.checkNotNull(keyState, "keyState should not be null"); // Only check for null metadata if state is not FAILED if (keyState != ManagedKeyState.FAILED) { - Preconditions.checkNotNull(keyMetadata, "keyMetadata should not be null"); + Preconditions.checkNotNull(keyMetadata, "keyMetadata should not be null"); } Preconditions.checkArgument(readOpCount >= 0, "readOpCount: " + readOpCount + " should be >= 0"); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java index d956d2e2a572..a2882314b1b1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java @@ -57,7 +57,7 @@ public class KeymetaServiceEndpoint implements MasterCoprocessor { private MasterServices master = null; - private final ManagedKeysService managedKeysService = new KeyMetaAdminServiceImpl(); + private final ManagedKeysService managedKeysService = new KeymetaAdminServiceImpl(); /** * Starts the coprocessor by initializing the reference to the {@link org.apache.hadoop.hbase.master.MasterServices} @@ -78,7 +78,7 @@ public void start(CoprocessorEnvironment env) throws IOException { /** * Returns an iterable of the available coprocessor services, which includes the * {@link ManagedKeysService} implemented by - * {@link KeymetaServiceEndpoint.KeyMetaAdminServiceImpl}. + * {@link KeymetaServiceEndpoint.KeymetaAdminServiceImpl}. * * @return An iterable of the available coprocessor services. */ @@ -92,7 +92,7 @@ public Iterable getServices() { * interface, which provides the actual method implementations for enabling key management. */ @VisibleForTesting - public class KeyMetaAdminServiceImpl extends ManagedKeysService { + public class KeymetaAdminServiceImpl extends ManagedKeysService { /** * Enables key management for a given tenant and namespace, as specified in the provided diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java index 9808e061f8a7..7e1ba09838cf 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java @@ -21,7 +21,7 @@ import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.coprocessor.HasMasterServices; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; -import org.apache.hadoop.hbase.keymeta.KeymetaServiceEndpoint.KeyMetaAdminServiceImpl; +import org.apache.hadoop.hbase.keymeta.KeymetaServiceEndpoint.KeymetaAdminServiceImpl; import org.apache.hadoop.hbase.master.MasterServices; import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.GetManagedKeysResponse; import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysRequest; @@ -86,7 +86,7 @@ public class TestKeymetaEndpoint { KeymetaServiceEndpoint keymetaServiceEndpoint; private ManagedKeysResponse.Builder responseBuilder; private ManagedKeysRequest.Builder requestBuilder; - private KeyMetaAdminServiceImpl keyMetaAdminService; + private KeymetaAdminServiceImpl keyMetaAdminService; private ManagedKeyData keyData1; private ManagedKeyData keyData2; @@ -98,7 +98,7 @@ public void setUp() throws Exception { withSettings().extraInterfaces(HasMasterServices.class)); when(((HasMasterServices) env).getMasterServices()).thenReturn(master); keymetaServiceEndpoint.start(env); - keyMetaAdminService = (KeyMetaAdminServiceImpl) keymetaServiceEndpoint.getServices() + keyMetaAdminService = (KeymetaAdminServiceImpl) keymetaServiceEndpoint.getServices() .iterator().next(); responseBuilder = ManagedKeysResponse.newBuilder().setKeyState(KEY_ACTIVE); requestBuilder = ManagedKeysRequest.newBuilder() From 49d37658eb9633742d567f806f13b9ab373f83b5 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Fri, 6 Jun 2025 13:56:07 +0530 Subject: [PATCH 35/70] Config for dynamic lookup --- .../main/java/org/apache/hadoop/hbase/HConstants.java | 3 +++ .../apache/hadoop/hbase/keymeta/KeyManagementBase.java | 10 ++++++++++ .../hadoop/hbase/keymeta/KeymetaServiceEndpoint.java | 6 +++--- .../hadoop/hbase/keymeta/ManagedKeyAccessor.java | 8 +++++--- 4 files changed, 21 insertions(+), 6 deletions(-) diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java index 262a0779b4ca..fc2619f1ce48 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java @@ -1321,6 +1321,9 @@ public enum OperationStatusCode { public static final String CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_COUNT = "hbase.crypto.managed_keys.per_cust_namespace.active_count"; public static final int CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_DEFAULT_COUNT = 1; + public static final String CRYPTO_MANAGED_KEYS_DYNAMIC_LOOKUP_ENABLED_CONF_KEY = + "hbase.crypto.managed_keys.dynamic_lookup.enabled"; + public static final boolean CRYPTO_MANAGED_KEYS_DYNAMIC_LOOKUP_DEFAULT_ENABLED = true; /** Configuration key for setting RPC codec class name */ public static final String RPC_CODEC_CONF_KEY = "hbase.client.rpc.codec"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java index 10aac70a71c7..5711bf946e5a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java @@ -73,6 +73,16 @@ protected boolean isKeyManagementEnabled() { return keyManagementEnabled; } + /** + * A utility method for checking if dynamic lookup is enabled. + * @return true if dynamic lookup is enabled + */ + protected boolean isDynamicLookupEnabled() { + return getServer().getConfiguration().getBoolean( + HConstants.CRYPTO_MANAGED_KEYS_DYNAMIC_LOOKUP_ENABLED_CONF_KEY, + HConstants.CRYPTO_MANAGED_KEYS_DYNAMIC_LOOKUP_DEFAULT_ENABLED); + } + /** * Check if key management is enabled, otherwise throw exception. * @throws IOException if key management is not enabled. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java index a2882314b1b1..d956d2e2a572 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java @@ -57,7 +57,7 @@ public class KeymetaServiceEndpoint implements MasterCoprocessor { private MasterServices master = null; - private final ManagedKeysService managedKeysService = new KeymetaAdminServiceImpl(); + private final ManagedKeysService managedKeysService = new KeyMetaAdminServiceImpl(); /** * Starts the coprocessor by initializing the reference to the {@link org.apache.hadoop.hbase.master.MasterServices} @@ -78,7 +78,7 @@ public void start(CoprocessorEnvironment env) throws IOException { /** * Returns an iterable of the available coprocessor services, which includes the * {@link ManagedKeysService} implemented by - * {@link KeymetaServiceEndpoint.KeymetaAdminServiceImpl}. + * {@link KeymetaServiceEndpoint.KeyMetaAdminServiceImpl}. * * @return An iterable of the available coprocessor services. */ @@ -92,7 +92,7 @@ public Iterable getServices() { * interface, which provides the actual method implementations for enabling key management. */ @VisibleForTesting - public class KeymetaAdminServiceImpl extends ManagedKeysService { + public class KeyMetaAdminServiceImpl extends ManagedKeysService { /** * Enables key management for a given tenant and namespace, as specified in the provided diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java index d01e70f1ecc8..c6190838ec11 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java @@ -61,9 +61,11 @@ public ManagedKeyData getKey(byte[] key_cust, String keyNamespace, String keyMet // 2. Check L2 cache. keyData = keymetaAccessor.getKey(key_cust, keyNamespace, keyMetadata); if (keyData == null) { - // 3. Check with Key Provider. - ManagedKeyProvider provider = getKeyProvider(); - keyData = provider.unwrapKey(keyMetadata, wrappedKey); + // 3. If dynamic lookup is enabled, check with Key Provider. + if (isDynamicLookupEnabled()) { + ManagedKeyProvider provider = getKeyProvider(); + keyData = provider.unwrapKey(keyMetadata, wrappedKey); + } if (keyData != null) { LOG.info("Got key data with status: {} and metadata: {} for prefix: {}", keyData.getKeyState(), keyData.getKeyMetadata(), From c2d2ddb02a8d8fdd4e5ab701f321e18594befc6f Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Fri, 6 Jun 2025 15:18:08 +0530 Subject: [PATCH 36/70] Finish an incomplete rename --- .../apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java index d956d2e2a572..a2882314b1b1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java @@ -57,7 +57,7 @@ public class KeymetaServiceEndpoint implements MasterCoprocessor { private MasterServices master = null; - private final ManagedKeysService managedKeysService = new KeyMetaAdminServiceImpl(); + private final ManagedKeysService managedKeysService = new KeymetaAdminServiceImpl(); /** * Starts the coprocessor by initializing the reference to the {@link org.apache.hadoop.hbase.master.MasterServices} @@ -78,7 +78,7 @@ public void start(CoprocessorEnvironment env) throws IOException { /** * Returns an iterable of the available coprocessor services, which includes the * {@link ManagedKeysService} implemented by - * {@link KeymetaServiceEndpoint.KeyMetaAdminServiceImpl}. + * {@link KeymetaServiceEndpoint.KeymetaAdminServiceImpl}. * * @return An iterable of the available coprocessor services. */ @@ -92,7 +92,7 @@ public Iterable getServices() { * interface, which provides the actual method implementations for enabling key management. */ @VisibleForTesting - public class KeyMetaAdminServiceImpl extends ManagedKeysService { + public class KeymetaAdminServiceImpl extends ManagedKeysService { /** * Enables key management for a given tenant and namespace, as specified in the provided From 9b0555d6c7706951b8cdc64b55369498f9d0df90 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Mon, 9 Jun 2025 19:08:32 +0530 Subject: [PATCH 37/70] More test coverage --- .../hbase/io/crypto/ManagedKeyData.java | 7 +- .../hbase/io/crypto/ManagedKeyProvider.java | 3 +- .../hbase/io/crypto/TestManagedKeyData.java | 12 + .../hbase/keymeta/SystemKeyAccessor.java | 15 +- .../hbase/keymeta/DummyKeyProvider.java | 38 +++ .../hbase/keymeta/TestKeyManagementBase.java | 64 ++++ .../hbase/keymeta/TestManagedKeymeta.java | 53 +++ .../hbase/keymeta/TestSystemKeyCache.java | 316 ++++++++++++++++++ ...a => TestSystemKeyAccessorAndManager.java} | 180 +++++++++- 9 files changed, 671 insertions(+), 17 deletions(-) create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/DummyKeyProvider.java create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeyManagementBase.java create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestSystemKeyCache.java rename hbase-server/src/test/java/org/apache/hadoop/hbase/master/{TestSystemKey.java => TestSystemKeyAccessorAndManager.java} (67%) diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java index f88299d3c61f..98c40359b962 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java @@ -103,13 +103,14 @@ public ManagedKeyData(byte[] key_cust, String key_namespace, Key theKey, Managed * @throws NullPointerException if any of key_cust, keyState or keyMetadata is null. */ public ManagedKeyData(byte[] key_cust, String key_namespace, Key theKey, ManagedKeyState keyState, - String keyMetadata, long refreshTimestamp, long readOpCount, long writeOpCount) { + String keyMetadata, long refreshTimestamp, long readOpCount, + long writeOpCount) { Preconditions.checkNotNull(key_cust, "key_cust should not be null"); Preconditions.checkNotNull(key_namespace, "key_namespace should not be null"); Preconditions.checkNotNull(keyState, "keyState should not be null"); // Only check for null metadata if state is not FAILED if (keyState != ManagedKeyState.FAILED) { - Preconditions.checkNotNull(keyMetadata, "keyMetadata should not be null"); + Preconditions.checkNotNull(keyMetadata, "keyMetadata should not be null"); } Preconditions.checkArgument(readOpCount >= 0, "readOpCount: " + readOpCount + " should be >= 0"); @@ -242,7 +243,7 @@ public static long constructKeyChecksum(byte[] data) { * @return The hash of the key metadata as a byte array. */ public byte[] getKeyMetadataHash() { - if (keyMetadataHash == null) { + if (keyMetadataHash == null && keyMetadata != null) { keyMetadataHash = constructMetadataHash(keyMetadata); } return keyMetadataHash; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java index 4140ee2d1713..a7fb4dbe177a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java @@ -83,7 +83,8 @@ static byte[] decodeToBytes(String encodedKeyCust) throws IOException { key_cust = Base64.getDecoder().decode(encodedKeyCust); } catch (IllegalArgumentException e) { - throw new IOException("Failed to decode specified key custodian as Base64 string: " + encodedKeyCust, e); + throw new IOException("Failed to decode specified key custodian as Base64 string: " + + encodedKeyCust, e); } return key_cust; } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyData.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyData.java index 5d3410a131b6..f665f1c7566c 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyData.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyData.java @@ -163,6 +163,18 @@ public void testGetKeyMetadataHashEncoded() { assertEquals(24, encodedHash.length()); // Base64 encoded MD5 hash is 24 characters long } + @Test + public void testGetKeyMetadataHashEncodedWithNullHash() { + // Create ManagedKeyData with FAILED state and null metadata + ManagedKeyData keyData = new ManagedKeyData( + "custodian".getBytes(), "namespace", null, ManagedKeyState.FAILED, + null // null metadata should result in null hash + ); + + String encoded = keyData.getKeyMetadataHashEncoded(); + assertNull(encoded); + } + @Test public void testConstructMetadataHash() { byte[] hash = ManagedKeyData.constructMetadataHash(keyMetadata); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java index c207dc251d1f..68b83c907872 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java @@ -84,7 +84,11 @@ public List getAllSystemKeyFiles() throws IOException { public ManagedKeyData loadSystemKey(Path keyPath) throws IOException { ManagedKeyProvider provider = getKeyProvider(); - return provider.unwrapKey(loadKeyMetadata(keyPath), null); + ManagedKeyData keyData = provider.unwrapKey(loadKeyMetadata(keyPath), null); + if (keyData == null) { + throw new RuntimeException("Failed to load system key from: " + keyPath); + } + return keyData; } @VisibleForTesting @@ -110,7 +114,14 @@ public static int extractSystemKeySeqNum(Path keyPath) throws IOException { public static int extractKeySequence(Path clusterKeyFile) throws IOException { int keySeq = -1; if (clusterKeyFile.getName().startsWith(SYSTEM_KEY_FILE_PREFIX)) { - keySeq = Integer.valueOf(clusterKeyFile.getName().substring(SYSTEM_KEY_FILE_PREFIX.length())); + String seqStr = clusterKeyFile.getName().substring(SYSTEM_KEY_FILE_PREFIX.length()); + if (! seqStr.isEmpty()) { + try { + keySeq = Integer.valueOf(seqStr); + } catch (NumberFormatException e) { + throw new IOException("Invalid file name for a cluster key: " + clusterKeyFile, e); + } + } } return keySeq; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/DummyKeyProvider.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/DummyKeyProvider.java new file mode 100644 index 000000000000..bdbaa9fa009f --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/DummyKeyProvider.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.keymeta; + +import java.security.Key; + +import org.apache.hadoop.hbase.io.crypto.KeyProvider; + +public class DummyKeyProvider implements KeyProvider { + @Override + public void init(String params) { + } + + @Override + public Key[] getKeys(String[] aliases) { + return null; + } + + @Override + public Key getKey(String alias) { + return null; + } +} \ No newline at end of file diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeyManagementBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeyManagementBase.java new file mode 100644 index 000000000000..6e6bb237fa60 --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeyManagementBase.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.keymeta; + +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.Server; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.apache.hadoop.hbase.testclassification.MasterTests; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category({ MasterTests.class, SmallTests.class }) +public class TestKeyManagementBase { + public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass( + TestKeyManagementBase.class); + + @Test + public void testGetKeyProviderWithInvalidProvider() throws Exception { + // Setup configuration with a non-ManagedKeyProvider + Configuration conf = new Configuration(); + conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, + "org.apache.hadoop.hbase.keymeta.DummyKeyProvider"); + + Server mockServer = mock(Server.class); + when(mockServer.getConfiguration()).thenReturn(conf); + + KeyManagementBase keyMgmt = new TestKeyManagement(mockServer); + + // Should throw RuntimeException when provider is not ManagedKeyProvider + RuntimeException exception = assertThrows(RuntimeException.class, () -> { + keyMgmt.getKeyProvider(); + }); + + assertTrue(exception.getMessage().contains("expected to be of type ManagedKeyProvider")); + } + + private static class TestKeyManagement extends KeyManagementBase { + public TestKeyManagement(Server server) { + super(server); + } + } +} \ No newline at end of file diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java index 6eb4a998236b..dee4e5128e97 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java @@ -29,6 +29,15 @@ import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos; +import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException; +import org.mockito.stubbing.OngoingStubbing; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.mockito.Mockito.any; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; +import java.lang.reflect.Field; import java.io.IOException; import java.security.KeyException; import java.util.List; @@ -93,4 +102,48 @@ private static void assertKeyDataListSingleKey(List managedKeySt assertEquals(1, managedKeyStates.size()); assertEquals(keyState, managedKeyStates.get(0).getKeyState()); } + + @Test + public void testEnableKeyManagementWithServiceException() throws Exception { + ManagedKeysProtos.ManagedKeysService.BlockingInterface mockStub = + mock(ManagedKeysProtos.ManagedKeysService.BlockingInterface.class); + + ServiceException networkError = new ServiceException("Network error"); + networkError.initCause(new IOException("Network error")); + when(mockStub.enableKeyManagement(any(), any())).thenThrow(networkError); + + KeymetaAdminClient client = new KeymetaAdminClient(TEST_UTIL.getConnection()); + // Use reflection to set the stub + Field stubField = KeymetaAdminClient.class.getDeclaredField("stub"); + stubField.setAccessible(true); + stubField.set(client, mockStub); + + IOException exception = assertThrows(IOException.class, () -> { + client.enableKeyManagement("cust", "namespace"); + }); + + assertTrue(exception.getMessage().contains("Network error")); + } + + @Test + public void testGetManagedKeysWithServiceException() throws Exception { + // Similar test for getManagedKeys method + ManagedKeysProtos.ManagedKeysService.BlockingInterface mockStub = + mock(ManagedKeysProtos.ManagedKeysService.BlockingInterface.class); + + ServiceException networkError = new ServiceException("Network error"); + networkError.initCause(new IOException("Network error")); + when(mockStub.getManagedKeys(any(), any())).thenThrow(networkError); + + KeymetaAdminClient client = new KeymetaAdminClient(TEST_UTIL.getConnection()); + Field stubField = KeymetaAdminClient.class.getDeclaredField("stub"); + stubField.setAccessible(true); + stubField.set(client, mockStub); + + IOException exception = assertThrows(IOException.class, () -> { + client.getManagedKeys("cust", "namespace"); + }); + + assertTrue(exception.getMessage().contains("Network error")); + } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestSystemKeyCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestSystemKeyCache.java new file mode 100644 index 000000000000..bd051c311516 --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestSystemKeyCache.java @@ -0,0 +1,316 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.keymeta; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertSame; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.io.IOException; +import java.security.Key; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import javax.crypto.spec.SecretKeySpec; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.crypto.key.TestKeyProvider; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.Server; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; +import org.apache.hadoop.hbase.io.crypto.MockManagedKeyProvider; +import org.apache.hadoop.hbase.testclassification.MasterTests; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + +/** + * Tests for SystemKeyCache class + */ +@Category({ MasterTests.class, SmallTests.class }) +public class TestSystemKeyCache { + + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestSystemKeyCache.class); + + @Mock + private SystemKeyAccessor mockAccessor; + + private static final byte[] TEST_CUSTODIAN = "test-custodian".getBytes(); + private static final String TEST_NAMESPACE = "test-namespace"; + private static final String TEST_METADATA_1 = "metadata-1"; + private static final String TEST_METADATA_2 = "metadata-2"; + private static final String TEST_METADATA_3 = "metadata-3"; + + private Key testKey1; + private Key testKey2; + private Key testKey3; + private ManagedKeyData keyData1; + private ManagedKeyData keyData2; + private ManagedKeyData keyData3; + private Path keyPath1; + private Path keyPath2; + private Path keyPath3; + + @Before + public void setUp() { + MockitoAnnotations.openMocks(this); + + // Create test keys + testKey1 = new SecretKeySpec("test-key-1-bytes".getBytes(), "AES"); + testKey2 = new SecretKeySpec("test-key-2-bytes".getBytes(), "AES"); + testKey3 = new SecretKeySpec("test-key-3-bytes".getBytes(), "AES"); + + // Create test key data with different checksums + keyData1 = new ManagedKeyData(TEST_CUSTODIAN, TEST_NAMESPACE, testKey1, + ManagedKeyState.ACTIVE, TEST_METADATA_1, 1000L, 0, 0); + keyData2 = new ManagedKeyData(TEST_CUSTODIAN, TEST_NAMESPACE, testKey2, + ManagedKeyState.ACTIVE, TEST_METADATA_2, 2000L, 0, 0); + keyData3 = new ManagedKeyData(TEST_CUSTODIAN, TEST_NAMESPACE, testKey3, + ManagedKeyState.ACTIVE, TEST_METADATA_3, 3000L, 0, 0); + + // Create test paths + keyPath1 = new Path("/system/keys/key1"); + keyPath2 = new Path("/system/keys/key2"); + keyPath3 = new Path("/system/keys/key3"); + } + + @Test + public void testCreateCacheWithSingleSystemKey() throws Exception { + // Setup + List keyPaths = Collections.singletonList(keyPath1); + when(mockAccessor.getAllSystemKeyFiles()).thenReturn(keyPaths); + when(mockAccessor.loadSystemKey(keyPath1)).thenReturn(keyData1); + + // Execute + SystemKeyCache cache = SystemKeyCache.createCache(mockAccessor); + + // Verify + assertNotNull(cache); + assertSame(keyData1, cache.getLatestSystemKey()); + assertSame(keyData1, cache.getSystemKeyByChecksum(keyData1.getKeyChecksum())); + assertNull(cache.getSystemKeyByChecksum(999L)); // Non-existent checksum + + verify(mockAccessor).getAllSystemKeyFiles(); + verify(mockAccessor).loadSystemKey(keyPath1); + } + + @Test + public void testCreateCacheWithMultipleSystemKeys() throws Exception { + // Setup - keys should be processed in order, first one becomes latest + List keyPaths = Arrays.asList(keyPath1, keyPath2, keyPath3); + when(mockAccessor.getAllSystemKeyFiles()).thenReturn(keyPaths); + when(mockAccessor.loadSystemKey(keyPath1)).thenReturn(keyData1); + when(mockAccessor.loadSystemKey(keyPath2)).thenReturn(keyData2); + when(mockAccessor.loadSystemKey(keyPath3)).thenReturn(keyData3); + + // Execute + SystemKeyCache cache = SystemKeyCache.createCache(mockAccessor); + + // Verify + assertNotNull(cache); + assertSame(keyData1, cache.getLatestSystemKey()); // First key becomes latest + + // All keys should be accessible by checksum + assertSame(keyData1, cache.getSystemKeyByChecksum(keyData1.getKeyChecksum())); + assertSame(keyData2, cache.getSystemKeyByChecksum(keyData2.getKeyChecksum())); + assertSame(keyData3, cache.getSystemKeyByChecksum(keyData3.getKeyChecksum())); + + // Non-existent checksum should return null + assertNull(cache.getSystemKeyByChecksum(999L)); + + verify(mockAccessor).getAllSystemKeyFiles(); + verify(mockAccessor).loadSystemKey(keyPath1); + verify(mockAccessor).loadSystemKey(keyPath2); + verify(mockAccessor).loadSystemKey(keyPath3); + } + + @Test + public void testCreateCacheWithNoSystemKeyFiles() throws Exception { + // Setup - this covers the uncovered lines 46-47 + when(mockAccessor.getAllSystemKeyFiles()).thenReturn(Collections.emptyList()); + + // Execute + SystemKeyCache cache = SystemKeyCache.createCache(mockAccessor); + + // Verify + assertNull(cache); + verify(mockAccessor).getAllSystemKeyFiles(); + } + + @Test + public void testCreateCacheWithEmptyKeyFilesList() throws Exception { + // Setup - alternative empty scenario + when(mockAccessor.getAllSystemKeyFiles()).thenReturn(new ArrayList<>()); + + // Execute + SystemKeyCache cache = SystemKeyCache.createCache(mockAccessor); + + // Verify + assertNull(cache); + verify(mockAccessor).getAllSystemKeyFiles(); + } + + @Test + public void testGetLatestSystemKeyConsistency() throws Exception { + // Setup + List keyPaths = Arrays.asList(keyPath1, keyPath2); + when(mockAccessor.getAllSystemKeyFiles()).thenReturn(keyPaths); + when(mockAccessor.loadSystemKey(keyPath1)).thenReturn(keyData1); + when(mockAccessor.loadSystemKey(keyPath2)).thenReturn(keyData2); + + // Execute + SystemKeyCache cache = SystemKeyCache.createCache(mockAccessor); + + // Verify - latest key should be consistent across calls + ManagedKeyData latest1 = cache.getLatestSystemKey(); + ManagedKeyData latest2 = cache.getLatestSystemKey(); + + assertNotNull(latest1); + assertSame(latest1, latest2); + assertSame(keyData1, latest1); // First key should be latest + } + + @Test + public void testGetSystemKeyByChecksumWithDifferentKeys() throws Exception { + // Setup + List keyPaths = Arrays.asList(keyPath1, keyPath2, keyPath3); + when(mockAccessor.getAllSystemKeyFiles()).thenReturn(keyPaths); + when(mockAccessor.loadSystemKey(keyPath1)).thenReturn(keyData1); + when(mockAccessor.loadSystemKey(keyPath2)).thenReturn(keyData2); + when(mockAccessor.loadSystemKey(keyPath3)).thenReturn(keyData3); + + // Execute + SystemKeyCache cache = SystemKeyCache.createCache(mockAccessor); + + // Verify each key can be retrieved by its unique checksum + long checksum1 = keyData1.getKeyChecksum(); + long checksum2 = keyData2.getKeyChecksum(); + long checksum3 = keyData3.getKeyChecksum(); + + // Checksums should be different + assert checksum1 != checksum2; + assert checksum2 != checksum3; + assert checksum1 != checksum3; + + // Each key should be retrievable by its checksum + assertSame(keyData1, cache.getSystemKeyByChecksum(checksum1)); + assertSame(keyData2, cache.getSystemKeyByChecksum(checksum2)); + assertSame(keyData3, cache.getSystemKeyByChecksum(checksum3)); + } + + @Test + public void testGetSystemKeyByChecksumWithNonExistentChecksum() throws Exception { + // Setup + List keyPaths = Collections.singletonList(keyPath1); + when(mockAccessor.getAllSystemKeyFiles()).thenReturn(keyPaths); + when(mockAccessor.loadSystemKey(keyPath1)).thenReturn(keyData1); + + // Execute + SystemKeyCache cache = SystemKeyCache.createCache(mockAccessor); + + // Verify + assertNotNull(cache); + + // Test various non-existent checksums + assertNull(cache.getSystemKeyByChecksum(0L)); + assertNull(cache.getSystemKeyByChecksum(-1L)); + assertNull(cache.getSystemKeyByChecksum(Long.MAX_VALUE)); + assertNull(cache.getSystemKeyByChecksum(Long.MIN_VALUE)); + + // But the actual checksum should work + assertSame(keyData1, cache.getSystemKeyByChecksum(keyData1.getKeyChecksum())); + } + + @Test(expected = IOException.class) + public void testCreateCacheWithAccessorIOException() throws Exception { + // Setup - accessor throws IOException + when(mockAccessor.getAllSystemKeyFiles()).thenThrow(new IOException("File system error")); + + // Execute - should propagate the IOException + SystemKeyCache.createCache(mockAccessor); + } + + @Test(expected = IOException.class) + public void testCreateCacheWithLoadSystemKeyIOException() throws Exception { + // Setup - loading key throws IOException + List keyPaths = Collections.singletonList(keyPath1); + when(mockAccessor.getAllSystemKeyFiles()).thenReturn(keyPaths); + when(mockAccessor.loadSystemKey(keyPath1)).thenThrow(new IOException("Key load error")); + + // Execute - should propagate the IOException + SystemKeyCache.createCache(mockAccessor); + } + + @Test + public void testCacheWithKeysHavingSameChecksum() throws Exception { + // Setup - create two keys that will have the same checksum (same content) + Key sameKey1 = new SecretKeySpec("identical-bytes".getBytes(), "AES"); + Key sameKey2 = new SecretKeySpec("identical-bytes".getBytes(), "AES"); + + ManagedKeyData sameManagedKey1 = new ManagedKeyData(TEST_CUSTODIAN, TEST_NAMESPACE, + sameKey1, ManagedKeyState.ACTIVE, "metadata-A", 1000L, 0, 0); + ManagedKeyData sameManagedKey2 = new ManagedKeyData(TEST_CUSTODIAN, TEST_NAMESPACE, + sameKey2, ManagedKeyState.ACTIVE, "metadata-B", 2000L, 0, 0); + + // Verify they have the same checksum + assertEquals(sameManagedKey1.getKeyChecksum(), sameManagedKey2.getKeyChecksum()); + + List keyPaths = Arrays.asList(keyPath1, keyPath2); + when(mockAccessor.getAllSystemKeyFiles()).thenReturn(keyPaths); + when(mockAccessor.loadSystemKey(keyPath1)).thenReturn(sameManagedKey1); + when(mockAccessor.loadSystemKey(keyPath2)).thenReturn(sameManagedKey2); + + // Execute + SystemKeyCache cache = SystemKeyCache.createCache(mockAccessor); + + // Verify - second key should overwrite first in the map due to same checksum + assertNotNull(cache); + assertSame(sameManagedKey1, cache.getLatestSystemKey()); // First is still latest + + // The map should contain the second key for the shared checksum + ManagedKeyData retrievedKey = cache.getSystemKeyByChecksum(sameManagedKey1.getKeyChecksum()); + assertSame(sameManagedKey2, retrievedKey); // Last one wins in TreeMap + } + + @Test + public void testCreateCacheWithUnexpectedNullKeyData() throws Exception { + when(mockAccessor.getAllSystemKeyFiles()).thenReturn(Arrays.asList(keyPath1)); + when(mockAccessor.loadSystemKey(keyPath1)).thenThrow(new RuntimeException("Key load error")); + + RuntimeException ex = assertThrows(RuntimeException.class, () -> { + SystemKeyCache.createCache(mockAccessor); + }); + assertTrue(ex.getMessage().equals("Key load error")); + } +} \ No newline at end of file diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java similarity index 67% rename from hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java rename to hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java index c16d832d8a17..197795cb825d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKey.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java @@ -11,6 +11,7 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; import org.apache.hadoop.hbase.keymeta.SystemKeyAccessor; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; @@ -32,10 +33,12 @@ import org.mockito.Mock; import org.mockito.MockitoAnnotations; import java.io.IOException; +import java.security.Key; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.stream.IntStream; +import javax.crypto.spec.SecretKeySpec; import static org.apache.hadoop.hbase.HConstants.SYSTEM_KEY_FILE_PREFIX; import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.ACTIVE; import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.INACTIVE; @@ -45,18 +48,20 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @RunWith(Suite.class) @Suite.SuiteClasses({ - TestSystemKey.TestAccessorWhenDisabled.class, - TestSystemKey.TestManagerWhenDisabled.class, - TestSystemKey.TestAccessor.class, - TestSystemKey.TestForInvalidFilenames.class, - TestSystemKey.TestManagerForErrors.class + TestSystemKeyAccessorAndManager.TestAccessorWhenDisabled.class, + TestSystemKeyAccessorAndManager.TestManagerWhenDisabled.class, + TestSystemKeyAccessorAndManager.TestAccessor.class, + TestSystemKeyAccessorAndManager.TestForInvalidFilenames.class, + TestSystemKeyAccessorAndManager.TestManagerForErrors.class, + TestSystemKeyAccessorAndManager.TestAccessorAdvanced.class // ADD THIS }) @Category({ MasterTests.class, SmallTests.class }) -public class TestSystemKey { +public class TestSystemKeyAccessorAndManager { private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); @Rule @@ -93,7 +98,7 @@ private static FileStatus createMockFile(String fileName) { @RunWith(BlockJUnit4ClassRunner.class) @Category({ MasterTests.class, SmallTests.class }) - public static class TestAccessorWhenDisabled extends TestSystemKey { + public static class TestAccessorWhenDisabled extends TestSystemKeyAccessorAndManager { @ClassRule public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestAccessorWhenDisabled.class); @@ -110,7 +115,7 @@ public static class TestAccessorWhenDisabled extends TestSystemKey { @RunWith(BlockJUnit4ClassRunner.class) @Category({ MasterTests.class, SmallTests.class }) - public static class TestManagerWhenDisabled extends TestSystemKey { + public static class TestManagerWhenDisabled extends TestSystemKeyAccessorAndManager { @ClassRule public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestManagerWhenDisabled.class); @@ -127,7 +132,7 @@ public static class TestManagerWhenDisabled extends TestSystemKey { @RunWith(BlockJUnit4ClassRunner.class) @Category({ MasterTests.class, SmallTests.class }) - public static class TestAccessor extends TestSystemKey { + public static class TestAccessor extends TestSystemKeyAccessorAndManager { @ClassRule public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestAccessor.class); @@ -188,7 +193,7 @@ public void testExtractKeySequenceForInvalidFilename() throws Exception { @RunWith(Parameterized.class) @Category({ MasterTests.class, SmallTests.class }) - public static class TestForInvalidFilenames extends TestSystemKey { + public static class TestForInvalidFilenames extends TestSystemKeyAccessorAndManager { @ClassRule public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestForInvalidFilenames.class); @@ -220,7 +225,7 @@ public void test() throws Exception { @RunWith(BlockJUnit4ClassRunner.class) @Category({ MasterTests.class, SmallTests.class }) - public static class TestManagerForErrors extends TestSystemKey { + public static class TestManagerForErrors extends TestSystemKeyAccessorAndManager { @ClassRule public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestManagerForErrors.class); @@ -328,6 +333,159 @@ public void testEnsureSystemKeyInitialized_RaceCondition() throws Exception { } } + @RunWith(BlockJUnit4ClassRunner.class) + @Category({ MasterTests.class, SmallTests.class }) + public static class TestAccessorAdvanced extends TestSystemKeyAccessorAndManager { + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestAccessorAdvanced.class); + + + + + @Test + public void testLoadSystemKeySuccess() throws Exception { + Path testPath = new Path("/test/key/path"); + String testMetadata = "test-metadata"; + + // Create test key data + Key testKey = new SecretKeySpec("test-key-bytes".getBytes(), "AES"); + ManagedKeyData testKeyData = new ManagedKeyData( + "custodian".getBytes(), "namespace", testKey, + ManagedKeyState.ACTIVE, testMetadata, 1000L, 0, 0); + + // Mock key provider + ManagedKeyProvider realProvider = mock(ManagedKeyProvider.class); + when(realProvider.unwrapKey(testMetadata, null)).thenReturn(testKeyData); + + // Create testable SystemKeyAccessor that overrides both loadKeyMetadata and getKeyProvider + SystemKeyAccessor testAccessor = new SystemKeyAccessor(mockMaster) { + @Override + protected String loadKeyMetadata(Path keyPath) throws IOException { + assertEquals(testPath, keyPath); + return testMetadata; + } + + @Override + protected ManagedKeyProvider getKeyProvider() { + return realProvider; + } + }; + + ManagedKeyData result = testAccessor.loadSystemKey(testPath); + assertEquals(testKeyData, result); + + // Verify the key provider was called correctly + verify(realProvider).unwrapKey(testMetadata, null); + } + + @Test(expected = RuntimeException.class) + public void testLoadSystemKeyNullResult() throws Exception { + Path testPath = new Path("/test/key/path"); + String testMetadata = "test-metadata"; + + // Mock key provider to return null + ManagedKeyProvider realProvider = mock(ManagedKeyProvider.class); + when(realProvider.unwrapKey(testMetadata, null)).thenReturn(null); + + SystemKeyAccessor testAccessor = new SystemKeyAccessor(mockMaster) { + @Override + protected String loadKeyMetadata(Path keyPath) throws IOException { + assertEquals(testPath, keyPath); + return testMetadata; + } + + @Override + protected ManagedKeyProvider getKeyProvider() { + return realProvider; + } + }; + + testAccessor.loadSystemKey(testPath); + } + + @Test + public void testExtractSystemKeySeqNumValid() throws Exception { + Path testPath1 = new Path(SYSTEM_KEY_FILE_PREFIX + "1"); + Path testPath123 = new Path(SYSTEM_KEY_FILE_PREFIX + "123"); + Path testPathMax = new Path(SYSTEM_KEY_FILE_PREFIX + Integer.MAX_VALUE); + + assertEquals(1, SystemKeyAccessor.extractSystemKeySeqNum(testPath1)); + assertEquals(123, SystemKeyAccessor.extractSystemKeySeqNum(testPath123)); + assertEquals(Integer.MAX_VALUE, SystemKeyAccessor.extractSystemKeySeqNum(testPathMax)); + } + + + + @Test(expected = IOException.class) + public void testGetAllSystemKeyFilesIOException() throws Exception { + when(mockFileSystem.globStatus(any())).thenThrow(new IOException("Filesystem error")); + systemKeyManager.getAllSystemKeyFiles(); + } + + @Test(expected = IOException.class) + public void testLoadSystemKeyIOExceptionFromMetadata() throws Exception { + Path testPath = new Path("/test/key/path"); + + SystemKeyAccessor testAccessor = new SystemKeyAccessor(mockMaster) { + @Override + protected String loadKeyMetadata(Path keyPath) throws IOException { + assertEquals(testPath, keyPath); + throw new IOException("Metadata read failed"); + } + + @Override + protected ManagedKeyProvider getKeyProvider() { + return mock(ManagedKeyProvider.class); + } + }; + + testAccessor.loadSystemKey(testPath); + } + + @Test(expected = RuntimeException.class) + public void testLoadSystemKeyProviderException() throws Exception { + Path testPath = new Path("/test/key/path"); + String testMetadata = "test-metadata"; + + SystemKeyAccessor testAccessor = new SystemKeyAccessor(mockMaster) { + @Override + protected String loadKeyMetadata(Path keyPath) throws IOException { + assertEquals(testPath, keyPath); + return testMetadata; + } + + @Override + protected ManagedKeyProvider getKeyProvider() { + throw new RuntimeException("Key provider not available"); + } + }; + + testAccessor.loadSystemKey(testPath); + } + + @Test + public void testExtractSystemKeySeqNumBoundaryValues() throws Exception { + // Test boundary values + Path testPath0 = new Path(SYSTEM_KEY_FILE_PREFIX + "0"); + Path testPathMin = new Path(SYSTEM_KEY_FILE_PREFIX + Integer.MIN_VALUE); + + assertEquals(0, SystemKeyAccessor.extractSystemKeySeqNum(testPath0)); + assertEquals(Integer.MIN_VALUE, SystemKeyAccessor.extractSystemKeySeqNum(testPathMin)); + } + + @Test + public void testExtractKeySequenceEdgeCases() throws Exception { + // Test various edge cases for extractKeySequence + Path validZero = new Path(SYSTEM_KEY_FILE_PREFIX + "0"); + Path validNegative = new Path(SYSTEM_KEY_FILE_PREFIX + "-1"); + + // Valid cases should still work + assertEquals(0, SystemKeyAccessor.extractKeySequence(validZero)); + assertEquals(-1, SystemKeyAccessor.extractKeySequence(validNegative)); + } + } + private static class MockSystemKeyManager extends SystemKeyManager { private final ManagedKeyProvider keyProvider; From dcc9ec019d16a86ea6b54a2e51225c376253280b Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Mon, 9 Jun 2025 19:36:38 +0530 Subject: [PATCH 38/70] New test clas --- .../hbase/keymeta/KeymetaMasterService.java | 1 - .../keymeta/TestKeymetaMasterService.java | 200 ++++++++++++++++++ .../hbase/keymeta/TestSystemKeyCache.java | 6 - .../TestSystemKeyAccessorAndManager.java | 9 +- 4 files changed, 203 insertions(+), 13 deletions(-) create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaMasterService.java diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaMasterService.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaMasterService.java index 497ed0a7d93f..d9b025943512 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaMasterService.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaMasterService.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.keymeta; import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; import org.apache.hadoop.hbase.client.TableDescriptorBuilder; import org.apache.hadoop.hbase.master.MasterServices; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaMasterService.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaMasterService.java new file mode 100644 index 000000000000..f81eb5dd6676 --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaMasterService.java @@ -0,0 +1,200 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.keymeta; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.TableDescriptors; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.TableDescriptor; +import org.apache.hadoop.hbase.master.MasterServices; +import org.apache.hadoop.hbase.testclassification.MasterTests; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + +import java.io.IOException; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +/** + * Tests for KeymetaMasterService class + */ +@Category({ MasterTests.class, SmallTests.class }) +public class TestKeymetaMasterService { + + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestKeymetaMasterService.class); + + @Mock + private MasterServices mockMaster; + @Mock + private TableDescriptors mockTableDescriptors; + + private Configuration conf; + private KeymetaMasterService service; + private AutoCloseable closeableMocks; + + @Before + public void setUp() throws Exception { + closeableMocks = MockitoAnnotations.openMocks(this); + + conf = new Configuration(); + when(mockMaster.getConfiguration()).thenReturn(conf); + when(mockMaster.getTableDescriptors()).thenReturn(mockTableDescriptors); + } + + @After + public void tearDown() throws Exception { + if (closeableMocks != null) { + closeableMocks.close(); + } + } + + @Test + public void testInitWithKeyManagementDisabled() throws Exception { + // Setup - disable key management + conf.setBoolean(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, false); + + service = new KeymetaMasterService(mockMaster); + + // Execute + service.init(); // Should return early without creating table + + // Verify - no table operations should be performed + verify(mockMaster, never()).getTableDescriptors(); + verify(mockMaster, never()).createSystemTable(any()); + } + + @Test + public void testInitWithKeyManagementEnabledAndTableExists() throws Exception { + // Setup - enable key management and table already exists + conf.setBoolean(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, true); + when(mockTableDescriptors.exists(KeymetaTableAccessor.KEY_META_TABLE_NAME)).thenReturn(true); + + service = new KeymetaMasterService(mockMaster); + + // Execute + service.init(); + + // Verify - table exists check is performed but no table creation + verify(mockMaster).getTableDescriptors(); + verify(mockTableDescriptors).exists(KeymetaTableAccessor.KEY_META_TABLE_NAME); + verify(mockMaster, never()).createSystemTable(any()); + } + + @Test + public void testInitWithKeyManagementEnabledAndTableDoesNotExist() throws Exception { + // Setup - enable key management and table does not exist + conf.setBoolean(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, true); + when(mockTableDescriptors.exists(KeymetaTableAccessor.KEY_META_TABLE_NAME)).thenReturn(false); + + service = new KeymetaMasterService(mockMaster); + + // Execute + service.init(); + + // Verify - table is created + verify(mockMaster).getTableDescriptors(); + verify(mockTableDescriptors).exists(KeymetaTableAccessor.KEY_META_TABLE_NAME); + verify(mockMaster).createSystemTable(any(TableDescriptor.class)); + } + + @Test + public void testInitWithTableDescriptorsIOException() throws Exception { + // Setup - enable key management but table descriptors throws IOException + conf.setBoolean(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, true); + when(mockTableDescriptors.exists(any(TableName.class))) + .thenThrow(new IOException("Table descriptors error")); + + service = new KeymetaMasterService(mockMaster); + + // Execute & Verify - IOException should propagate + try { + service.init(); + } catch (IOException e) { + // Expected exception + } + + verify(mockMaster).getTableDescriptors(); + verify(mockTableDescriptors).exists(KeymetaTableAccessor.KEY_META_TABLE_NAME); + verify(mockMaster, never()).createSystemTable(any()); + } + + @Test + public void testInitWithCreateSystemTableIOException() throws Exception { + // Setup - enable key management, table doesn't exist, but createSystemTable throws IOException + conf.setBoolean(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, true); + when(mockTableDescriptors.exists(KeymetaTableAccessor.KEY_META_TABLE_NAME)).thenReturn(false); + when(mockMaster.createSystemTable(any(TableDescriptor.class))) + .thenThrow(new IOException("Create table error")); + + service = new KeymetaMasterService(mockMaster); + + // Execute & Verify - IOException should propagate + try { + service.init(); + } catch (IOException e) { + // Expected exception + } + + verify(mockMaster).getTableDescriptors(); + verify(mockTableDescriptors).exists(KeymetaTableAccessor.KEY_META_TABLE_NAME); + verify(mockMaster).createSystemTable(any(TableDescriptor.class)); + } + + @Test + public void testConstructorWithMasterServices() throws Exception { + // Execute + service = new KeymetaMasterService(mockMaster); + + // Verify - constructor should not throw an exception + // The service should be created successfully (no exceptions = success) + // We don't verify internal calls since the constructor just stores references + } + + @Test + public void testMultipleInitCalls() throws Exception { + // Setup - enable key management and table exists + conf.setBoolean(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, true); + when(mockTableDescriptors.exists(KeymetaTableAccessor.KEY_META_TABLE_NAME)).thenReturn(true); + + service = new KeymetaMasterService(mockMaster); + + // Execute - call init multiple times + service.init(); + service.init(); + service.init(); + + // Verify - each call should check table existence (idempotent behavior) + verify(mockMaster, times(3)).getTableDescriptors(); + verify(mockTableDescriptors, times(3)).exists(KeymetaTableAccessor.KEY_META_TABLE_NAME); + verify(mockMaster, never()).createSystemTable(any()); + } +} \ No newline at end of file diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestSystemKeyCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestSystemKeyCache.java index bd051c311516..555b5b47c726 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestSystemKeyCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestSystemKeyCache.java @@ -23,7 +23,6 @@ import static org.junit.Assert.assertSame; import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; -import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -35,15 +34,10 @@ import java.util.List; import javax.crypto.spec.SecretKeySpec; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.crypto.key.TestKeyProvider; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseClassTestRule; -import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; -import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; -import org.apache.hadoop.hbase.io.crypto.MockManagedKeyProvider; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.Before; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java index 197795cb825d..2d71ad95e5c5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java @@ -58,7 +58,7 @@ TestSystemKeyAccessorAndManager.TestAccessor.class, TestSystemKeyAccessorAndManager.TestForInvalidFilenames.class, TestSystemKeyAccessorAndManager.TestManagerForErrors.class, - TestSystemKeyAccessorAndManager.TestAccessorAdvanced.class // ADD THIS + TestSystemKeyAccessorAndManager.TestAccessorMisc.class // ADD THIS }) @Category({ MasterTests.class, SmallTests.class }) public class TestSystemKeyAccessorAndManager { @@ -335,13 +335,10 @@ public void testEnsureSystemKeyInitialized_RaceCondition() throws Exception { @RunWith(BlockJUnit4ClassRunner.class) @Category({ MasterTests.class, SmallTests.class }) - public static class TestAccessorAdvanced extends TestSystemKeyAccessorAndManager { + public static class TestAccessorMisc extends TestSystemKeyAccessorAndManager { @ClassRule public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestAccessorAdvanced.class); - - - + HBaseClassTestRule.forClass(TestAccessorMisc.class); @Test public void testLoadSystemKeySuccess() throws Exception { From 5a71fd6be0010170a1f6a5ae73dfa1118352bcbe Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Mon, 9 Jun 2025 19:57:38 +0530 Subject: [PATCH 39/70] format --- .../hadoop/hbase/keymeta/KeymetaAdminClient.java | 6 ++++-- .../hadoop/hbase/io/crypto/ManagedKeyData.java | 4 ++-- .../hbase/io/crypto/ManagedKeyStoreKeyProvider.java | 4 ++-- .../apache/hadoop/hbase/keymeta/KeymetaAdmin.java | 6 ++++-- .../hadoop/hbase/io/crypto/TestManagedKeyData.java | 3 ++- .../hadoop/hbase/keymeta/KeyManagementBase.java | 8 ++++---- .../hbase/keymeta/KeymetaServiceEndpoint.java | 4 ++-- .../hadoop/hbase/keymeta/KeymetaTableAccessor.java | 13 ++++++++----- .../hadoop/hbase/master/SystemKeyManager.java | 6 ++++-- .../hadoop/hbase/keymeta/ManagedKeyTestBase.java | 9 ++++++--- .../hadoop/hbase/keymeta/TestKeymetaEndpoint.java | 3 ++- .../master/TestSystemKeyAccessorAndManager.java | 6 ++++-- .../hadoop/hbase/master/TestSystemKeyManager.java | 5 +++-- 13 files changed, 47 insertions(+), 30 deletions(-) diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java index 7a4f6d58ae4f..e9e17a2f7707 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java @@ -22,7 +22,8 @@ public class KeymetaAdminClient implements KeymetaAdmin { private ManagedKeysProtos.ManagedKeysService.BlockingInterface stub; public KeymetaAdminClient(Connection conn) throws IOException { - this.stub = ManagedKeysProtos.ManagedKeysService.newBlockingStub(conn.getAdmin().coprocessorService()); + this.stub = ManagedKeysProtos.ManagedKeysService.newBlockingStub( + conn.getAdmin().coprocessorService()); } @Override @@ -49,7 +50,8 @@ public List getManagedKeys(String keyCust, String keyNamespace) } } - private static List generateKeyDataList(ManagedKeysProtos.GetManagedKeysResponse stateResponse) { + private static List generateKeyDataList( + ManagedKeysProtos.GetManagedKeysResponse stateResponse) { List keyStates = new ArrayList<>(); for (ManagedKeysResponse state: stateResponse.getStateList()) { keyStates.add(new ManagedKeyData( diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java index 98c40359b962..d6672f9ab0d0 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java @@ -33,8 +33,8 @@ /** * This class represents an encryption key data which includes the key itself, its state, metadata * and a prefix. The metadata encodes enough information on the key such that it can be used to - * retrieve the exact same key again in the future. If the key state is {@link ManagedKeyState#FAILED} - * expect the key to be {@code null}. + * retrieve the exact same key again in the future. If the key state is + * {@link ManagedKeyState#FAILED} expect the key to be {@code null}. * * The key data is represented by the following fields: *
      diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java index f3f0d3d407b1..e5023a7419b8 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java @@ -36,8 +36,8 @@ public ManagedKeyData getSystemKey(byte[] clusterId) { // Encode clusterId too for consistency with that of key custodian. String keyMetadata = generateKeyMetadata(systemKeyAlias, ManagedKeyProvider.encodeToStr(clusterId)); - return new ManagedKeyData(clusterId, ManagedKeyData.KEY_SPACE_GLOBAL, key, ManagedKeyState.ACTIVE, - keyMetadata); + return new ManagedKeyData(clusterId, ManagedKeyData.KEY_SPACE_GLOBAL, key, + ManagedKeyState.ACTIVE, keyMetadata); } @Override diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java index f90aa6317552..0f01bf557bc4 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java @@ -35,7 +35,8 @@ public interface KeymetaAdmin { * @param keyCust The key custodian in base64 encoded format. * @param keyNamespace The namespace for the key management. * - * @return The list of {@link ManagedKeyData} objects each identifying the key and its current status. + * @return The list of {@link ManagedKeyData} objects each identifying the key and its current + * status. * @throws IOException if an error occurs while enabling key management. */ List enableKeyManagement(String keyCust, String keyNamespace) @@ -46,7 +47,8 @@ List enableKeyManagement(String keyCust, String keyNamespace) * * @param keyCust The key custodian in base64 encoded format. * @param keyNamespace The namespace for the key management. - * @return The list of {@link ManagedKeyData} objects each identifying the key and its current status. + * @return The list of {@link ManagedKeyData} objects each identifying the key and its current + * status. * @throws IOException if an error occurs while enabling key management. */ List getManagedKeys(String keyCust, String keyNamespace) diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyData.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyData.java index f665f1c7566c..52705ac8e27e 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyData.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyData.java @@ -99,7 +99,8 @@ public void testConstructorNullChecks() { @Test public void testConstructorWithFailedStateAndNullMetadata() { - ManagedKeyData keyData = new ManagedKeyData(keyCust, keyNamespace, null, ManagedKeyState.FAILED, null); + ManagedKeyData keyData = new ManagedKeyData(keyCust, keyNamespace, null, + ManagedKeyState.FAILED, null); assertNotNull(keyData); assertEquals(ManagedKeyState.FAILED, keyData.getKeyState()); assertNull(keyData.getKeyMetadata()); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java index 5711bf946e5a..4d861f8c408c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java @@ -56,8 +56,8 @@ protected Server getServer() { protected ManagedKeyProvider getKeyProvider() { KeyProvider provider = Encryption.getKeyProvider(getServer().getConfiguration()); if (!(provider instanceof ManagedKeyProvider)) { - throw new RuntimeException( - "KeyProvider: " + provider.getClass().getName() + " expected to be of type ManagedKeyProvider"); + throw new RuntimeException("KeyProvider: " + provider.getClass().getName() + + " expected to be of type ManagedKeyProvider"); } return (ManagedKeyProvider) provider; } @@ -100,8 +100,8 @@ protected int getPerCustodianNamespaceActiveKeyConfCount() throws IOException { HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_DEFAULT_COUNT); } if (perCustNamespaceActiveKeyCount <= 0) { - throw new IOException("Invalid value: " + perCustNamespaceActiveKeyCount + " configured for: " + - HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_COUNT); + throw new IOException("Invalid value: " + perCustNamespaceActiveKeyCount + " configured for: " + + HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_COUNT); } return perCustNamespaceActiveKeyCount; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java index a2882314b1b1..1f6a8169c9ff 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java @@ -60,8 +60,8 @@ public class KeymetaServiceEndpoint implements MasterCoprocessor { private final ManagedKeysService managedKeysService = new KeymetaAdminServiceImpl(); /** - * Starts the coprocessor by initializing the reference to the {@link org.apache.hadoop.hbase.master.MasterServices} - * instance. + * Starts the coprocessor by initializing the reference to the + * {@link org.apache.hadoop.hbase.master.MasterServices} * instance. * * @param env The coprocessor environment. * @throws IOException If an error occurs during initialization. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java index 6ac6d09bb445..ebb5a0ce9326 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java @@ -63,13 +63,15 @@ public class KeymetaTableAccessor extends KeyManagementBase { public static final byte[] DEK_CHECKSUM_QUAL_BYTES = Bytes.toBytes(DEK_CHECKSUM_QUAL_NAME); public static final String DEK_WRAPPED_BY_STK_QUAL_NAME = "dek_wrapped_by_stk"; - public static final byte[] DEK_WRAPPED_BY_STK_QUAL_BYTES = Bytes.toBytes(DEK_WRAPPED_BY_STK_QUAL_NAME); + public static final byte[] DEK_WRAPPED_BY_STK_QUAL_BYTES = + Bytes.toBytes(DEK_WRAPPED_BY_STK_QUAL_NAME); public static final String STK_CHECKSUM_QUAL_NAME = "stk_checksum"; public static final byte[] STK_CHECKSUM_QUAL_BYTES = Bytes.toBytes(STK_CHECKSUM_QUAL_NAME); public static final String REFRESHED_TIMESTAMP_QUAL_NAME = "refreshed_timestamp"; - public static final byte[] REFRESHED_TIMESTAMP_QUAL_BYTES = Bytes.toBytes(REFRESHED_TIMESTAMP_QUAL_NAME); + public static final byte[] REFRESHED_TIMESTAMP_QUAL_BYTES = + Bytes.toBytes(REFRESHED_TIMESTAMP_QUAL_NAME); public static final String KEY_STATE_QUAL_NAME = "key_state"; public static final byte[] KEY_STATE_QUAL_BYTES = Bytes.toBytes(KEY_STATE_QUAL_NAME); @@ -181,7 +183,8 @@ public ManagedKeyData getKey(byte[] key_cust, String keyNamespace, ManagedKeySta */ public ManagedKeyData getKey(byte[] key_cust, String keyNamespace, String keyMetadata) throws IOException, KeyException { - return getKeyInternal(key_cust, keyNamespace, ManagedKeyData.constructMetadataHash(keyMetadata)); + return getKeyInternal(key_cust, keyNamespace, + ManagedKeyData.constructMetadataHash(keyMetadata)); } /** @@ -194,8 +197,8 @@ public ManagedKeyData getKey(byte[] key_cust, String keyNamespace, String keyMet * @throws IOException when there is an underlying IOException. * @throws KeyException when there is an underlying KeyException. */ - private ManagedKeyData getKeyInternal(byte[] key_cust, String keyNamespace, byte[] keyMetadataHash) - throws IOException, KeyException { + private ManagedKeyData getKeyInternal(byte[] key_cust, String keyNamespace, + byte[] keyMetadataHash) throws IOException, KeyException { assertKeyManagementEnabled(); Connection connection = getServer().getConnection(); try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java index 096e48b3d975..db164530115d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java @@ -70,7 +70,8 @@ public ManagedKeyData rotateSystemKeyIfChanged() throws IOException { return rotateSystemKey(latestKeyMetadata, latestFileResult.getSecond()); } - private ManagedKeyData rotateSystemKey(String currentKeyMetadata, List allSystemKeyFiles) throws IOException { + private ManagedKeyData rotateSystemKey(String currentKeyMetadata, List allSystemKeyFiles) + throws IOException { ManagedKeyProvider provider = getKeyProvider(); ManagedKeyData clusterKey = provider.getSystemKey( master.getMasterFileSystem().getClusterId().toString().getBytes()); @@ -92,7 +93,8 @@ private ManagedKeyData rotateSystemKey(String currentKeyMetadata, List all return null; } - private boolean saveLatestSystemKey(String keyMetadata, List allSystemKeyFiles) throws IOException { + private boolean saveLatestSystemKey(String keyMetadata, List allSystemKeyFiles) + throws IOException { int nextSystemKeySeq = (allSystemKeyFiles.isEmpty() ? -1 : SystemKeyAccessor.extractKeySequence(allSystemKeyFiles.get(0))) + 1; LOG.info("Trying to save a new cluster key at seq: {}", nextSystemKeySeq); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyTestBase.java index 4b5ade468202..4813abcac750 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyTestBase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyTestBase.java @@ -11,13 +11,16 @@ public class ManagedKeyTestBase { @Before public void setUp() throws Exception { - TEST_UTIL.getConfiguration().set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockManagedKeyProvider.class.getName()); + TEST_UTIL.getConfiguration().set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, + MockManagedKeyProvider.class.getName()); TEST_UTIL.getConfiguration().set(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, "true"); - TEST_UTIL.getConfiguration().set("hbase.coprocessor.master.classes", KeymetaServiceEndpoint.class.getName()); + TEST_UTIL.getConfiguration().set("hbase.coprocessor.master.classes", + KeymetaServiceEndpoint.class.getName()); // Start the minicluster TEST_UTIL.startMiniCluster(1); - TEST_UTIL.waitFor(60000, () -> TEST_UTIL.getMiniHBaseCluster().getMaster().isInitialized()); + TEST_UTIL.waitFor(60000, + () -> TEST_UTIL.getMiniHBaseCluster().getMaster().isInitialized()); TEST_UTIL.waitUntilAllRegionsAssigned(KeymetaTableAccessor.KEY_META_TABLE_NAME); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java index 7e1ba09838cf..6b0bb6867b9c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java @@ -173,7 +173,8 @@ public void testGetResponseBuilderWithInvalidBase64() { .build(); // Act - ManagedKeysResponse.Builder result = KeymetaServiceEndpoint.getResponseBuilder(controller, request); + ManagedKeysResponse.Builder result = KeymetaServiceEndpoint.getResponseBuilder(controller, + request); // Assert assertNotNull(result); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java index 2d71ad95e5c5..3dd1adbb5a42 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java @@ -305,7 +305,8 @@ public void testEnsureSystemKeyInitialized_WithSaveFailure() throws Exception { when(mockFileSystem.create(any())).thenReturn(mockStream); when(mockFileSystem.rename(any(), any())).thenReturn(false); - RuntimeException ex = assertThrows(RuntimeException.class, manager::ensureSystemKeyInitialized); + RuntimeException ex = assertThrows(RuntimeException.class, + manager::ensureSystemKeyInitialized); assertEquals("Failed to generate or save System Key", ex.getMessage()); } @@ -486,7 +487,8 @@ public void testExtractKeySequenceEdgeCases() throws Exception { private static class MockSystemKeyManager extends SystemKeyManager { private final ManagedKeyProvider keyProvider; - public MockSystemKeyManager(MasterServices master, ManagedKeyProvider keyProvider) throws IOException { + public MockSystemKeyManager(MasterServices master, ManagedKeyProvider keyProvider) + throws IOException { super(master); this.keyProvider = keyProvider; //systemKeyDir = mock(Path.class); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyManager.java index e184add8f695..099ffa08a1db 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyManager.java @@ -94,7 +94,7 @@ public void testWithInvalidSystemKey() throws Exception { assertThrows(IOException.class, tmpCKM::ensureSystemKeyInitialized); } - private ManagedKeyData validateInitialState(HMaster master, MockManagedKeyProvider pbeKeyProvider ) + private ManagedKeyData validateInitialState(HMaster master, MockManagedKeyProvider pbeKeyProvider) throws IOException { SystemKeyAccessor systemKeyAccessor = new SystemKeyAccessor(master); assertEquals(1, systemKeyAccessor.getAllSystemKeyFiles().size()); @@ -111,6 +111,7 @@ private void restartSystem() throws Exception { TEST_UTIL.shutdownMiniHBaseCluster(); Thread.sleep(2000); TEST_UTIL.restartHBaseCluster(1); - TEST_UTIL.waitFor(60000, () -> TEST_UTIL.getMiniHBaseCluster().getMaster().isInitialized()); + TEST_UTIL.waitFor(60000, + () -> TEST_UTIL.getMiniHBaseCluster().getMaster().isInitialized()); } } From b006d49b0ae947b24a20abb4a319ec0b2cc501c9 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Wed, 18 Jun 2025 10:40:24 +0530 Subject: [PATCH 40/70] some cleanup --- .../hbase/keymeta/SystemKeyAccessor.java | 5 +++ .../hbase/master/TestKeymetaAdminImpl.java | 43 ++++++++++++------- 2 files changed, 32 insertions(+), 16 deletions(-) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java index 68b83c907872..9906ea1b6b52 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java @@ -45,6 +45,11 @@ public SystemKeyAccessor(Server server) throws IOException { this.systemKeyDir = CommonFSUtils.getSystemKeyDir(server.getConfiguration()); } + /** + * Return both the latest system key file and all system key files. + * @return a pair of the latest system key file and all system key files + * @throws IOException if there is an error getting the latest system key file + */ public Pair> getLatestSystemKeyFile() throws IOException { if (! isKeyManagementEnabled()) { return new Pair<>(null, null); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java index 582dd9b6351a..90dd63f3730f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java @@ -17,6 +17,7 @@ import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; +import org.junit.After; import org.junit.Before; import org.junit.ClassRule; import org.junit.Rule; @@ -62,6 +63,10 @@ public class TestKeymetaAdminImpl { private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); + private static final String CUST = "cust1"; + private static final String ENCODED_CUST = ManagedKeyProvider.encodeToStr(CUST.getBytes()); + + @Rule public TestName name = new TestName(); @@ -201,46 +206,52 @@ public void setUp() throws Exception { managedKeyProvider.setMultikeyGenMode(true); } + @After + public void tearDown() { + // Reset as this instance gets reused for more than 1 test. + managedKeyProvider.setMockedKeyState(CUST, ACTIVE); + } + @Test public void testEnable() throws Exception { List managedKeyStates; - String cust = "cust1"; - String encodedCust = ManagedKeyProvider.encodeToStr(cust.getBytes()); - managedKeyStates = keymetaAdmin.enableKeyManagement(encodedCust, keySpace); + // Test 1: Enable key management with 3 keys + managedKeyStates = keymetaAdmin.enableKeyManagement(ENCODED_CUST, keySpace); assertKeys(managedKeyStates, 3); - verify(keymetaAccessor).getAllKeys(cust.getBytes(), keySpace); + verify(keymetaAccessor).getAllKeys(CUST.getBytes(), keySpace); verify(keymetaAccessor, times(3)).addKey(any()); + // Test 2: Enable key management with 3 keys, but already enabled reset(keymetaAccessor); - - when(keymetaAccessor.getAllKeys(cust.getBytes(), keySpace)).thenReturn(managedKeyStates); - managedKeyStates = keymetaAdmin.enableKeyManagement(encodedCust, keySpace); + when(keymetaAccessor.getAllKeys(CUST.getBytes(), keySpace)).thenReturn(managedKeyStates); + managedKeyStates = keymetaAdmin.enableKeyManagement(ENCODED_CUST, keySpace); assertKeys(managedKeyStates, 3); verify(keymetaAccessor, times(0)).addKey(any()); + // Test 3: Enable key management with 4 keys, but only 1 key is added reset(keymetaAccessor); - when(keymetaAccessor.getAllKeys(cust.getBytes(), keySpace)).thenReturn(managedKeyStates); + when(keymetaAccessor.getAllKeys(CUST.getBytes(), keySpace)).thenReturn(managedKeyStates); keymetaAdmin.activeKeyCountOverride = 4; - managedKeyStates = keymetaAdmin.enableKeyManagement(encodedCust, keySpace); + managedKeyStates = keymetaAdmin.enableKeyManagement(ENCODED_CUST, keySpace); assertKeys(managedKeyStates, 1); verify(keymetaAccessor, times(1)).addKey(any()); + // Test 4: Enable key management when key provider is not able to generate any new keys reset(keymetaAccessor); - when(keymetaAccessor.getAllKeys(cust.getBytes(), keySpace)).thenReturn(managedKeyStates); + when(keymetaAccessor.getAllKeys(CUST.getBytes(), keySpace)).thenReturn(managedKeyStates); managedKeyProvider.setMultikeyGenMode(false); - managedKeyStates = keymetaAdmin.enableKeyManagement(encodedCust, keySpace); + managedKeyStates = keymetaAdmin.enableKeyManagement(ENCODED_CUST, keySpace); assertKeys(managedKeyStates, 0); verify(keymetaAccessor, times(0)).addKey(any()); - //reset(keymetaAccessor); - managedKeyProvider.setMockedKeyState(cust, FAILED); - managedKeyStates = keymetaAdmin.enableKeyManagement(encodedCust, keySpace); + // Test 5: Enable key management when key provider is not able to generate any new keys + reset(keymetaAccessor); + managedKeyProvider.setMockedKeyState(CUST, FAILED); + managedKeyStates = keymetaAdmin.enableKeyManagement(ENCODED_CUST, keySpace); assertNotNull(managedKeyStates); assertEquals(1, managedKeyStates.size()); assertEquals(FAILED, managedKeyStates.get(0).getKeyState()); verify(keymetaAccessor, times(1)).addKey(any()); - // NOTE: Reset as this instance is shared for more than 1 test. - managedKeyProvider.setMockedKeyState(cust, ACTIVE); } private static void assertKeys(List managedKeyStates, int expectedCnt) { From 4cfaff2e0cd1fdcf285498733edb9bf5b3b6fc48 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Wed, 18 Jun 2025 15:39:32 +0530 Subject: [PATCH 41/70] Javadoc --- .../src/main/java/org/apache/hadoop/hbase/HConstants.java | 7 +++++++ .../main/java/org/apache/hadoop/hbase/util/GsonUtil.java | 1 - 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java index fc2619f1ce48..7eff6f610df9 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java @@ -1309,18 +1309,25 @@ public enum OperationStatusCode { /** Configuration key for enabling WAL encryption, a boolean */ public static final String ENABLE_WAL_ENCRYPTION = "hbase.regionserver.wal.encryption"; + /** Property used by {@link ManagedKeyStoreKeyProvider} class to set the alias that identifies + * the current system key. */ public static final String CRYPTO_MANAGED_KEY_STORE_SYSTEM_KEY_NAME_CONF_KEY = "hbase.crypto.managed_key_store.system.key.name"; public static final String CRYPTO_MANAGED_KEY_STORE_CONF_KEY_PREFIX = "hbase.crypto.managed_key_store.cust."; + /** Enables or disables the key management feature. */ public static final String CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY = "hbase.crypto.managed_keys.enabled"; public static final boolean CRYPTO_MANAGED_KEYS_DEFAULT_ENABLED = false; + /** The number of keys to retrieve from Key Provider per each custodian and namespace + * combination. */ public static final String CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_COUNT = "hbase.crypto.managed_keys.per_cust_namespace.active_count"; public static final int CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_DEFAULT_COUNT = 1; + /** Enables or disables key lookup during data path as an alternative to static injection of keys + * using control path. */ public static final String CRYPTO_MANAGED_KEYS_DYNAMIC_LOOKUP_ENABLED_CONF_KEY = "hbase.crypto.managed_keys.dynamic_lookup.enabled"; public static final boolean CRYPTO_MANAGED_KEYS_DYNAMIC_LOOKUP_DEFAULT_ENABLED = true; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/GsonUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/GsonUtil.java index adb6536efbb1..792604864b17 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/GsonUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/GsonUtil.java @@ -69,7 +69,6 @@ public static GsonBuilder createGsonWithDisableHtmlEscaping() { public static Gson getDefaultInstance() { if (INSTANCE == null) { INSTANCE = createGson().create(); - } return INSTANCE; } From 4517f6dc60dd7b1a2c86f165c2b9256144a322a8 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Wed, 18 Jun 2025 18:10:38 +0530 Subject: [PATCH 42/70] Fix build error --- .../hbase/io/crypto/ManagedKeyStoreKeyProvider.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java index e5023a7419b8..f05c40d8733d 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java @@ -76,10 +76,10 @@ private void checkConfig() { } public static String generateKeyMetadata(String aliasName, String encodedCust) { - return GsonUtil.getDefaultInstance().toJson(new HashMap() {{ - put(KEY_METADATA_ALIAS, aliasName); - put(KEY_METADATA_CUST, encodedCust); - }}, HashMap.class); + return GsonUtil.getDefaultInstance().toJson(Map.of( + KEY_METADATA_ALIAS, aliasName, + KEY_METADATA_CUST, encodedCust + ), HashMap.class); } } From bcec30f41dd3d5c7554e7a05ccecddfb749dc85c Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Wed, 18 Jun 2025 19:17:58 +0530 Subject: [PATCH 43/70] Fix checkstyle errors for imports --- .../hbase/keymeta/KeymetaAdminClient.java | 15 ++++++----- .../hbase/io/crypto/ManagedKeyData.java | 24 ++++++++++-------- .../hbase/io/crypto/ManagedKeyProvider.java | 5 ++-- .../hbase/io/crypto/ManagedKeyState.java | 3 ++- .../io/crypto/ManagedKeyStoreKeyProvider.java | 10 ++++---- .../hadoop/hbase/keymeta/KeymetaAdmin.java | 5 ++-- .../hbase/io/crypto/TestManagedKeyData.java | 25 +++++++++++-------- .../hbase/keymeta/KeyManagementBase.java | 5 ++-- .../hbase/keymeta/KeymetaAdminImpl.java | 11 ++++---- .../hbase/keymeta/KeymetaMasterService.java | 3 ++- .../hbase/keymeta/KeymetaServiceEndpoint.java | 18 +++++++------ .../hbase/keymeta/KeymetaTableAccessor.java | 11 ++++---- .../hbase/keymeta/ManagedKeyAccessor.java | 7 +++--- .../hbase/keymeta/ManagedKeyDataCache.java | 12 ++++----- .../hbase/keymeta/SystemKeyAccessor.java | 16 ++++++------ .../hadoop/hbase/keymeta/SystemKeyCache.java | 9 ++++--- .../hadoop/hbase/master/SystemKeyManager.java | 4 ++- .../hbase/keymeta/TestKeyManagementBase.java | 2 +- .../keymeta/TestManagedKeyDataCache.java | 10 ++++---- 19 files changed, 109 insertions(+), 86 deletions(-) diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java index e9e17a2f7707..676f993b502d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java @@ -1,20 +1,23 @@ package org.apache.hadoop.hbase.keymeta; +import java.io.IOException; +import java.security.KeyException; +import java.util.ArrayList; +import java.util.List; + import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos; import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysRequest; import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysResponse; -import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; -import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.security.KeyException; -import java.util.ArrayList; -import java.util.List; + +import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException; + +import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; @InterfaceAudience.Public public class KeymetaAdminClient implements KeymetaAdmin { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java index d6672f9ab0d0..0d98ec053f04 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java @@ -17,18 +17,20 @@ */ package org.apache.hadoop.hbase.io.crypto; +import java.security.Key; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.Arrays; +import java.util.Base64; + import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.util.DataChecksum; -import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; import org.apache.yetus.audience.InterfaceAudience; -import java.security.Key; -import java.security.MessageDigest; -import java.security.NoSuchAlgorithmException; -import java.util.Arrays; -import java.util.Base64; + +import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; /** * This class represents an encryption key data which includes the key itself, its state, metadata @@ -64,7 +66,7 @@ public class ManagedKeyData { public static final String KEY_GLOBAL_CUSTODIAN = ManagedKeyProvider.encodeToStr(KEY_SPACE_GLOBAL.getBytes()); - private final byte[] keyCust; + private final byte[] keyCustodian; private final String keyNamespace; private final Key theKey; private final ManagedKeyState keyState; @@ -117,7 +119,7 @@ public ManagedKeyData(byte[] key_cust, String key_namespace, Key theKey, Managed Preconditions.checkArgument(writeOpCount >= 0, "writeOpCount: " + writeOpCount + " should be >= 0"); - this.keyCust = key_cust; + this.keyCustodian = key_cust; this.keyNamespace = key_namespace; this.theKey = theKey; this.keyState = keyState; @@ -129,7 +131,7 @@ public ManagedKeyData(byte[] key_cust, String key_namespace, Key theKey, Managed @VisibleForTesting public ManagedKeyData cloneWithoutKey() { - return new ManagedKeyData(keyCust, keyNamespace, null, keyState, keyMetadata, + return new ManagedKeyData(keyCustodian, keyNamespace, null, keyState, keyMetadata, refreshTimestamp, readOpCount, writeOpCount); } @@ -139,7 +141,7 @@ public ManagedKeyData cloneWithoutKey() { * @return The key custodian as a byte array. */ public byte[] getKeyCustodian() { - return keyCust; + return keyCustodian; } /** @@ -147,7 +149,7 @@ public byte[] getKeyCustodian() { * @return the encoded key custodian */ public String getKeyCustodianEncoded() { - return Base64.getEncoder().encodeToString(keyCust); + return Base64.getEncoder().encodeToString(keyCustodian); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java index a7fb4dbe177a..ff7152b6ed96 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java @@ -17,11 +17,12 @@ */ package org.apache.hadoop.hbase.io.crypto; -import org.apache.hadoop.conf.Configuration; -import org.apache.yetus.audience.InterfaceAudience; import java.io.IOException; import java.util.Base64; +import org.apache.hadoop.conf.Configuration; +import org.apache.yetus.audience.InterfaceAudience; + /** * Interface for key providers of managed keys. Defines methods for generating and managing * managed keys, as well as handling key storage and retrieval. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyState.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyState.java index 0cc31dd2c19b..8987e849e347 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyState.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyState.java @@ -17,10 +17,11 @@ */ package org.apache.hadoop.hbase.io.crypto; -import org.apache.yetus.audience.InterfaceAudience; import java.util.HashMap; import java.util.Map; +import org.apache.yetus.audience.InterfaceAudience; + /** * Enum of Managed key status. It is used to indicate the status of managed custodian keys. */ diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java index f05c40d8733d..ab2c3a343330 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java @@ -1,14 +1,15 @@ package org.apache.hadoop.hbase.io.crypto; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.util.GsonUtil; -import org.apache.yetus.audience.InterfaceAudience; import java.io.IOException; import java.security.Key; import java.util.HashMap; import java.util.Map; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.util.GsonUtil; +import org.apache.yetus.audience.InterfaceAudience; + @InterfaceAudience.Public public class ManagedKeyStoreKeyProvider extends KeyStoreKeyProvider implements ManagedKeyProvider { public static final String KEY_METADATA_ALIAS = "KeyAlias"; @@ -81,5 +82,4 @@ public static String generateKeyMetadata(String aliasName, String encodedCust) { KEY_METADATA_CUST, encodedCust ), HashMap.class); } - } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java index 0f01bf557bc4..d74af46b5173 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java @@ -17,12 +17,13 @@ */ package org.apache.hadoop.hbase.keymeta; -import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; -import org.apache.yetus.audience.InterfaceAudience; import java.io.IOException; import java.security.KeyException; import java.util.List; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; +import org.apache.yetus.audience.InterfaceAudience; + /** * KeymetaAdmin is an interface for administrative functions related to managed keys. * It handles the following methods: diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyData.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyData.java index 52705ac8e27e..d4b4fd019a64 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyData.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyData.java @@ -17,17 +17,6 @@ */ package org.apache.hadoop.hbase.io.crypto; -import org.apache.hadoop.hbase.HBaseClassTestRule; -import org.apache.hadoop.hbase.testclassification.MiscTests; -import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import javax.crypto.KeyGenerator; -import java.security.Key; -import java.security.NoSuchAlgorithmException; -import java.util.Base64; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; @@ -36,6 +25,20 @@ import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; +import java.security.Key; +import java.security.NoSuchAlgorithmException; +import java.util.Base64; + +import javax.crypto.KeyGenerator; + +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.testclassification.MiscTests; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; + @Category({ MiscTests.class, SmallTests.class }) public class TestManagedKeyData { @ClassRule diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java index 4d861f8c408c..1f899fcdfc25 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java @@ -17,15 +17,16 @@ */ package org.apache.hadoop.hbase.keymeta; +import java.io.IOException; + import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.io.crypto.Encryption; import org.apache.hadoop.hbase.io.crypto.KeyProvider; import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; -import org.apache.hadoop.hbase.Server; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; /** * A base class for all keymeta accessor/manager implementations. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java index e15c7102d4d9..fa54b05478e1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java @@ -17,6 +17,12 @@ */ package org.apache.hadoop.hbase.keymeta; +import java.io.IOException; +import java.security.KeyException; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; @@ -24,11 +30,6 @@ import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.security.KeyException; -import java.util.HashSet; -import java.util.List; -import java.util.Set; @InterfaceAudience.Private public class KeymetaAdminImpl extends KeymetaTableAccessor implements KeymetaAdmin { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaMasterService.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaMasterService.java index d9b025943512..68f78cd12dd3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaMasterService.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaMasterService.java @@ -17,6 +17,8 @@ */ package org.apache.hadoop.hbase.keymeta; +import java.io.IOException; + import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; import org.apache.hadoop.hbase.client.TableDescriptorBuilder; @@ -24,7 +26,6 @@ import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; @InterfaceAudience.Private public class KeymetaMasterService extends KeyManagementBase { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java index 1f6a8169c9ff..b8faf11bde5b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java @@ -17,6 +17,12 @@ */ package org.apache.hadoop.hbase.keymeta; +import java.io.IOException; +import java.security.KeyException; +import java.util.Base64; +import java.util.Collections; +import java.util.List; + import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.CoreCoprocessor; @@ -30,18 +36,14 @@ import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysResponse; import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.GetManagedKeysResponse; import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysService; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import org.apache.hbase.thirdparty.com.google.protobuf.ByteString; import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback; import org.apache.hbase.thirdparty.com.google.protobuf.RpcController; import org.apache.hbase.thirdparty.com.google.protobuf.Service; -import org.apache.yetus.audience.InterfaceAudience; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.security.KeyException; -import java.util.Base64; -import java.util.Collections; -import java.util.List; /** * This class implements a coprocessor service endpoint for the key management metadata operations. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java index ebb5a0ce9326..2418d77c9b48 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java @@ -17,6 +17,12 @@ */ package org.apache.hadoop.hbase.keymeta; +import java.io.IOException; +import java.security.Key; +import java.security.KeyException; +import java.util.ArrayList; +import java.util.List; + import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HConstants; @@ -38,11 +44,6 @@ import org.apache.hadoop.hbase.security.EncryptionUtil; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; -import java.io.IOException; -import java.security.Key; -import java.security.KeyException; -import java.util.ArrayList; -import java.util.List; /** * Accessor for keymeta table as part of key management. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java index c6190838ec11..f2a832ebe4bc 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java @@ -17,13 +17,14 @@ */ package org.apache.hadoop.hbase.keymeta; -import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; -import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; -import org.apache.yetus.audience.InterfaceAudience; import java.io.IOException; import java.security.KeyException; import java.util.List; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; +import org.apache.yetus.audience.InterfaceAudience; + /** * This class provides unified access on top of both {@code ManagedKeyDataCache} (L1) and * {@code KeymetaTableAccessor} (L2) to access managed keys. When the getter is called, it first diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java index fa1e1421a28c..224a9e922c04 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java @@ -17,17 +17,17 @@ */ package org.apache.hadoop.hbase.keymeta; -import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; -import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.yetus.audience.InterfaceAudience; - import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.HashMap; import java.util.concurrent.locks.ReentrantLock; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.yetus.audience.InterfaceAudience; + /** * In-memory cache for ManagedKeyData entries, using key metadata as the cache key. */ diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java index 9906ea1b6b52..087eb6bad75f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java @@ -17,6 +17,15 @@ */ package org.apache.hadoop.hbase.keymeta; +import static org.apache.hadoop.hbase.HConstants.SYSTEM_KEY_FILE_PREFIX; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; + import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileStatus; @@ -28,13 +37,6 @@ import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hbase.util.Pair; import org.apache.yetus.audience.InterfaceAudience; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Comparator; -import java.util.List; -import java.util.Map; -import java.util.TreeMap; -import static org.apache.hadoop.hbase.HConstants.SYSTEM_KEY_FILE_PREFIX; @InterfaceAudience.Private public class SystemKeyAccessor extends KeyManagementBase { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java index e0e4a5a84795..3a50b4ae86e4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java @@ -17,15 +17,16 @@ */ package org.apache.hadoop.hbase.keymeta; +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; + import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.TreeMap; @InterfaceAudience.Private public class SystemKeyCache { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java index db164530115d..45b021c77feb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java @@ -17,9 +17,12 @@ */ package org.apache.hadoop.hbase.master; +import static org.apache.hadoop.hbase.HConstants.SYSTEM_KEY_FILE_PREFIX; + import java.io.IOException; import java.util.List; import java.util.UUID; + import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; @@ -28,7 +31,6 @@ import org.apache.hadoop.hbase.keymeta.SystemKeyAccessor; import org.apache.hadoop.hbase.util.Pair; import org.apache.yetus.audience.InterfaceAudience; -import static org.apache.hadoop.hbase.HConstants.SYSTEM_KEY_FILE_PREFIX; @InterfaceAudience.Private public class SystemKeyManager extends SystemKeyAccessor { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeyManagementBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeyManagementBase.java index 6e6bb237fa60..747f59c2ef69 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeyManagementBase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeyManagementBase.java @@ -26,8 +26,8 @@ import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.Server; -import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.testclassification.MasterTests; +import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.Test; import org.junit.experimental.categories.Category; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java index 00c702b17098..77b38aee2184 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java @@ -21,6 +21,11 @@ import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.FAILED; import static org.junit.Assert.*; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -34,11 +39,6 @@ import org.junit.Test; import org.junit.experimental.categories.Category; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - @Category({ MasterTests.class, SmallTests.class }) public class TestManagedKeyDataCache { @ClassRule From b1fd7f534cfa51e5d0d2b84abdbeaef3e95654af Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Wed, 18 Jun 2025 19:50:32 +0530 Subject: [PATCH 44/70] Fix checkstyle errors --- .../hadoop/hbase/security/EncryptionUtil.java | 10 ++++++---- .../hadoop/hbase/io/crypto/ManagedKeyData.java | 18 +++++++++++------- .../hbase/keymeta/KeymetaTableAccessor.java | 7 +++---- .../hbase/keymeta/SystemKeyAccessor.java | 3 +-- 4 files changed, 21 insertions(+), 17 deletions(-) diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java index 4d0efba8122f..91630215e75d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java @@ -93,7 +93,8 @@ public static byte[] wrapKey(Configuration conf, String subject, Key key) throws * @param kek the key encryption key * @return the encrypted key bytes */ - public static byte[] wrapKey(Configuration conf, String subject, Key key, Key kek) throws IOException { + public static byte[] wrapKey(Configuration conf, String subject, Key key, Key kek) + throws IOException { // Wrap the key with the configured encryption algorithm. String algorithm = conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES); Cipher cipher = Encryption.getCipher(conf, algorithm); @@ -164,7 +165,8 @@ public static Key unwrapKey(Configuration conf, String subject, byte[] value, Ke } private static Key getUnwrapKey(Configuration conf, String subject, - EncryptionProtos.WrappedKey wrappedKey, Cipher cipher, Key kek) throws IOException, KeyException { + EncryptionProtos.WrappedKey wrappedKey, Cipher cipher, Key kek) + throws IOException, KeyException { String configuredHashAlgorithm = Encryption.getConfiguredHashAlgorithm(conf); String wrappedHashAlgorithm = wrappedKey.getHashAlgorithm().trim(); if (!configuredHashAlgorithm.equalsIgnoreCase(wrappedHashAlgorithm)) { @@ -178,8 +180,8 @@ private static Key getUnwrapKey(Configuration conf, String subject, ByteArrayOutputStream out = new ByteArrayOutputStream(); byte[] iv = wrappedKey.hasIv() ? wrappedKey.getIv().toByteArray() : null; if (kek != null) { - Encryption.decryptWithGivenKey(kek, out, wrappedKey.getData().newInput(), wrappedKey.getLength(), - cipher, iv); + Encryption.decryptWithGivenKey(kek, out, wrappedKey.getData().newInput(), + wrappedKey.getLength(), cipher, iv); } else { Encryption.decryptWithSubjectKey(out, wrappedKey.getData().newInput(), wrappedKey.getLength(), diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java index 0d98ec053f04..44b43852f8ee 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java @@ -25,7 +25,6 @@ import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; -import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.util.DataChecksum; import org.apache.yetus.audience.InterfaceAudience; @@ -129,7 +128,7 @@ public ManagedKeyData(byte[] key_cust, String key_namespace, Key theKey, Managed this.writeOpCount = writeOpCount; } - @VisibleForTesting + @InterfaceAudience.Private public ManagedKeyData cloneWithoutKey() { return new ManagedKeyData(keyCustodian, keyNamespace, null, keyState, keyMetadata, refreshTimestamp, readOpCount, writeOpCount); @@ -191,9 +190,14 @@ public String getKeyMetadata() { @Override public String toString() { - return "ManagedKeyData{" + "keyCustodian=" + Arrays.toString(keyCust) + ", keyNamespace='" - + keyNamespace + '\'' + ", keyState=" + keyState + ", keyMetadata='" + keyMetadata + '\'' - + ", refreshTimestamp=" + refreshTimestamp + ", keyChecksum=" + getKeyChecksum() + '}'; + return "ManagedKeyData{" + + "keyCustodian=" + Arrays.toString(keyCustodian) + + ", keyNamespace='" + keyNamespace + '\'' + + ", keyState=" + keyState + + ", keyMetadata='" + keyMetadata + '\'' + + ", refreshTimestamp=" + refreshTimestamp + + ", keyChecksum=" + getKeyChecksum() + + '}'; } public long getRefreshTimestamp() { @@ -282,7 +286,7 @@ public boolean equals(Object o) { ManagedKeyData that = (ManagedKeyData) o; return new EqualsBuilder() - .append(keyCust, that.keyCust) + .append(keyCustodian, that.keyCustodian) .append(keyNamespace, that.keyNamespace) .append(theKey, that.theKey) .append(keyState, that.keyState) @@ -293,7 +297,7 @@ public boolean equals(Object o) { @Override public int hashCode() { return new HashCodeBuilder(17, 37) - .append(keyCust) + .append(keyCustodian) .append(keyNamespace) .append(theKey) .append(keyState) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java index 2418d77c9b48..60ec5a53d251 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java @@ -23,7 +23,6 @@ import java.util.ArrayList; import java.util.List; -import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.NamespaceDescriptor; @@ -265,7 +264,7 @@ private Put addMutationColumns(Put put, ManagedKeyData keyData) throws IOExcepti return result; } - @VisibleForTesting + @InterfaceAudience.Private public static byte[] constructRowKeyForMetadata(ManagedKeyData keyData) { byte[] keyMetadataHash; if (keyData.getKeyState() == ManagedKeyState.FAILED && keyData.getKeyMetadata() == null) { @@ -278,7 +277,7 @@ public static byte[] constructRowKeyForMetadata(ManagedKeyData keyData) { keyMetadataHash); } - @VisibleForTesting + @InterfaceAudience.Private public static byte[] constructRowKeyForMetadata(byte[] key_cust, String keyNamespace, byte[] keyMetadataHash) { int custLength = key_cust.length; @@ -286,7 +285,7 @@ public static byte[] constructRowKeyForMetadata(byte[] key_cust, String keyNames keyMetadataHash); } - @VisibleForTesting + @InterfaceAudience.Private public static ManagedKeyData parseFromResult(Server server, byte[] key_cust, String keyNamespace, Result result) throws IOException, KeyException { if (result == null || result.isEmpty()) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java index 087eb6bad75f..eaca915c4c18 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java @@ -26,7 +26,6 @@ import java.util.Map; import java.util.TreeMap; -import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -98,7 +97,7 @@ public ManagedKeyData loadSystemKey(Path keyPath) throws IOException { return keyData; } - @VisibleForTesting + @InterfaceAudience.Private public static int extractSystemKeySeqNum(Path keyPath) throws IOException { if (keyPath.getName().startsWith(SYSTEM_KEY_FILE_PREFIX)) { try { From 51b122dcbc2879bebbcf17c5d2e76e2fb3a97834 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Wed, 18 Jun 2025 21:00:59 +0530 Subject: [PATCH 45/70] more checkstyle fixes --- .../hbase/io/crypto/ManagedKeyData.java | 8 ++- .../hbase/io/crypto/ManagedKeyProvider.java | 2 +- .../io/crypto/TestManagedKeyProvider.java | 2 +- .../hbase/keymeta/SystemKeyAccessor.java | 11 ++-- .../hadoop/hbase/keymeta/SystemKeyCache.java | 6 +- .../hbase/keymeta/DummyKeyProvider.java | 22 +++---- .../hbase/keymeta/TestKeyManagementBase.java | 40 ++++++------ .../hbase/keymeta/TestKeymetaEndpoint.java | 62 ++++++++++--------- .../keymeta/TestKeymetaTableAccessor.java | 23 ++++++- .../hbase/keymeta/TestManagedKeymeta.java | 30 ++++----- .../hbase/master/TestKeymetaAdminImpl.java | 46 +++++++------- .../TestSystemKeyAccessorAndManager.java | 4 +- 12 files changed, 144 insertions(+), 112 deletions(-) diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java index 44b43852f8ee..7fdecb9d4f09 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java @@ -279,9 +279,13 @@ public static byte[] constructMetadataHash(String metadata) { @Override public boolean equals(Object o) { - if (this == o) return true; + if (this == o) { + return true; + } - if (o == null || getClass() != o.getClass()) return false; + if (o == null || getClass() != o.getClass()) { + return false; + } ManagedKeyData that = (ManagedKeyData) o; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java index ff7152b6ed96..99f08d7aca5f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java @@ -76,7 +76,7 @@ public interface ManagedKeyProvider extends KeyProvider { * Decode the given key custodian which is encoded as Base64 string. * @param encodedKeyCust The encoded key custodian * @return the decoded key custodian - * @throws IOException + * @throws IOException if the encoded key custodian is not a valid Base64 string */ static byte[] decodeToBytes(String encodedKeyCust) throws IOException { byte[] key_cust; diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyProvider.java index 9bce1e5c3c9b..7d51a80638b6 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyProvider.java @@ -74,7 +74,7 @@ public static class TestManagedKeyStoreKeyProvider extends TestKeyStoreKeyProvid @Before public void setUp() throws Exception { - super.setUp();; + super.setUp(); managedKeyProvider = (ManagedKeyProvider) provider; managedKeyProvider.initConfig(conf); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java index eaca915c4c18..110670bd26cd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java @@ -49,7 +49,8 @@ public SystemKeyAccessor(Server server) throws IOException { /** * Return both the latest system key file and all system key files. * @return a pair of the latest system key file and all system key files - * @throws IOException if there is an error getting the latest system key file + * @throws IOException if there is an error getting the latest system key file or no cluster key + * is initialized yet. */ public Pair> getLatestSystemKeyFile() throws IOException { if (! isKeyManagementEnabled()) { @@ -70,7 +71,7 @@ public Pair> getLatestSystemKeyFile() throws IOException { * enabled, then return null. * * @return a list of all available cluster key files - * @throws IOException + * @throws IOException if there is an error getting the cluster key files */ public List getAllSystemKeyFiles() throws IOException { if (!isKeyManagementEnabled()) { @@ -112,11 +113,11 @@ public static int extractSystemKeySeqNum(Path keyPath) throws IOException { /** * Extract the key sequence number from the cluster key file name. - * @param clusterKeyFile + * @param clusterKeyFile the path to the cluster key file * @return The sequence or {@code -1} if not a valid sequence file. - * @throws IOException + * @throws IOException if the file name is not a valid sequence file */ - @VisibleForTesting + @InterfaceAudience.Private public static int extractKeySequence(Path clusterKeyFile) throws IOException { int keySeq = -1; if (clusterKeyFile.getName().startsWith(SYSTEM_KEY_FILE_PREFIX)) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java index 3a50b4ae86e4..6296fc3f9950 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java @@ -29,7 +29,7 @@ import org.slf4j.LoggerFactory; @InterfaceAudience.Private -public class SystemKeyCache { +public final class SystemKeyCache { private static final Logger LOG = LoggerFactory.getLogger(SystemKeyCache.class); private final ManagedKeyData latestSystemKey; @@ -37,9 +37,9 @@ public class SystemKeyCache { /** * Construct the System Key cache from the specified accessor. - * @param accessor + * @param accessor the accessor to use to load the system keys * @return the cache or {@code null} if no keys are found. - * @throws IOException + * @throws IOException if there is an error loading the system keys */ public static SystemKeyCache createCache(SystemKeyAccessor accessor) throws IOException { List allSystemKeyFiles = accessor.getAllSystemKeyFiles(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/DummyKeyProvider.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/DummyKeyProvider.java index bdbaa9fa009f..036110c5c120 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/DummyKeyProvider.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/DummyKeyProvider.java @@ -22,17 +22,17 @@ import org.apache.hadoop.hbase.io.crypto.KeyProvider; public class DummyKeyProvider implements KeyProvider { - @Override - public void init(String params) { - } + @Override + public void init(String params) { + } - @Override - public Key[] getKeys(String[] aliases) { - return null; - } + @Override + public Key[] getKeys(String[] aliases) { + return null; + } - @Override - public Key getKey(String alias) { - return null; - } + @Override + public Key getKey(String alias) { + return null; + } } \ No newline at end of file diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeyManagementBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeyManagementBase.java index 747f59c2ef69..4e26df5a76a3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeyManagementBase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeyManagementBase.java @@ -33,32 +33,32 @@ @Category({ MasterTests.class, SmallTests.class }) public class TestKeyManagementBase { - public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass( - TestKeyManagementBase.class); + public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass( + TestKeyManagementBase.class); - @Test - public void testGetKeyProviderWithInvalidProvider() throws Exception { - // Setup configuration with a non-ManagedKeyProvider - Configuration conf = new Configuration(); - conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, + @Test + public void testGetKeyProviderWithInvalidProvider() throws Exception { + // Setup configuration with a non-ManagedKeyProvider + Configuration conf = new Configuration(); + conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, "org.apache.hadoop.hbase.keymeta.DummyKeyProvider"); - Server mockServer = mock(Server.class); - when(mockServer.getConfiguration()).thenReturn(conf); + Server mockServer = mock(Server.class); + when(mockServer.getConfiguration()).thenReturn(conf); - KeyManagementBase keyMgmt = new TestKeyManagement(mockServer); + KeyManagementBase keyMgmt = new TestKeyManagement(mockServer); - // Should throw RuntimeException when provider is not ManagedKeyProvider - RuntimeException exception = assertThrows(RuntimeException.class, () -> { - keyMgmt.getKeyProvider(); - }); + // Should throw RuntimeException when provider is not ManagedKeyProvider + RuntimeException exception = assertThrows(RuntimeException.class, () -> { + keyMgmt.getKeyProvider(); + }); - assertTrue(exception.getMessage().contains("expected to be of type ManagedKeyProvider")); - } + assertTrue(exception.getMessage().contains("expected to be of type ManagedKeyProvider")); + } - private static class TestKeyManagement extends KeyManagementBase { - public TestKeyManagement(Server server) { - super(server); - } + private static class TestKeyManagement extends KeyManagementBase { + public TestKeyManagement(Server server) { + super(server); } + } } \ No newline at end of file diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java index 6b0bb6867b9c..c1618a004025 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java @@ -17,34 +17,6 @@ */ package org.apache.hadoop.hbase.keymeta; -import org.apache.hadoop.hbase.CoprocessorEnvironment; -import org.apache.hadoop.hbase.HBaseClassTestRule; -import org.apache.hadoop.hbase.coprocessor.HasMasterServices; -import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; -import org.apache.hadoop.hbase.keymeta.KeymetaServiceEndpoint.KeymetaAdminServiceImpl; -import org.apache.hadoop.hbase.master.MasterServices; -import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.GetManagedKeysResponse; -import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysRequest; -import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysResponse; -import org.apache.hadoop.hbase.testclassification.MasterTests; -import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hbase.thirdparty.com.google.protobuf.ByteString; -import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback; -import org.apache.hbase.thirdparty.com.google.protobuf.RpcController; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import javax.crypto.spec.SecretKeySpec; -import java.io.IOException; -import java.security.KeyException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Base64; -import java.util.List; import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.ACTIVE; import static org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeyState.KEY_ACTIVE; import static org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeyState.KEY_FAILED; @@ -52,7 +24,6 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.contains; @@ -62,6 +33,39 @@ import static org.mockito.Mockito.when; import static org.mockito.Mockito.withSettings; +import java.io.IOException; +import java.security.KeyException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Base64; +import java.util.List; + +import javax.crypto.spec.SecretKeySpec; + +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + +import org.apache.hbase.thirdparty.com.google.protobuf.ByteString; +import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback; +import org.apache.hbase.thirdparty.com.google.protobuf.RpcController; + +import org.apache.hadoop.hbase.CoprocessorEnvironment; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.coprocessor.HasMasterServices; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; +import org.apache.hadoop.hbase.keymeta.KeymetaServiceEndpoint.KeymetaAdminServiceImpl; +import org.apache.hadoop.hbase.master.MasterServices; +import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.GetManagedKeysResponse; +import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysRequest; +import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysResponse; +import org.apache.hadoop.hbase.testclassification.MasterTests; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.apache.hadoop.hbase.util.Bytes; + @Category({ MasterTests.class, SmallTests.class }) public class TestKeymetaEndpoint { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java index cada943b64ee..3c80f928c9e1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java @@ -33,8 +33,17 @@ import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.WRITE_OP_COUNT_QUAL_BYTES; import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.constructRowKeyForMetadata; import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.parseFromResult; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.anyLong; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; import java.io.IOException; import java.util.Arrays; @@ -50,7 +59,15 @@ import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.Server; -import org.apache.hadoop.hbase.client.*; +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.client.Durability; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.Increment; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.client.ResultScanner; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; import org.apache.hadoop.hbase.io.crypto.MockManagedKeyProvider; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java index dee4e5128e97..3413f440926c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java @@ -17,6 +17,21 @@ */ package org.apache.hadoop.hbase.keymeta; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.io.IOException; +import java.lang.reflect.Field; +import java.security.KeyException; +import java.util.List; + +import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException; + import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.io.crypto.Encryption; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; @@ -24,25 +39,12 @@ import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; import org.apache.hadoop.hbase.io.crypto.MockManagedKeyProvider; import org.apache.hadoop.hbase.master.HMaster; +import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos; -import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException; -import org.mockito.stubbing.OngoingStubbing; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; -import static org.mockito.Mockito.any; -import static org.junit.Assert.assertThrows; -import static org.junit.Assert.assertTrue; -import java.lang.reflect.Field; -import java.io.IOException; -import java.security.KeyException; -import java.util.List; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; @Category({ MasterTests.class, MediumTests.class }) public class TestManagedKeymeta extends ManagedKeyTestBase { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java index 90dd63f3730f..de00303c4284 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java @@ -1,8 +1,33 @@ package org.apache.hadoop.hbase.master; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyData.KEY_SPACE_GLOBAL; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.ACTIVE; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.DISABLED; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.FAILED; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.INACTIVE; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.argThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.reset; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.io.IOException; +import java.security.Key; +import java.security.KeyException; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; + import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; @@ -31,27 +56,6 @@ import org.junit.runners.Parameterized.Parameters; import org.junit.runners.Suite; -import java.io.IOException; -import java.security.Key; -import java.security.KeyException; -import java.util.Arrays; -import java.util.Collection; -import java.util.List; - -import static org.apache.hadoop.hbase.io.crypto.ManagedKeyData.KEY_SPACE_GLOBAL; -import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.*; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThrows; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.argThat; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.reset; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - @RunWith(Suite.class) @Suite.SuiteClasses({ TestKeymetaAdminImpl.TestWhenDisabled.class, TestKeymetaAdminImpl.TestAdminImpl.class, diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java index 3dd1adbb5a42..c9706776d421 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java @@ -174,7 +174,7 @@ public void testGetWithMultiple() throws Exception { Path systemKeyDir = CommonFSUtils.getSystemKeyDir(conf); when(mockFileSystem.globStatus(eq(new Path(systemKeyDir, SYSTEM_KEY_FILE_PREFIX+"*")))) - .thenReturn( mockFileStatuses ); + .thenReturn(mockFileStatuses); List files = systemKeyManager.getAllSystemKeyFiles(); assertEquals(3, files.size()); @@ -257,7 +257,7 @@ public void setUp() throws Exception { @After public void tearDown() throws Exception { - closeableMocks.close();; + closeableMocks.close(); } @Test From 5de212ce450a57c57faad548a4a3b75ac17de04d Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Wed, 18 Jun 2025 22:02:20 +0530 Subject: [PATCH 46/70] fix spotbugs --- .../org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java index 110670bd26cd..5a89d38a0bb2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java @@ -102,7 +102,7 @@ public ManagedKeyData loadSystemKey(Path keyPath) throws IOException { public static int extractSystemKeySeqNum(Path keyPath) throws IOException { if (keyPath.getName().startsWith(SYSTEM_KEY_FILE_PREFIX)) { try { - return Integer.valueOf(keyPath.getName().substring(SYSTEM_KEY_FILE_PREFIX.length())); + return Integer.parseInt(keyPath.getName().substring(SYSTEM_KEY_FILE_PREFIX.length())); } catch (NumberFormatException e) { LOG.error("Invalid file name for a cluster key: {}", keyPath, e); @@ -124,7 +124,7 @@ public static int extractKeySequence(Path clusterKeyFile) throws IOException { String seqStr = clusterKeyFile.getName().substring(SYSTEM_KEY_FILE_PREFIX.length()); if (! seqStr.isEmpty()) { try { - keySeq = Integer.valueOf(seqStr); + keySeq = Integer.parseInt(seqStr); } catch (NumberFormatException e) { throw new IOException("Invalid file name for a cluster key: " + clusterKeyFile, e); } From 61b2bbf7a4bb18b03a0a1b081912c15cda446eb3 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Thu, 19 Jun 2025 11:36:27 +0530 Subject: [PATCH 47/70] Fix NPE caused by changing new HashMap to Map.of --- .../io/crypto/ManagedKeyStoreKeyProvider.java | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java index ab2c3a343330..c19a793654ed 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java @@ -9,12 +9,16 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.util.GsonUtil; import org.apache.yetus.audience.InterfaceAudience; +import com.google.gson.reflect.TypeToken; @InterfaceAudience.Public public class ManagedKeyStoreKeyProvider extends KeyStoreKeyProvider implements ManagedKeyProvider { public static final String KEY_METADATA_ALIAS = "KeyAlias"; public static final String KEY_METADATA_CUST = "KeyCustodian"; + private static final java.lang.reflect.Type KEY_METADATA_TYPE = + new TypeToken>(){}.getType(); + private Configuration conf; @Override @@ -54,7 +58,7 @@ public ManagedKeyData getManagedKey(byte[] key_cust, String key_namespace) throw @Override public ManagedKeyData unwrapKey(String keyMetadataStr, byte[] wrappedKey) throws IOException { Map keyMetadata = GsonUtil.getDefaultInstance().fromJson(keyMetadataStr, - HashMap.class); + KEY_METADATA_TYPE); String encodedCust = keyMetadata.get(KEY_METADATA_CUST); String activeStatusConfKey = HConstants.CRYPTO_MANAGED_KEY_STORE_CONF_KEY_PREFIX + encodedCust + ".active"; @@ -77,9 +81,9 @@ private void checkConfig() { } public static String generateKeyMetadata(String aliasName, String encodedCust) { - return GsonUtil.getDefaultInstance().toJson(Map.of( - KEY_METADATA_ALIAS, aliasName, - KEY_METADATA_CUST, encodedCust - ), HashMap.class); + Map metadata = new HashMap<>(2); + metadata.put(KEY_METADATA_ALIAS, aliasName); + metadata.put(KEY_METADATA_CUST, encodedCust); + return GsonUtil.getDefaultInstance().toJson(metadata, HashMap.class); } } From ad7d8178fc8fc81c5fae7eb6e196289a89afebac Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Thu, 19 Jun 2025 11:49:54 +0530 Subject: [PATCH 48/70] javadoc warning --- .../src/main/java/org/apache/hadoop/hbase/HConstants.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java index 7eff6f610df9..38c3add280ab 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java @@ -1309,7 +1309,7 @@ public enum OperationStatusCode { /** Configuration key for enabling WAL encryption, a boolean */ public static final String ENABLE_WAL_ENCRYPTION = "hbase.regionserver.wal.encryption"; - /** Property used by {@link ManagedKeyStoreKeyProvider} class to set the alias that identifies + /** Property used by ManagedKeyStoreKeyProvider class to set the alias that identifies * the current system key. */ public static final String CRYPTO_MANAGED_KEY_STORE_SYSTEM_KEY_NAME_CONF_KEY = "hbase.crypto.managed_key_store.system.key.name"; From 47a9a58da4eba6ccf9562478e8eda508c6f8ddf4 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Thu, 19 Jun 2025 18:39:24 +0530 Subject: [PATCH 49/70] Fix test failures and errors --- .../java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java | 2 +- .../org/apache/hadoop/hbase/keymeta/TestKeyManagementBase.java | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java index 6296fc3f9950..778f270cc69a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java @@ -29,7 +29,7 @@ import org.slf4j.LoggerFactory; @InterfaceAudience.Private -public final class SystemKeyCache { +public class SystemKeyCache { private static final Logger LOG = LoggerFactory.getLogger(SystemKeyCache.class); private final ManagedKeyData latestSystemKey; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeyManagementBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeyManagementBase.java index 4e26df5a76a3..ab871b241830 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeyManagementBase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeyManagementBase.java @@ -28,11 +28,13 @@ import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; @Category({ MasterTests.class, SmallTests.class }) public class TestKeyManagementBase { + @ClassRule public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass( TestKeyManagementBase.class); From 846ea5923a61cbe992e6228fc9626d211fa0d8d7 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Fri, 20 Jun 2025 09:41:33 +0530 Subject: [PATCH 50/70] Add missing license header --- .../io/crypto/ManagedKeyStoreKeyProvider.java | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java index c19a793654ed..868e46de00cc 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package org.apache.hadoop.hbase.io.crypto; import java.io.IOException; From 2256d71da41797ab512bb35590f1a940b4b163dd Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Fri, 20 Jun 2025 10:10:41 +0530 Subject: [PATCH 51/70] Revert an inadvertent whitespace change --- .../org/apache/hadoop/hbase/regionserver/HRegion.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index 165b1cd68a34..1801b4d971ed 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -806,8 +806,8 @@ public HRegion(final HRegionFileSystem fs, final WAL wal, final Configuration co this.conf = new CompoundConfiguration().add(confParam).addBytesMap(htd.getValues()); this.cellComparator = htd.isMetaTable() || conf.getBoolean(USE_META_CELL_COMPARATOR, DEFAULT_USE_META_CELL_COMPARATOR) - ? MetaCellComparator.META_COMPARATOR - : CellComparatorImpl.COMPARATOR; + ? MetaCellComparator.META_COMPARATOR + : CellComparatorImpl.COMPARATOR; this.lock = new ReentrantReadWriteLock( conf.getBoolean(FAIR_REENTRANT_CLOSE_LOCK, DEFAULT_FAIR_REENTRANT_CLOSE_LOCK)); this.regionLockHolders = new ConcurrentHashMap<>(); @@ -906,9 +906,9 @@ public HRegion(final HRegionFileSystem fs, final WAL wal, final Configuration co // disable stats tracking system tables, but check the config for everything else this.regionStatsEnabled = htd.getTableName().getNamespaceAsString() .equals(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR) - ? false - : conf.getBoolean(HConstants.ENABLE_CLIENT_BACKPRESSURE, - HConstants.DEFAULT_ENABLE_CLIENT_BACKPRESSURE); + ? false + : conf.getBoolean(HConstants.ENABLE_CLIENT_BACKPRESSURE, + HConstants.DEFAULT_ENABLE_CLIENT_BACKPRESSURE); this.maxCellSize = conf.getLong(HBASE_MAX_CELL_SIZE_KEY, DEFAULT_MAX_CELL_SIZE); this.miniBatchSize = From 65a1c1dce0820e265781f8b28457635d2e93a8ab Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Fri, 20 Jun 2025 15:33:04 +0530 Subject: [PATCH 52/70] Fix for test failures on cluster ID --- .../java/org/apache/hadoop/hbase/master/MasterFileSystem.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java index a4e1202f40a3..0ffbfd15c41d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java @@ -136,7 +136,7 @@ private void createInitialFileSystemLayout() throws IOException { HConstants.CORRUPT_DIR_NAME, ReplicationUtils.REMOTE_WAL_DIR_NAME }; // check if the root directory exists checkRootDir(this.rootdir, conf, this.fs); - checkRootDir(this.systemKeyDir, conf, this.fs); + checkSubDir(this.systemKeyDir, HBASE_DIR_PERMS); // Check the directories under rootdir. checkTempDir(this.tempdir, conf, this.fs); From 13f265433e84639d57d5bfd3a174632379ab0c00 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Mon, 23 Jun 2025 10:59:06 +0530 Subject: [PATCH 53/70] One more file missing license header --- .../hbase/keymeta/KeymetaAdminClient.java | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java index 676f993b502d..dddbbdfeae87 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package org.apache.hadoop.hbase.keymeta; import java.io.IOException; From 895749bb4e6688e3ef411953e51993d16a933a90 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Mon, 23 Jun 2025 17:23:00 +0530 Subject: [PATCH 54/70] Add missing license header --- .../hbase/keymeta/ManagedKeyTestBase.java | 17 +++++++++++++++++ .../hbase/master/TestKeymetaAdminImpl.java | 17 +++++++++++++++++ .../master/TestSystemKeyAccessorAndManager.java | 17 +++++++++++++++++ 3 files changed, 51 insertions(+) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyTestBase.java index 4813abcac750..bdc5902022e3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyTestBase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyTestBase.java @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package org.apache.hadoop.hbase.keymeta; import org.apache.hadoop.hbase.HBaseTestingUtil; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java index de00303c4284..b1ae39435357 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package org.apache.hadoop.hbase.master; import static org.apache.hadoop.hbase.io.crypto.ManagedKeyData.KEY_SPACE_GLOBAL; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java index c9706776d421..6f145454c49e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package org.apache.hadoop.hbase.master; import org.apache.hadoop.conf.Configuration; From 015c1960715d2788a0ef90e51579998260cee1ba Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Wed, 25 Jun 2025 14:46:44 +0530 Subject: [PATCH 55/70] Address most of Rubocop errors --- .../src/main/ruby/hbase/keymeta_admin.rb | 16 +++++--- hbase-shell/src/main/ruby/shell.rb | 5 ++- .../shell/commands/enable_key_management.rb | 11 +++++- .../shell/commands/keymeta_command_base.rb | 38 +++++++++++-------- .../ruby/shell/commands/show_key_status.rb | 11 +++++- 5 files changed, 56 insertions(+), 25 deletions(-) diff --git a/hbase-shell/src/main/ruby/hbase/keymeta_admin.rb b/hbase-shell/src/main/ruby/hbase/keymeta_admin.rb index 729f02d0dc03..f70abbdde55b 100644 --- a/hbase-shell/src/main/ruby/hbase/keymeta_admin.rb +++ b/hbase-shell/src/main/ruby/hbase/keymeta_admin.rb @@ -1,3 +1,5 @@ +# +# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information @@ -14,12 +16,15 @@ # See the License for the specific language governing permissions and # limitations under the License. # +# frozen_string_literal: true -include Java +require 'java' java_import org.apache.hadoop.hbase.io.crypto.ManagedKeyData java_import org.apache.hadoop.hbase.keymeta.KeymetaAdminClient module Hbase + # KeymetaAdmin is a class that provides a Ruby interface to the HBase Key Management API. + # It is used to interface with the HBase Key Management API. class KeymetaAdmin def initialize(connection) @connection = connection @@ -42,11 +47,10 @@ def get_key_statuses(key_info) end def extract_cust_info(key_info) - custInfo = key_info.split(':') - raise(ArgumentError, 'Invalid cust:namespace format') unless (custInfo.length == 1 || - custInfo.length == 2) - return custInfo[0], custInfo.length > 1 ? custInfo[1] : - ManagedKeyData::KEY_SPACE_GLOBAL + cust_info = key_info.split(':') + raise(ArgumentError, 'Invalid cust:namespace format') unless [1, 2].include?(cust_info.length) + + [cust_info[0], cust_info.length > 1 ? cust_info[1] : ManagedKeyData::KEY_SPACE_GLOBAL] end end end diff --git a/hbase-shell/src/main/ruby/shell.rb b/hbase-shell/src/main/ruby/shell.rb index c87498bc6d35..10f24c4a0d24 100644 --- a/hbase-shell/src/main/ruby/shell.rb +++ b/hbase-shell/src/main/ruby/shell.rb @@ -622,8 +622,9 @@ def self.exception_handler(hide_traceback) Shell.load_command_group( 'keymeta', full_name: 'Keymeta', - comment: "NOTE: The KeyMeta Coprocessor Endpoint must be enabled on the Master else commands fail with: - UnknownProtocolException: No registered Master Coprocessor Endpoint found for ManagedKeysService", + comment: "NOTE: The KeyMeta Coprocessor Endpoint must be enabled on the Master else commands fail + with: UnknownProtocolException: No registered Master Coprocessor Endpoint found for + ManagedKeysService", commands: %w[ enable_key_management show_key_status diff --git a/hbase-shell/src/main/ruby/shell/commands/enable_key_management.rb b/hbase-shell/src/main/ruby/shell/commands/enable_key_management.rb index 796c32f7d70b..9a6d0422ad4e 100644 --- a/hbase-shell/src/main/ruby/shell/commands/enable_key_management.rb +++ b/hbase-shell/src/main/ruby/shell/commands/enable_key_management.rb @@ -1,3 +1,5 @@ +# +# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information @@ -14,17 +16,24 @@ # See the License for the specific language governing permissions and # limitations under the License. # +# frozen_string_literal: true require 'shell/commands/keymeta_command_base' module Shell module Commands + # EnableKeyManagement is a class that provides a Ruby interface to enable key management via + # HBase Key Management API. class EnableKeyManagement < KeymetaCommandBase def help <<-EOF Enable key management for a given cust:namespace (cust in Base64 format). If no namespace is specified, the global namespace (*) is used. -EOF + +Example: + hbase> enable_key_management 'cust:namespace' + hbase> enable_key_management 'cust' + EOF end def command(key_info) diff --git a/hbase-shell/src/main/ruby/shell/commands/keymeta_command_base.rb b/hbase-shell/src/main/ruby/shell/commands/keymeta_command_base.rb index 79e8087cfc6b..92b52f5e4be5 100644 --- a/hbase-shell/src/main/ruby/shell/commands/keymeta_command_base.rb +++ b/hbase-shell/src/main/ruby/shell/commands/keymeta_command_base.rb @@ -1,3 +1,5 @@ +# +# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information @@ -14,26 +16,32 @@ # See the License for the specific language governing permissions and # limitations under the License. # +# frozen_string_literal: true module Shell module Commands + # KeymetaCommandBase is a base class for all key management commands. class KeymetaCommandBase < Command def print_key_statuses(statuses) - formatter.header(['ENCODED-KEY', 'NAMESPACE', 'STATUS', 'METADATA', 'METADATA-HASH', - 'REFRESH-TIMESTAMP', 'READ-OP-COUNT', 'WRITE-OP-COUNT']) - statuses.each { |status| - formatter.row([ - status.getKeyCustodianEncoded(), - status.getKeyNamespace(), - status.getKeyStatus().toString(), - status.getKeyMetadata(), - status.getKeyMetadataHashEncoded(), - status.getRefreshTimestamp(), - status.getReadOpCount(), - status.getWriteOpCount() - ]) - } - formatter.footer(statuses.size()) + formatter.header(%w[ENCODED-KEY NAMESPACE STATUS METADATA METADATA-HASH + REFRESH-TIMESTAMP READ-OP-COUNT WRITE-OP-COUNT]) + statuses.each { |status| formatter.row(format_status_row(status)) } + formatter.footer(statuses.size) + end + + private + + def format_status_row(status) + [ + status.getKeyCustodianEncoded, + status.getKeyNamespace, + status.getKeyStatus.toString, + status.getKeyMetadata, + status.getKeyMetadataHashEncoded, + status.getRefreshTimestamp, + status.getReadOpCount, + status.getWriteOpCount + ] end end end diff --git a/hbase-shell/src/main/ruby/shell/commands/show_key_status.rb b/hbase-shell/src/main/ruby/shell/commands/show_key_status.rb index 5866285884c9..d3670d094ed3 100644 --- a/hbase-shell/src/main/ruby/shell/commands/show_key_status.rb +++ b/hbase-shell/src/main/ruby/shell/commands/show_key_status.rb @@ -1,3 +1,5 @@ +# +# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information @@ -14,17 +16,24 @@ # See the License for the specific language governing permissions and # limitations under the License. # +# frozen_string_literal: true require 'shell/commands/keymeta_command_base' module Shell module Commands + # ShowKeyStatus is a class that provides a Ruby interface to show key statuses via + # HBase Key Management API. class ShowKeyStatus < KeymetaCommandBase def help <<-EOF Show key statuses for a given cust:namespace (cust in Base64 format). If no namespace is specified, the global namespace (*) is used. -EOF + +Example: + hbase> show_key_status 'cust:namespace' + hbase> show_key_status 'cust' + EOF end def command(key_info) From cb033c9e8822b72a380e5e9bbd65cbe8e6d0ff82 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Wed, 25 Jun 2025 15:45:59 +0530 Subject: [PATCH 56/70] Another attempt to organize imports --- .../hbase/keymeta/KeymetaAdminClient.java | 6 +- .../hbase/io/crypto/ManagedKeyData.java | 4 +- .../hbase/io/crypto/ManagedKeyProvider.java | 2 +- .../hbase/io/crypto/ManagedKeyState.java | 1 - .../io/crypto/ManagedKeyStoreKeyProvider.java | 4 +- .../hadoop/hbase/keymeta/KeymetaAdmin.java | 2 +- .../io/crypto/TestManagedKeyProvider.java | 35 ++++++----- .../hbase/keymeta/KeyManagementBase.java | 6 +- .../hbase/keymeta/KeymetaAdminImpl.java | 6 +- .../hbase/keymeta/KeymetaMasterService.java | 6 +- .../hbase/keymeta/KeymetaServiceEndpoint.java | 6 +- .../hbase/keymeta/KeymetaTableAccessor.java | 2 +- .../hbase/keymeta/ManagedKeyAccessor.java | 2 +- .../hbase/keymeta/ManagedKeyDataCache.java | 2 +- .../hbase/keymeta/SystemKeyAccessor.java | 2 +- .../hadoop/hbase/keymeta/SystemKeyCache.java | 6 +- .../hadoop/hbase/master/SystemKeyManager.java | 2 +- .../hbase/keymeta/TestKeymetaEndpoint.java | 10 ++-- .../keymeta/TestKeymetaMasterService.java | 30 +++++----- .../keymeta/TestKeymetaTableAccessor.java | 26 ++++----- .../hbase/keymeta/TestManagedKeyAccessor.java | 38 ++++++------ .../hbase/keymeta/TestManagedKeymeta.java | 10 ++-- .../hbase/master/TestKeymetaAdminImpl.java | 27 +++++---- .../TestSystemKeyAccessorAndManager.java | 58 ++++++++++--------- .../hbase/master/TestSystemKeyManager.java | 20 ++++--- 25 files changed, 159 insertions(+), 154 deletions(-) diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java index dddbbdfeae87..fb0abd39c867 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java @@ -21,6 +21,9 @@ import java.security.KeyException; import java.util.ArrayList; import java.util.List; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; @@ -28,9 +31,6 @@ import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos; import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysRequest; import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysResponse; -import org.apache.yetus.audience.InterfaceAudience; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java index 7fdecb9d4f09..c8ecf11f59d8 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java @@ -22,12 +22,12 @@ import java.security.NoSuchAlgorithmException; import java.util.Arrays; import java.util.Base64; - import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; +import org.apache.yetus.audience.InterfaceAudience; + import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.util.DataChecksum; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java index 99f08d7aca5f..3396188d63c0 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java @@ -19,9 +19,9 @@ import java.io.IOException; import java.util.Base64; +import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.conf.Configuration; -import org.apache.yetus.audience.InterfaceAudience; /** * Interface for key providers of managed keys. Defines methods for generating and managing diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyState.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyState.java index 8987e849e347..7c2fdbccc687 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyState.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyState.java @@ -19,7 +19,6 @@ import java.util.HashMap; import java.util.Map; - import org.apache.yetus.audience.InterfaceAudience; /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java index 868e46de00cc..64c2afd0b6c8 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java @@ -21,12 +21,12 @@ import java.security.Key; import java.util.HashMap; import java.util.Map; +import org.apache.yetus.audience.InterfaceAudience; +import com.google.gson.reflect.TypeToken; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.util.GsonUtil; -import org.apache.yetus.audience.InterfaceAudience; -import com.google.gson.reflect.TypeToken; @InterfaceAudience.Public public class ManagedKeyStoreKeyProvider extends KeyStoreKeyProvider implements ManagedKeyProvider { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java index d74af46b5173..728f435e7d1c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java @@ -20,9 +20,9 @@ import java.io.IOException; import java.security.KeyException; import java.util.List; +import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; -import org.apache.yetus.audience.InterfaceAudience; /** * KeymetaAdmin is an interface for administrative functions related to managed keys. diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyProvider.java index 7d51a80638b6..be93409813a5 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyProvider.java @@ -17,6 +17,24 @@ */ package org.apache.hadoop.hbase.io.crypto; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStoreKeyProvider.KEY_METADATA_ALIAS; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStoreKeyProvider.KEY_METADATA_CUST; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; + +import java.security.KeyStore; +import java.security.MessageDigest; +import java.util.Arrays; +import java.util.Base64; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; +import java.util.UUID; +import javax.crypto.spec.SecretKeySpec; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -25,6 +43,7 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.GsonUtil; + import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; @@ -33,22 +52,6 @@ import org.junit.runners.BlockJUnit4ClassRunner; import org.junit.runners.Parameterized; import org.junit.runners.Suite; -import javax.crypto.spec.SecretKeySpec; -import java.security.KeyStore; -import java.security.MessageDigest; -import java.util.Arrays; -import java.util.Base64; -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; -import java.util.UUID; -import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStoreKeyProvider.KEY_METADATA_ALIAS; -import static org.apache.hadoop.hbase.io.crypto.ManagedKeyStoreKeyProvider.KEY_METADATA_CUST; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThrows; -import static org.junit.Assert.assertTrue; @RunWith(Suite.class) @Suite.SuiteClasses({ TestManagedKeyProvider.TestManagedKeyStoreKeyProvider.class, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java index 1f899fcdfc25..bd81221129ab 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java @@ -18,15 +18,15 @@ package org.apache.hadoop.hbase.keymeta; import java.io.IOException; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.io.crypto.Encryption; import org.apache.hadoop.hbase.io.crypto.KeyProvider; import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; -import org.apache.yetus.audience.InterfaceAudience; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * A base class for all keymeta accessor/manager implementations. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java index fa54b05478e1..85cd5013b2c7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java @@ -22,14 +22,14 @@ import java.util.HashSet; import java.util.List; import java.util.Set; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; -import org.apache.yetus.audience.InterfaceAudience; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; @InterfaceAudience.Private public class KeymetaAdminImpl extends KeymetaTableAccessor implements KeymetaAdmin { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaMasterService.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaMasterService.java index 68f78cd12dd3..ae1c6215a61a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaMasterService.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaMasterService.java @@ -18,14 +18,14 @@ package org.apache.hadoop.hbase.keymeta; import java.io.IOException; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; import org.apache.hadoop.hbase.client.TableDescriptorBuilder; import org.apache.hadoop.hbase.master.MasterServices; -import org.apache.yetus.audience.InterfaceAudience; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; @InterfaceAudience.Private public class KeymetaMasterService extends KeyManagementBase { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java index b8faf11bde5b..a4ceeaa615a4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java @@ -22,6 +22,9 @@ import java.util.Base64; import java.util.Collections; import java.util.List; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.hbase.CoprocessorEnvironment; @@ -36,9 +39,6 @@ import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysResponse; import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.GetManagedKeysResponse; import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysService; -import org.apache.yetus.audience.InterfaceAudience; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.com.google.protobuf.ByteString; import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java index 60ec5a53d251..deee88d810d2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java @@ -22,6 +22,7 @@ import java.security.KeyException; import java.util.ArrayList; import java.util.List; +import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HConstants; @@ -42,7 +43,6 @@ import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; import org.apache.hadoop.hbase.security.EncryptionUtil; import org.apache.hadoop.hbase.util.Bytes; -import org.apache.yetus.audience.InterfaceAudience; /** * Accessor for keymeta table as part of key management. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java index f2a832ebe4bc..06c3919adf08 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java @@ -20,10 +20,10 @@ import java.io.IOException; import java.security.KeyException; import java.util.List; +import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; -import org.apache.yetus.audience.InterfaceAudience; /** * This class provides unified access on top of both {@code ManagedKeyDataCache} (L1) and diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java index 224a9e922c04..cc2e091a0c9a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java @@ -22,11 +22,11 @@ import java.util.List; import java.util.Map; import java.util.concurrent.locks.ReentrantLock; +import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; import org.apache.hadoop.hbase.util.Bytes; -import org.apache.yetus.audience.InterfaceAudience; /** * In-memory cache for ManagedKeyData entries, using key metadata as the cache key. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java index 5a89d38a0bb2..46bf6fe25ede 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java @@ -25,6 +25,7 @@ import java.util.List; import java.util.Map; import java.util.TreeMap; +import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileStatus; @@ -35,7 +36,6 @@ import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hbase.util.Pair; -import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class SystemKeyAccessor extends KeyManagementBase { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java index 778f270cc69a..5b61e800e27b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java @@ -21,13 +21,13 @@ import java.util.List; import java.util.Map; import java.util.TreeMap; - -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; + @InterfaceAudience.Private public class SystemKeyCache { private static final Logger LOG = LoggerFactory.getLogger(SystemKeyCache.class); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java index 45b021c77feb..1fe5e1d4076e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java @@ -22,6 +22,7 @@ import java.io.IOException; import java.util.List; import java.util.UUID; +import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.Path; @@ -30,7 +31,6 @@ import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; import org.apache.hadoop.hbase.keymeta.SystemKeyAccessor; import org.apache.hadoop.hbase.util.Pair; -import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class SystemKeyManager extends SystemKeyAccessor { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java index c1618a004025..dc320f1b658d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java @@ -39,9 +39,7 @@ import java.util.Arrays; import java.util.Base64; import java.util.List; - import javax.crypto.spec.SecretKeySpec; - import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; @@ -49,10 +47,6 @@ import org.mockito.Mock; import org.mockito.MockitoAnnotations; -import org.apache.hbase.thirdparty.com.google.protobuf.ByteString; -import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback; -import org.apache.hbase.thirdparty.com.google.protobuf.RpcController; - import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.coprocessor.HasMasterServices; @@ -66,6 +60,10 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hbase.thirdparty.com.google.protobuf.ByteString; +import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback; +import org.apache.hbase.thirdparty.com.google.protobuf.RpcController; + @Category({ MasterTests.class, SmallTests.class }) public class TestKeymetaEndpoint { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaMasterService.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaMasterService.java index f81eb5dd6676..f524f1d2f55f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaMasterService.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaMasterService.java @@ -17,6 +17,21 @@ */ package org.apache.hadoop.hbase.keymeta; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.io.IOException; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HConstants; @@ -26,21 +41,6 @@ import org.apache.hadoop.hbase.master.MasterServices; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; - -import java.io.IOException; - -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; /** * Tests for KeymetaMasterService class diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java index 3c80f928c9e1..21d9efb43ce7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java @@ -52,6 +52,19 @@ import java.util.List; import java.util.Map; import java.util.NavigableMap; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.junit.runners.BlockJUnit4ClassRunner; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameter; +import org.junit.runners.Suite; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; @@ -75,19 +88,6 @@ import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.BlockJUnit4ClassRunner; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameter; -import org.junit.runners.Suite; -import org.mockito.ArgumentCaptor; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; @RunWith(Suite.class) @Suite.SuiteClasses({ diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyAccessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyAccessor.java index dd4e5393926c..f54c0e8fb0e8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyAccessor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyAccessor.java @@ -17,6 +17,26 @@ */ package org.apache.hadoop.hbase.keymeta; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyData.KEY_SPACE_GLOBAL; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.clearInvocations; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.util.ArrayList; +import java.util.Arrays; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -27,24 +47,6 @@ import org.apache.hadoop.hbase.io.crypto.MockManagedKeyProvider; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import java.util.ArrayList; -import java.util.Arrays; -import static org.apache.hadoop.hbase.io.crypto.ManagedKeyData.KEY_SPACE_GLOBAL; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.clearInvocations; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; @Category({ MasterTests.class, SmallTests.class }) public class TestManagedKeyAccessor { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java index 3413f440926c..7d228aa2ff48 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java @@ -29,8 +29,9 @@ import java.lang.reflect.Field; import java.security.KeyException; import java.util.List; - -import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.io.crypto.Encryption; @@ -42,9 +43,8 @@ import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; + +import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException; @Category({ MasterTests.class, MediumTests.class }) public class TestManagedKeymeta extends ManagedKeyTestBase { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java index b1ae39435357..ee42ab428c81 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java @@ -40,11 +40,23 @@ import java.util.Arrays; import java.util.Collection; import java.util.List; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.TestName; +import org.junit.runner.RunWith; +import org.junit.runners.BlockJUnit4ClassRunner; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameter; +import org.junit.runners.Parameterized.Parameters; +import org.junit.runners.Suite; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; - import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; @@ -59,19 +71,6 @@ import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; -import org.junit.runner.RunWith; -import org.junit.runners.BlockJUnit4ClassRunner; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameter; -import org.junit.runners.Parameterized.Parameters; -import org.junit.runners.Suite; @RunWith(Suite.class) @Suite.SuiteClasses({ TestKeymetaAdminImpl.TestWhenDisabled.class, diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java index 6f145454c49e..9ab79ff5c92e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java @@ -17,23 +17,18 @@ */ package org.apache.hadoop.hbase.master; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FSDataOutputStream; -import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.ClusterId; -import org.apache.hadoop.hbase.HBaseClassTestRule; -import org.apache.hadoop.hbase.HBaseTestingUtil; -import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; -import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; -import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; -import org.apache.hadoop.hbase.keymeta.SystemKeyAccessor; -import org.apache.hadoop.hbase.testclassification.MasterTests; -import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.apache.hadoop.hbase.util.CommonFSUtils; -import org.apache.hadoop.hbase.util.Pair; +import static org.apache.hadoop.hbase.HConstants.SYSTEM_KEY_FILE_PREFIX; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.ACTIVE; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.INACTIVE; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + import org.junit.After; import org.junit.Before; import org.junit.ClassRule; @@ -56,17 +51,24 @@ import java.util.List; import java.util.stream.IntStream; import javax.crypto.spec.SecretKeySpec; -import static org.apache.hadoop.hbase.HConstants.SYSTEM_KEY_FILE_PREFIX; -import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.ACTIVE; -import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.INACTIVE; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThrows; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.ClusterId; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HBaseTestingUtil; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; +import org.apache.hadoop.hbase.keymeta.SystemKeyAccessor; +import org.apache.hadoop.hbase.testclassification.MasterTests; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.apache.hadoop.hbase.util.CommonFSUtils; +import org.apache.hadoop.hbase.util.Pair; @RunWith(Suite.class) @Suite.SuiteClasses({ diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyManager.java index 099ffa08a1db..0e5a279590cf 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyManager.java @@ -17,6 +17,17 @@ */ package org.apache.hadoop.hbase.master; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.security.Key; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; + import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.io.crypto.Encryption; import org.apache.hadoop.hbase.io.crypto.KeyProvider; @@ -30,15 +41,6 @@ import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import java.io.IOException; -import java.security.Key; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertThrows; -import static org.junit.Assert.assertTrue; @Category({ MasterTests.class, MediumTests.class }) public class TestSystemKeyManager extends ManagedKeyTestBase { From 60c956884bd9c3ff5306594a1a88847cab4c7f4b Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Wed, 25 Jun 2025 16:14:56 +0530 Subject: [PATCH 57/70] Additional checkstyle import fixes --- .../hbase/keymeta/KeymetaAdminClient.java | 6 ++-- .../hbase/io/crypto/ManagedKeyData.java | 2 +- .../hbase/io/crypto/ManagedKeyProvider.java | 2 +- .../io/crypto/ManagedKeyStoreKeyProvider.java | 4 +-- .../hadoop/hbase/keymeta/KeymetaAdmin.java | 2 +- .../apache/hadoop/hbase/util/GsonUtil.java | 3 +- .../apache/hadoop/hbase/HBaseServerBase.java | 2 +- .../hbase/MockRegionServerServices.java | 2 +- .../java/org/apache/hadoop/hbase/Server.java | 2 +- .../hbase/keymeta/KeyManagementBase.java | 6 ++-- .../hbase/keymeta/KeymetaAdminImpl.java | 6 ++-- .../hbase/keymeta/KeymetaMasterService.java | 6 ++-- .../hbase/keymeta/KeymetaServiceEndpoint.java | 8 ++--- .../hbase/keymeta/KeymetaTableAccessor.java | 2 +- .../hbase/keymeta/ManagedKeyAccessor.java | 2 +- .../hbase/keymeta/ManagedKeyDataCache.java | 2 +- .../hbase/keymeta/SystemKeyAccessor.java | 2 +- .../hadoop/hbase/keymeta/SystemKeyCache.java | 6 ++-- .../hadoop/hbase/master/SystemKeyManager.java | 2 +- .../regionserver/ReplicationSyncUp.java | 2 +- .../hbase/keymeta/TestKeymetaEndpoint.java | 12 ++++---- .../keymeta/TestKeymetaMasterService.java | 14 ++++----- .../keymeta/TestKeymetaTableAccessor.java | 26 ++++++++-------- .../hbase/keymeta/TestManagedKeyAccessor.java | 14 ++++----- .../hbase/keymeta/TestManagedKeymeta.java | 6 ++-- .../hbase/master/MockNoopMasterServices.java | 2 +- .../hadoop/hbase/master/MockRegionServer.java | 2 +- .../hbase/master/TestActiveMasterManager.java | 4 +-- .../hbase/master/TestKeymetaAdminImpl.java | 26 ++++++++-------- .../TestSystemKeyAccessorAndManager.java | 30 +++++++++---------- .../hbase/master/TestSystemKeyManager.java | 10 +++---- .../cleaner/TestReplicationHFileCleaner.java | 2 +- ...onProcedureStorePerformanceEvaluation.java | 2 +- .../regionserver/TestHeapMemoryManager.java | 2 +- .../token/TestTokenAuthentication.java | 4 +-- .../apache/hadoop/hbase/util/MockServer.java | 4 +-- 36 files changed, 115 insertions(+), 114 deletions(-) diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java index fb0abd39c867..dddbbdfeae87 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java @@ -21,9 +21,6 @@ import java.security.KeyException; import java.util.ArrayList; import java.util.List; -import org.apache.yetus.audience.InterfaceAudience; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; @@ -31,6 +28,9 @@ import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos; import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysRequest; import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysResponse; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java index c8ecf11f59d8..d4f2a7b5d6ca 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java @@ -24,10 +24,10 @@ import java.util.Base64; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.util.DataChecksum; +import org.apache.yetus.audience.InterfaceAudience; import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java index 3396188d63c0..99f08d7aca5f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java @@ -19,9 +19,9 @@ import java.io.IOException; import java.util.Base64; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.conf.Configuration; +import org.apache.yetus.audience.InterfaceAudience; /** * Interface for key providers of managed keys. Defines methods for generating and managing diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java index 64c2afd0b6c8..b9005e1b27e7 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyStoreKeyProvider.java @@ -17,16 +17,16 @@ */ package org.apache.hadoop.hbase.io.crypto; +import com.google.gson.reflect.TypeToken; import java.io.IOException; import java.security.Key; import java.util.HashMap; import java.util.Map; -import org.apache.yetus.audience.InterfaceAudience; -import com.google.gson.reflect.TypeToken; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.util.GsonUtil; +import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Public public class ManagedKeyStoreKeyProvider extends KeyStoreKeyProvider implements ManagedKeyProvider { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java index 728f435e7d1c..d74af46b5173 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java @@ -20,9 +20,9 @@ import java.io.IOException; import java.security.KeyException; import java.util.List; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; +import org.apache.yetus.audience.InterfaceAudience; /** * KeymetaAdmin is an interface for administrative functions related to managed keys. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/GsonUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/GsonUtil.java index 792604864b17..a4a8ce82b2a8 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/GsonUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/GsonUtil.java @@ -19,9 +19,10 @@ import java.io.IOException; import java.util.concurrent.atomic.LongAdder; -import org.apache.hbase.thirdparty.com.google.gson.Gson; + import org.apache.yetus.audience.InterfaceAudience; +import org.apache.hbase.thirdparty.com.google.gson.Gson; import org.apache.hbase.thirdparty.com.google.gson.GsonBuilder; import org.apache.hbase.thirdparty.com.google.gson.LongSerializationPolicy; import org.apache.hbase.thirdparty.com.google.gson.TypeAdapter; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java index 8c18ad2349a4..0fa2d26a0eda 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java @@ -54,9 +54,9 @@ import org.apache.hadoop.hbase.ipc.RpcServerInterface; import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; import org.apache.hadoop.hbase.keymeta.KeymetaAdminImpl; +import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; import org.apache.hadoop.hbase.keymeta.SystemKeyAccessor; import org.apache.hadoop.hbase.keymeta.SystemKeyCache; -import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.master.MasterCoprocessorHost; import org.apache.hadoop.hbase.namequeues.NamedQueueRecorder; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java index 821a7a2a7dda..3827ad147e0d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java @@ -39,8 +39,8 @@ import org.apache.hadoop.hbase.io.hfile.BlockCache; import org.apache.hadoop.hbase.ipc.RpcServerInterface; import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; -import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; +import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.mob.MobFileCache; import org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager; import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java index 120430b5c637..d122510e2248 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java @@ -24,8 +24,8 @@ import org.apache.hadoop.hbase.client.AsyncConnection; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; -import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; +import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.yetus.audience.InterfaceAudience; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java index bd81221129ab..1f899fcdfc25 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java @@ -18,15 +18,15 @@ package org.apache.hadoop.hbase.keymeta; import java.io.IOException; -import org.apache.yetus.audience.InterfaceAudience; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.io.crypto.Encryption; import org.apache.hadoop.hbase.io.crypto.KeyProvider; import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A base class for all keymeta accessor/manager implementations. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java index 85cd5013b2c7..fa54b05478e1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java @@ -22,14 +22,14 @@ import java.util.HashSet; import java.util.List; import java.util.Set; -import org.apache.yetus.audience.InterfaceAudience; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private public class KeymetaAdminImpl extends KeymetaTableAccessor implements KeymetaAdmin { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaMasterService.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaMasterService.java index ae1c6215a61a..68f78cd12dd3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaMasterService.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaMasterService.java @@ -18,14 +18,14 @@ package org.apache.hadoop.hbase.keymeta; import java.io.IOException; -import org.apache.yetus.audience.InterfaceAudience; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; import org.apache.hadoop.hbase.client.TableDescriptorBuilder; import org.apache.hadoop.hbase.master.MasterServices; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private public class KeymetaMasterService extends KeyManagementBase { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java index a4ceeaa615a4..e03caa901d66 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java @@ -22,9 +22,6 @@ import java.util.Base64; import java.util.Collections; import java.util.List; -import org.apache.yetus.audience.InterfaceAudience; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.hbase.CoprocessorEnvironment; @@ -35,10 +32,13 @@ import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils; import org.apache.hadoop.hbase.master.MasterServices; import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos; +import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.GetManagedKeysResponse; import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysRequest; import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysResponse; -import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.GetManagedKeysResponse; import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos.ManagedKeysService; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.com.google.protobuf.ByteString; import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java index deee88d810d2..60ec5a53d251 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java @@ -22,7 +22,6 @@ import java.security.KeyException; import java.util.ArrayList; import java.util.List; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HConstants; @@ -43,6 +42,7 @@ import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; import org.apache.hadoop.hbase.security.EncryptionUtil; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.yetus.audience.InterfaceAudience; /** * Accessor for keymeta table as part of key management. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java index 06c3919adf08..f2a832ebe4bc 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java @@ -20,10 +20,10 @@ import java.io.IOException; import java.security.KeyException; import java.util.List; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; +import org.apache.yetus.audience.InterfaceAudience; /** * This class provides unified access on top of both {@code ManagedKeyDataCache} (L1) and diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java index cc2e091a0c9a..224a9e922c04 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java @@ -22,11 +22,11 @@ import java.util.List; import java.util.Map; import java.util.concurrent.locks.ReentrantLock; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.yetus.audience.InterfaceAudience; /** * In-memory cache for ManagedKeyData entries, using key metadata as the cache key. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java index 46bf6fe25ede..5a89d38a0bb2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyAccessor.java @@ -25,7 +25,6 @@ import java.util.List; import java.util.Map; import java.util.TreeMap; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileStatus; @@ -36,6 +35,7 @@ import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hbase.util.Pair; +import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class SystemKeyAccessor extends KeyManagementBase { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java index 5b61e800e27b..778f270cc69a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java @@ -21,12 +21,12 @@ import java.util.List; import java.util.Map; import java.util.TreeMap; -import org.apache.yetus.audience.InterfaceAudience; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private public class SystemKeyCache { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java index 1fe5e1d4076e..45b021c77feb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SystemKeyManager.java @@ -22,7 +22,6 @@ import java.io.IOException; import java.util.List; import java.util.UUID; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.Path; @@ -31,6 +30,7 @@ import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; import org.apache.hadoop.hbase.keymeta.SystemKeyAccessor; import org.apache.hadoop.hbase.util.Pair; +import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class SystemKeyManager extends SystemKeyAccessor { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java index ba2c9bbf2a0e..104daaf4e9bf 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java @@ -42,8 +42,8 @@ import org.apache.hadoop.hbase.client.AsyncClusterConnection; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; -import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; +import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.master.replication.OfflineTableReplicationQueueStorage; import org.apache.hadoop.hbase.replication.ReplicationException; import org.apache.hadoop.hbase.replication.ReplicationGroupOffset; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java index dc320f1b658d..7070596a93c0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaEndpoint.java @@ -40,12 +40,6 @@ import java.util.Base64; import java.util.List; import javax.crypto.spec.SecretKeySpec; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.HBaseClassTestRule; @@ -59,6 +53,12 @@ import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; import org.apache.hbase.thirdparty.com.google.protobuf.ByteString; import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaMasterService.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaMasterService.java index f524f1d2f55f..0a90b992c379 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaMasterService.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaMasterService.java @@ -24,13 +24,6 @@ import static org.mockito.Mockito.when; import java.io.IOException; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseClassTestRule; @@ -41,6 +34,13 @@ import org.apache.hadoop.hbase.master.MasterServices; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; /** * Tests for KeymetaMasterService class diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java index 21d9efb43ce7..3c80f928c9e1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java @@ -52,19 +52,6 @@ import java.util.List; import java.util.Map; import java.util.NavigableMap; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.BlockJUnit4ClassRunner; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameter; -import org.junit.runners.Suite; -import org.mockito.ArgumentCaptor; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; @@ -88,6 +75,19 @@ import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.junit.runners.BlockJUnit4ClassRunner; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameter; +import org.junit.runners.Suite; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; @RunWith(Suite.class) @Suite.SuiteClasses({ diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyAccessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyAccessor.java index f54c0e8fb0e8..116b2bf6f4c4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyAccessor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyAccessor.java @@ -29,13 +29,6 @@ import java.util.ArrayList; import java.util.Arrays; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseClassTestRule; @@ -47,6 +40,13 @@ import org.apache.hadoop.hbase.io.crypto.MockManagedKeyProvider; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; @Category({ MasterTests.class, SmallTests.class }) public class TestManagedKeyAccessor { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java index 7d228aa2ff48..82008d26e7ee 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java @@ -29,9 +29,6 @@ import java.lang.reflect.Field; import java.security.KeyException; import java.util.List; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.io.crypto.Encryption; @@ -43,6 +40,9 @@ import org.apache.hadoop.hbase.protobuf.generated.ManagedKeysProtos; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java index 3cf11d108f08..936068ff8b3d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java @@ -39,8 +39,8 @@ import org.apache.hadoop.hbase.executor.ExecutorService; import org.apache.hadoop.hbase.favored.FavoredNodesManager; import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; -import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; +import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.master.assignment.AssignmentManager; import org.apache.hadoop.hbase.master.hbck.HbckChore; import org.apache.hadoop.hbase.master.janitor.CatalogJanitor; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java index 806bf3f61b66..cb7c1ffb7e00 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java @@ -53,8 +53,8 @@ import org.apache.hadoop.hbase.ipc.HBaseRpcController; import org.apache.hadoop.hbase.ipc.RpcServerInterface; import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; -import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; +import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.mob.MobFileCache; import org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager; import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java index 3ef6f9c4cd66..fc48cd57077d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java @@ -33,9 +33,9 @@ import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.ServerName; -import org.apache.hadoop.hbase.keymeta.SystemKeyCache; -import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; +import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; +import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.monitoring.MonitoredTask; import org.apache.hadoop.hbase.monitoring.TaskGroup; import org.apache.hadoop.hbase.testclassification.MasterTests; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java index ee42ab428c81..6733ad2eb959 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java @@ -40,19 +40,6 @@ import java.util.Arrays; import java.util.Collection; import java.util.List; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; -import org.junit.runner.RunWith; -import org.junit.runners.BlockJUnit4ClassRunner; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameter; -import org.junit.runners.Parameterized.Parameters; -import org.junit.runners.Suite; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; @@ -71,6 +58,19 @@ import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.TestName; +import org.junit.runner.RunWith; +import org.junit.runners.BlockJUnit4ClassRunner; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameter; +import org.junit.runners.Parameterized.Parameters; +import org.junit.runners.Suite; @RunWith(Suite.class) @Suite.SuiteClasses({ TestKeymetaAdminImpl.TestWhenDisabled.class, diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java index 9ab79ff5c92e..b2f9418a5703 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java @@ -29,21 +29,6 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; -import org.junit.runner.RunWith; -import org.junit.runners.BlockJUnit4ClassRunner; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameter; -import org.junit.runners.Parameterized.Parameters; -import org.junit.runners.Suite; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; import java.io.IOException; import java.security.Key; import java.util.Arrays; @@ -69,6 +54,21 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hbase.util.Pair; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.TestName; +import org.junit.runner.RunWith; +import org.junit.runners.BlockJUnit4ClassRunner; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameter; +import org.junit.runners.Parameterized.Parameters; +import org.junit.runners.Suite; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; @RunWith(Suite.class) @Suite.SuiteClasses({ diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyManager.java index 0e5a279590cf..37ba6f7f9135 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyManager.java @@ -24,23 +24,23 @@ import java.io.IOException; import java.security.Key; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.io.crypto.Encryption; import org.apache.hadoop.hbase.io.crypto.KeyProvider; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; -import org.apache.hadoop.hbase.io.crypto.MockManagedKeyProvider; import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; +import org.apache.hadoop.hbase.io.crypto.MockManagedKeyProvider; +import org.apache.hadoop.hbase.keymeta.ManagedKeyTestBase; import org.apache.hadoop.hbase.keymeta.SystemKeyAccessor; import org.apache.hadoop.hbase.keymeta.SystemKeyCache; -import org.apache.hadoop.hbase.keymeta.ManagedKeyTestBase; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; @Category({ MasterTests.class, MediumTests.class }) public class TestSystemKeyManager extends ManagedKeyTestBase { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java index dbaeb4b6d950..10241520be2d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java @@ -39,8 +39,8 @@ import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; -import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; +import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.replication.ReplicationException; import org.apache.hadoop.hbase.replication.ReplicationFactory; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStorePerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStorePerformanceEvaluation.java index ba8e910a6f9c..f7e5a5e56cf4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStorePerformanceEvaluation.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStorePerformanceEvaluation.java @@ -25,8 +25,8 @@ import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.io.util.MemorySizeUtil; import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; -import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; +import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.master.region.MasterRegion; import org.apache.hadoop.hbase.master.region.MasterRegionFactory; import org.apache.hadoop.hbase.procedure2.store.ProcedureStorePerformanceEvaluation; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java index 9aeaf36c7a84..7f299f0f9842 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java @@ -47,8 +47,8 @@ import org.apache.hadoop.hbase.io.hfile.ResizableBlockCache; import org.apache.hadoop.hbase.io.util.MemorySizeUtil; import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; -import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; +import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.regionserver.HeapMemoryManager.TunerContext; import org.apache.hadoop.hbase.regionserver.HeapMemoryManager.TunerResult; import org.apache.hadoop.hbase.testclassification.MediumTests; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java index ee5f42ea5524..bf4f8c40ff02 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java @@ -55,9 +55,9 @@ import org.apache.hadoop.hbase.ipc.RpcServerInterface; import org.apache.hadoop.hbase.ipc.ServerRpcController; import org.apache.hadoop.hbase.ipc.SimpleRpcServer; -import org.apache.hadoop.hbase.keymeta.SystemKeyCache; -import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; +import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; +import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.regionserver.RegionServerServices; import org.apache.hadoop.hbase.security.SecurityInfo; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java index d1d0f89b7915..9181a30e9d50 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java @@ -26,9 +26,9 @@ import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.AsyncClusterConnection; import org.apache.hadoop.hbase.client.Connection; -import org.apache.hadoop.hbase.keymeta.SystemKeyCache; -import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; +import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; +import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.slf4j.Logger; From 0f1f467ed3cd98bdd0a472fc60e67b2bb41fe73f Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Wed, 25 Jun 2025 20:02:25 +0530 Subject: [PATCH 58/70] Misc. checkstyle fixes --- .../apache/hadoop/hbase/io/crypto/ManagedKeyData.java | 4 ++-- .../org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java | 4 ++-- .../hadoop/hbase/io/crypto/TestManagedKeyProvider.java | 2 +- .../apache/hadoop/hbase/keymeta/KeyManagementBase.java | 2 +- .../hadoop/hbase/keymeta/KeymetaServiceEndpoint.java | 9 ++++----- .../org/apache/hadoop/hbase/keymeta/SystemKeyCache.java | 1 + .../apache/hadoop/hbase/keymeta/DummyKeyProvider.java | 4 ++-- .../apache/hadoop/hbase/keymeta/ManagedKeyTestBase.java | 2 +- .../hadoop/hbase/keymeta/TestKeymetaMasterService.java | 2 +- .../hadoop/hbase/keymeta/TestManagedKeyDataCache.java | 7 +++---- .../apache/hadoop/hbase/keymeta/TestManagedKeymeta.java | 1 - .../hbase/master/TestSystemKeyAccessorAndManager.java | 6 +++--- .../apache/hadoop/hbase/master/TestSystemKeyManager.java | 2 +- 13 files changed, 22 insertions(+), 24 deletions(-) diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java index d4f2a7b5d6ca..9d2710fc5a21 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java @@ -206,7 +206,7 @@ public long getRefreshTimestamp() { /** * @return the number of times this key has been used for read operations as of the time this - * key data was initialized. + * key data was initialized. */ public long getReadOpCount() { return readOpCount; @@ -214,7 +214,7 @@ public long getReadOpCount() { /** * @return the number of times this key has been used for write operations as of the time this - * key data was initialized. + * key data was initialized. */ public long getWriteOpCount() { return writeOpCount; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java index d74af46b5173..2e52dccc0598 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdmin.java @@ -37,7 +37,7 @@ public interface KeymetaAdmin { * @param keyNamespace The namespace for the key management. * * @return The list of {@link ManagedKeyData} objects each identifying the key and its current - * status. + * status. * @throws IOException if an error occurs while enabling key management. */ List enableKeyManagement(String keyCust, String keyNamespace) @@ -49,7 +49,7 @@ List enableKeyManagement(String keyCust, String keyNamespace) * @param keyCust The key custodian in base64 encoded format. * @param keyNamespace The namespace for the key management. * @return The list of {@link ManagedKeyData} objects each identifying the key and its current - * status. + * status. * @throws IOException if an error occurs while enabling key management. */ List getManagedKeys(String keyCust, String keyNamespace) diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyProvider.java index be93409813a5..876e14fa1101 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyProvider.java @@ -122,7 +122,7 @@ protected void addCustomEntries(KeyStore store, Properties passwdProps) throws E private void addEntry(String alias, String prefix) { String encPrefix = Base64.getEncoder().encodeToString(prefix.getBytes()); String confKey = HConstants.CRYPTO_MANAGED_KEY_STORE_CONF_KEY_PREFIX + encPrefix + "." - + "alias"; + + "alias"; conf.set(confKey, alias); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java index 1f899fcdfc25..27d1e0c1e0cf 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java @@ -52,7 +52,7 @@ protected Server getServer() { * A utility method for getting the managed key provider. * @return the key provider * @throws RuntimeException if no provider is configured or if the configured provider is not an - * instance of ManagedKeyProvider + * instance of ManagedKeyProvider */ protected ManagedKeyProvider getKeyProvider() { KeyProvider provider = Encryption.getKeyProvider(getServer().getConfiguration()); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java index e03caa901d66..067f98340575 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java @@ -23,7 +23,6 @@ import java.util.Collections; import java.util.List; -import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.CoreCoprocessor; import org.apache.hadoop.hbase.coprocessor.HasMasterServices; @@ -93,7 +92,7 @@ public Iterable getServices() { * The implementation of the {@link ManagedKeysProtos.ManagedKeysService} * interface, which provides the actual method implementations for enabling key management. */ - @VisibleForTesting + @InterfaceAudience.Private public class KeymetaAdminServiceImpl extends ManagedKeysService { /** @@ -139,7 +138,7 @@ public void getManagedKeys(RpcController controller, ManagedKeysRequest request, } } - @VisibleForTesting + @InterfaceAudience.Private public static ManagedKeysResponse.Builder getResponseBuilder(RpcController controller, ManagedKeysRequest request) { ManagedKeysResponse.Builder builder = ManagedKeysResponse.newBuilder() @@ -152,7 +151,7 @@ public static ManagedKeysResponse.Builder getResponseBuilder(RpcController contr } // Assumes that all ManagedKeyData objects belong to the same custodian and namespace. - @VisibleForTesting + @InterfaceAudience.Private public static GetManagedKeysResponse generateKeyStateResponse( List managedKeyStates, ManagedKeysResponse.Builder builder) { GetManagedKeysResponse.Builder responseBuilder = GetManagedKeysResponse.newBuilder(); @@ -169,7 +168,7 @@ public static GetManagedKeysResponse generateKeyStateResponse( return responseBuilder.build(); } - @VisibleForTesting + @InterfaceAudience.Private public static byte[] convertToKeyCustBytes(RpcController controller, ManagedKeysRequest request, ManagedKeysResponse.Builder builder) { byte[] key_cust = null; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java index 778f270cc69a..d1e3eb048a9b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/SystemKeyCache.java @@ -28,6 +28,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +@SuppressWarnings("checkstyle:FinalClass") // as otherwise it breaks mocking. @InterfaceAudience.Private public class SystemKeyCache { private static final Logger LOG = LoggerFactory.getLogger(SystemKeyCache.class); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/DummyKeyProvider.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/DummyKeyProvider.java index 036110c5c120..2d8ae446da3a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/DummyKeyProvider.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/DummyKeyProvider.java @@ -28,11 +28,11 @@ public void init(String params) { @Override public Key[] getKeys(String[] aliases) { - return null; + return null; } @Override public Key getKey(String alias) { - return null; + return null; } } \ No newline at end of file diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyTestBase.java index bdc5902022e3..a0147e6e4e2e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyTestBase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyTestBase.java @@ -37,7 +37,7 @@ public void setUp() throws Exception { // Start the minicluster TEST_UTIL.startMiniCluster(1); TEST_UTIL.waitFor(60000, - () -> TEST_UTIL.getMiniHBaseCluster().getMaster().isInitialized()); + () -> TEST_UTIL.getMiniHBaseCluster().getMaster().isInitialized()); TEST_UTIL.waitUntilAllRegionsAssigned(KeymetaTableAccessor.KEY_META_TABLE_NAME); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaMasterService.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaMasterService.java index 0a90b992c379..f34d482d7940 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaMasterService.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaMasterService.java @@ -169,7 +169,7 @@ public void testInitWithCreateSystemTableIOException() throws Exception { verify(mockMaster).createSystemTable(any(TableDescriptor.class)); } - @Test + @Test public void testConstructorWithMasterServices() throws Exception { // Execute service = new KeymetaMasterService(mockMaster); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java index 77b38aee2184..701bfe4a0025 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java @@ -19,11 +19,10 @@ import static org.apache.hadoop.hbase.io.crypto.ManagedKeyData.KEY_SPACE_GLOBAL; import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.FAILED; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; import java.util.Set; import org.apache.hadoop.conf.Configuration; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java index 82008d26e7ee..1ffed4707475 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeymeta.java @@ -70,7 +70,6 @@ private void doTestEnable(KeymetaAdmin adminClient) throws IOException, KeyExcep HMaster master = TEST_UTIL.getHBaseCluster().getMaster(); MockManagedKeyProvider managedKeyProvider = (MockManagedKeyProvider) Encryption.getKeyProvider(master.getConfiguration()); - ; String cust = "cust1"; String encodedCust = ManagedKeyProvider.encodeToStr(cust.getBytes()); List managedKeyStates = diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java index b2f9418a5703..2e62dbee0007 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java @@ -227,7 +227,7 @@ public static Collection data() { return Arrays.asList(new Object[][] { { "abcd", "Couldn't parse key file name: abcd" }, {SYSTEM_KEY_FILE_PREFIX+"abcd", "Couldn't parse key file name: "+ - SYSTEM_KEY_FILE_PREFIX+"abcd"}, + SYSTEM_KEY_FILE_PREFIX+"abcd"}, // Add more test cases here }); } @@ -237,7 +237,7 @@ public void test() throws Exception { FileStatus mockFileStatus = createMockFile(fileName); IOException ex = assertThrows(IOException.class, - () -> SystemKeyAccessor.extractSystemKeySeqNum(mockFileStatus.getPath())); + () -> SystemKeyAccessor.extractSystemKeySeqNum(mockFileStatus.getPath())); assertEquals(expectedErrorMessage, ex.getMessage()); } } @@ -347,7 +347,7 @@ public void testEnsureSystemKeyInitialized_RaceCondition() throws Exception { when(mockFileSystem.globStatus(any())).thenReturn( new FileStatus[0], new FileStatus[] { mockFileStatus } - ); + ); manager.ensureSystemKeyInitialized(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyManager.java index 37ba6f7f9135..a764a5b7de87 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyManager.java @@ -114,6 +114,6 @@ private void restartSystem() throws Exception { Thread.sleep(2000); TEST_UTIL.restartHBaseCluster(1); TEST_UTIL.waitFor(60000, - () -> TEST_UTIL.getMiniHBaseCluster().getMaster().isInitialized()); + () -> TEST_UTIL.getMiniHBaseCluster().getMaster().isInitialized()); } } From 346659c4cd5c7decbd5b5b9fa0e08d95c6a6da3a Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Wed, 25 Jun 2025 21:30:00 +0530 Subject: [PATCH 59/70] Fix compilation error --- .../apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java index 701bfe4a0025..08a5da59311b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java @@ -23,6 +23,9 @@ import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; import java.util.Set; import org.apache.hadoop.conf.Configuration; From 98183ccc14a570a9c7d3a2ab89d22a73476e1dea Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Mon, 7 Jul 2025 12:04:07 +0530 Subject: [PATCH 60/70] Use single letter qualifier names to reduce storage space --- .../hbase/keymeta/KeymetaTableAccessor.java | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java index 60ec5a53d251..5f2cdcdbfca3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java @@ -56,30 +56,30 @@ public class KeymetaTableAccessor extends KeyManagementBase { public static final TableName KEY_META_TABLE_NAME = TableName.valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "keymeta"); - public static final String DEK_METADATA_QUAL_NAME = "dek_metadata"; + public static final String DEK_METADATA_QUAL_NAME = "m"; public static final byte[] DEK_METADATA_QUAL_BYTES = Bytes.toBytes(DEK_METADATA_QUAL_NAME); - public static final String DEK_CHECKSUM_QUAL_NAME = "dek_checksum"; + public static final String DEK_CHECKSUM_QUAL_NAME = "c"; public static final byte[] DEK_CHECKSUM_QUAL_BYTES = Bytes.toBytes(DEK_CHECKSUM_QUAL_NAME); - public static final String DEK_WRAPPED_BY_STK_QUAL_NAME = "dek_wrapped_by_stk"; + public static final String DEK_WRAPPED_BY_STK_QUAL_NAME = "w"; public static final byte[] DEK_WRAPPED_BY_STK_QUAL_BYTES = Bytes.toBytes(DEK_WRAPPED_BY_STK_QUAL_NAME); - public static final String STK_CHECKSUM_QUAL_NAME = "stk_checksum"; + public static final String STK_CHECKSUM_QUAL_NAME = "s"; public static final byte[] STK_CHECKSUM_QUAL_BYTES = Bytes.toBytes(STK_CHECKSUM_QUAL_NAME); - public static final String REFRESHED_TIMESTAMP_QUAL_NAME = "refreshed_timestamp"; + public static final String REFRESHED_TIMESTAMP_QUAL_NAME = "t"; public static final byte[] REFRESHED_TIMESTAMP_QUAL_BYTES = Bytes.toBytes(REFRESHED_TIMESTAMP_QUAL_NAME); - public static final String KEY_STATE_QUAL_NAME = "key_state"; + public static final String KEY_STATE_QUAL_NAME = "k"; public static final byte[] KEY_STATE_QUAL_BYTES = Bytes.toBytes(KEY_STATE_QUAL_NAME); - public static final String READ_OP_COUNT_QUAL_NAME = "read_op_count"; + public static final String READ_OP_COUNT_QUAL_NAME = "R"; public static final byte[] READ_OP_COUNT_QUAL_BYTES = Bytes.toBytes(READ_OP_COUNT_QUAL_NAME); - public static final String WRITE_OP_COUNT_QUAL_NAME = "write_op_count"; + public static final String WRITE_OP_COUNT_QUAL_NAME = "W"; public static final byte[] WRITE_OP_COUNT_QUAL_BYTES = Bytes.toBytes(WRITE_OP_COUNT_QUAL_NAME); public KeymetaTableAccessor(Server server) { From 588e6b3a30d6b98670288a1e9a7b275837ab8454 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Wed, 9 Jul 2025 09:27:47 +0530 Subject: [PATCH 61/70] Switched to Caffeine cache with a better architecture. MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit It took me several back and forth cycles to tell Cursor what I want and also make corrections to the code, so I asked it to summarize by generating a comprehensive prompt and the below is what I got. Saving it as a quick future reference. I will follow up with manaul improvements to this change and better test coverage. Here's a comprehensive prompt that captures all the incremental changes and corrections we made throughout this refactoring: --- **Comprehensive Refactoring Prompt for ManagedKeyDataCache and ManagedKeyAccessor** I want to refactor the key management system in HBase to improve performance, simplify the architecture, and eliminate redundant code. Here are the specific requirements: ## Initial Refactoring Goals 1. **Refactor ManagedKeyDataCache.getEntry() to use CacheLoader interface**: Remove redundant methods and delegate responsibility properly. Use Caffeine's `get()` method with a lambda function for loading to allow passing context parameters (`key_cust` and `keyNamespace`) directly. 2. **Refactor ManagedKeyAccessor to delegate all key retrieval logic to the cache**: Include dynamic provider access, simplifying the accessor to be a thin wrapper. 3. **Merge getActiveKeys method into getRandomEntry**: Make `cacheByNS` a separate Caffeine cache with its own config to handle only active keys efficiently. 4. **Simplify active keys cache structure**: Use a 2-level cache with a composite key (custodian + namespace) and a regular Map for keys, then change to a List for O(1) random selection. ## Cache Architecture Changes 5. **Change active keys cache from Map to List**: Change the value type in `activeKeysCache` from `Map` to `List` to make random selection lightweight (O(1) instead of O(n)). 6. **Remove redundant addEntry() calls**: Explicit `addEntry()` calls inside Caffeine callbacks are redundant since Caffeine automatically caches returned values. Remove these from `getEntry()`, `getRandomEntry()`, and `getActiveKeys()` methods. 7. **Update removeEntry to work with both caches**: The `removeEntry` method should remove from both the main cache and the active keys cache. 8. **Update getEntryCount to include both caches**: Make it lightweight and clearly indicate it's an estimate that may include double-counting. ## Active Keys Cache Behavior 9. **Remove automatic addition to active keys cache**: `addEntry()` should not add to the active keys cache to avoid suppressing bulk loading logic. During random key selection, include individual active entries from the main cache if missing from the active keys cache. 10. **Simplify getRandomEntry**: Use `cache.getIfPresent` instead of `cache.get` to avoid triggering loading callbacks when just checking what's already cached. 11. **Remove dynamic lookup from active keys cache callback**: The `isDynamicLookupEnabled()` check should not be used in the callback for `activeKeysCache` since the callback is for loading active keys from the cache, not for determining dynamic lookup. 12. **Ignore main cache entries in getRandomEntry**: The `getRandomEntry` method should only return entries from the active keys cache and ignore any entries that might exist in the main cache. ## Method Cleanup 13. **Remove getActiveKeys() method**: Since `activeKeysCache` already contains only active keys, the `getActiveKeys()` method is redundant. Remove it and simplify `ManagedKeyAccessor.getAnActiveKey()` method. 14. **Remove ManagedKeyAccessor entirely**: Once `ManagedKeyAccessor` has no meaningful logic left, remove it and replace usage with `ManagedKeyDataCache` directly. Update the `Server` interface to return `ManagedKeyDataCache` instead of `ManagedKeyAccessor`. 15. **Remove addEntry method**: If `addEntry` is not being used by any callers, remove it. If it's only used internally, replace with direct cache access. 16. **Remove unused methods**: Remove `getActiveEntriesFromCache()` method if it's not being used anywhere. ## Inheritance and Code Reuse 17. **Make KeyManagementBase the base class**: Make `ManagedKeyDataCache` extend `KeyManagementBase` so it can reuse the `getKeyProvider()` method and other common functionality like `getServer()`. ## Testing and Validation 18. **Update all test files**: Replace all usages of `ManagedKeyAccessor` with `ManagedKeyDataCache` in test files and mock classes. 19. **Fix test compilation issues**: Update test mocks to properly configure the server mock to return a valid Configuration object. 20. **Update test logic**: Modify tests to work with the new architecture where `getRandomEntry()` only looks at the active keys cache, not the main cache. ## Key Design Principles - **Performance**: Random key selection should be O(1) - **Simplicity**: Remove redundant layers and methods - **Correctness**: Maintain proper L1 → L2 → Dynamic Provider hierarchy - **Testability**: Ensure tests can still validate functionality - **Memory efficiency**: Use appropriate data structures for each use case ## Expected Final Architecture - **Main cache**: Caffeine cache for general key data with automatic loading - **Active keys cache**: Caffeine cache with List values for O(1) random selection - **No accessor layer**: Direct usage of `ManagedKeyDataCache` - **Inheritance**: `ManagedKeyDataCache` extends `KeyManagementBase` - **Clean separation**: Active keys cache only for active keys, main cache for all keys Please implement these changes incrementally, ensuring each step compiles and tests pass before proceeding to the next. --- .../org/apache/hadoop/hbase/HConstants.java | 10 + .../apache/hadoop/hbase/HBaseServerBase.java | 8 +- .../hbase/MockRegionServerServices.java | 4 +- .../java/org/apache/hadoop/hbase/Server.java | 6 +- .../hbase/keymeta/KeymetaTableAccessor.java | 10 +- .../hbase/keymeta/ManagedKeyAccessor.java | 111 ------- .../hbase/keymeta/ManagedKeyDataCache.java | 297 +++++++++++++----- .../hbase/regionserver/HRegionServer.java | 3 +- .../regionserver/ReplicationSyncUp.java | 4 +- .../hbase/keymeta/TestManagedKeyAccessor.java | 213 ------------- .../keymeta/TestManagedKeyDataCache.java | 69 ++-- .../hbase/master/MockNoopMasterServices.java | 4 +- .../hadoop/hbase/master/MockRegionServer.java | 4 +- .../hbase/master/TestActiveMasterManager.java | 4 +- .../cleaner/TestReplicationHFileCleaner.java | 4 +- ...onProcedureStorePerformanceEvaluation.java | 4 +- .../regionserver/TestHeapMemoryManager.java | 4 +- .../token/TestTokenAuthentication.java | 4 +- .../apache/hadoop/hbase/util/MockServer.java | 4 +- 19 files changed, 304 insertions(+), 463 deletions(-) delete mode 100644 hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java delete mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyAccessor.java diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java index 38c3add280ab..42e4b61defdb 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java @@ -1332,6 +1332,16 @@ public enum OperationStatusCode { "hbase.crypto.managed_keys.dynamic_lookup.enabled"; public static final boolean CRYPTO_MANAGED_KEYS_DYNAMIC_LOOKUP_DEFAULT_ENABLED = true; + /** Maximum number of entries in the managed key data cache. */ + public static final String CRYPTO_MANAGED_KEYS_CACHE_MAX_SIZE_CONF_KEY = + "hbase.crypto.managed_keys.cache.max_size"; + public static final int CRYPTO_MANAGED_KEYS_CACHE_MAX_SIZE_DEFAULT = 1000; + + /** Maximum number of entries in the managed key active keys cache. */ + public static final String CRYPTO_MANAGED_KEYS_ACTIVE_CACHE_MAX_SIZE_CONF_KEY = + "hbase.crypto.managed_keys.active_cache.max_size"; + public static final int CRYPTO_MANAGED_KEYS_ACTIVE_CACHE_MAX_SIZE_DEFAULT = 100; + /** Configuration key for setting RPC codec class name */ public static final String RPC_CODEC_CONF_KEY = "hbase.client.rpc.codec"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java index 0fa2d26a0eda..1d770db087eb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java @@ -54,7 +54,7 @@ import org.apache.hadoop.hbase.ipc.RpcServerInterface; import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; import org.apache.hadoop.hbase.keymeta.KeymetaAdminImpl; -import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; +import org.apache.hadoop.hbase.keymeta.ManagedKeyDataCache; import org.apache.hadoop.hbase.keymeta.SystemKeyAccessor; import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.master.HMaster; @@ -194,7 +194,7 @@ public abstract class HBaseServerBase> extends private SystemKeyCache systemKeyCache; protected KeymetaAdminImpl keymetaAdmin; - protected ManagedKeyAccessor managedKeyAccessor; + protected ManagedKeyDataCache managedKeyDataCache; private void setupSignalHandlers() { if (!SystemUtils.IS_OS_WINDOWS) { @@ -420,8 +420,8 @@ public KeymetaAdmin getKeymetaAdmin() { } @Override - public ManagedKeyAccessor getManagedKeyAccessor() { - return managedKeyAccessor; + public ManagedKeyDataCache getManagedKeyDataCache() { + return managedKeyDataCache; } @Override diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java index 3827ad147e0d..0996fbf21c52 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java @@ -39,7 +39,7 @@ import org.apache.hadoop.hbase.io.hfile.BlockCache; import org.apache.hadoop.hbase.ipc.RpcServerInterface; import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; -import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; +import org.apache.hadoop.hbase.keymeta.ManagedKeyDataCache; import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.mob.MobFileCache; import org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager; @@ -263,7 +263,7 @@ public ChoreService getChoreService() { return null; } - @Override public ManagedKeyAccessor getManagedKeyAccessor() { + @Override public ManagedKeyDataCache getManagedKeyDataCache() { return null; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java index d122510e2248..8ca25419472f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java @@ -24,7 +24,7 @@ import org.apache.hadoop.hbase.client.AsyncConnection; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; -import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; +import org.apache.hadoop.hbase.keymeta.ManagedKeyDataCache; import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.yetus.audience.InterfaceAudience; @@ -92,9 +92,9 @@ default AsyncConnection getAsyncConnection() { public SystemKeyCache getSystemKeyCache(); /** - * @return the accessor for cluster keys. + * @return the cache for managed keys. */ - public ManagedKeyAccessor getManagedKeyAccessor(); + public ManagedKeyDataCache getManagedKeyDataCache(); /** * @return the admin for keymeta. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java index 5f2cdcdbfca3..6efb24a57fff 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java @@ -117,12 +117,12 @@ public List getAllKeys(byte[] key_cust, String keyNamespace) Connection connection = getServer().getConnection(); byte[] prefixForScan = Bytes.add(Bytes.toBytes(key_cust.length), key_cust, Bytes.toBytes(keyNamespace)); - try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { - PrefixFilter prefixFilter = new PrefixFilter(prefixForScan); - Scan scan = new Scan(); - scan.setFilter(prefixFilter); - scan.addFamily(KEY_META_INFO_FAMILY); + PrefixFilter prefixFilter = new PrefixFilter(prefixForScan); + Scan scan = new Scan(); + scan.setFilter(prefixFilter); + scan.addFamily(KEY_META_INFO_FAMILY); + try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { ResultScanner scanner = table.getScanner(scan); List allKeys = new ArrayList<>(); for (Result result : scanner) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java deleted file mode 100644 index f2a832ebe4bc..000000000000 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyAccessor.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hbase.keymeta; - -import java.io.IOException; -import java.security.KeyException; -import java.util.List; - -import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; -import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; -import org.apache.yetus.audience.InterfaceAudience; - -/** - * This class provides unified access on top of both {@code ManagedKeyDataCache} (L1) and - * {@code KeymetaTableAccessor} (L2) to access managed keys. When the getter is called, it first - * checks if L1 cache has the key, if not, it tries to get the key from L2. - */ -@InterfaceAudience.Private -public class ManagedKeyAccessor extends KeyManagementBase { - private final ManagedKeyDataCache keyDataCache; - private final KeymetaTableAccessor keymetaAccessor; - - public ManagedKeyAccessor(KeymetaTableAccessor keymetaAccessor, - ManagedKeyDataCache keyDataCache) { - super(keymetaAccessor.getServer()); - this.keymetaAccessor = keymetaAccessor; - this.keyDataCache = keyDataCache; - } - - /** - * Get key data by key metadata. - * - * @param key_cust The key custodian. - * @param keyNamespace The namespace of the key - * @param keyMetadata The metadata of the key - * @param wrappedKey The DEK key material encrypted with the corresponding KEK, if available. - * @return The key data or {@code null} - * @throws IOException if an error occurs while retrieving the key - */ - public ManagedKeyData getKey(byte[] key_cust, String keyNamespace, String keyMetadata, - byte[] wrappedKey) - throws IOException, KeyException { - assertKeyManagementEnabled(); - // 1. Check L1 cache. - ManagedKeyData keyData = keyDataCache.getEntry(keyMetadata); - if (keyData == null) { - // 2. Check L2 cache. - keyData = keymetaAccessor.getKey(key_cust, keyNamespace, keyMetadata); - if (keyData == null) { - // 3. If dynamic lookup is enabled, check with Key Provider. - if (isDynamicLookupEnabled()) { - ManagedKeyProvider provider = getKeyProvider(); - keyData = provider.unwrapKey(keyMetadata, wrappedKey); - } - if (keyData != null) { - LOG.info("Got key data with status: {} and metadata: {} for prefix: {}", - keyData.getKeyState(), keyData.getKeyMetadata(), - ManagedKeyProvider.encodeToStr(key_cust)); - keymetaAccessor.addKey(keyData); - } - else { - LOG.info("Failed to get key data with metadata: {} for prefix: {}", - keyMetadata, ManagedKeyProvider.encodeToStr(key_cust)); - } - } - if (keyData != null) { - keyDataCache.addEntry(keyData); - } - } - return keyData; - } - - /** - * Get an active key for the given prefix suitable for use in encryption. - * - * @param key_cust The key custodian. - * @param keyNamespace The namespace of the key - * @return The key data - * @throws IOException if an error occurs while retrieving the key - */ - public ManagedKeyData getAnActiveKey(byte[] key_cust, String keyNamespace) - throws IOException, KeyException { - assertKeyManagementEnabled(); - ManagedKeyData keyData = keyDataCache.getRandomEntry(key_cust, keyNamespace); - if (keyData == null) { - List activeKeys = keymetaAccessor.getActiveKeys(key_cust, keyNamespace); - if (! activeKeys.isEmpty()) { - for (ManagedKeyData kd : activeKeys) { - keyDataCache.addEntry(kd); - } - keyData = keyDataCache.getRandomEntry(key_cust, keyNamespace); - } - } - return keyData; - } -} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java index 224a9e922c04..5853019719a8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java @@ -17,115 +17,239 @@ */ package org.apache.hadoop.hbase.keymeta; +import java.io.IOException; +import java.security.KeyException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.concurrent.locks.ReentrantLock; +import java.util.Objects; +import java.util.function.Function; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.Server; +import org.apache.hadoop.hbase.io.crypto.Encryption; +import org.apache.hadoop.hbase.io.crypto.KeyProvider; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.github.benmanes.caffeine.cache.Cache; +import com.github.benmanes.caffeine.cache.Caffeine; /** * In-memory cache for ManagedKeyData entries, using key metadata as the cache key. + * Uses two independent Caffeine caches: one for general key data and one for active keys only + * with hierarchical structure for efficient random key selection. */ @InterfaceAudience.Private -public class ManagedKeyDataCache { - private final Map cache; - private final Map>> cacheByNS; - private final ReentrantLock lock; - private int nEntries; - - public ManagedKeyDataCache() { - this.cacheByNS = new HashMap<>(); - this.cache = new HashMap<>(); - this.lock = new ReentrantLock(); - } +public class ManagedKeyDataCache extends KeyManagementBase { + private static final Logger LOG = LoggerFactory.getLogger(ManagedKeyDataCache.class); + + private final Cache cache; + private final Cache> activeKeysCache; + private final KeymetaTableAccessor keymetaAccessor; /** - * Adds a new entry to the cache. - * - * @param keyData the ManagedKeyData entry to be added + * Composite key for active keys cache containing custodian and namespace. */ - public void addEntry(ManagedKeyData keyData) { - lock.lock(); - try { - Bytes keyCust = new Bytes(keyData.getKeyCustodian()); - String keyNamespace = keyData.getKeyNamespace(); - - cache.put(keyData.getKeyMetadata(), keyData); - - Map> nsCache = cacheByNS.computeIfAbsent(keyNamespace, - k -> new HashMap<>()); - Map keyMap = nsCache.computeIfAbsent(keyCust, - k -> new HashMap<>()); - - int prevSize = keyMap.size(); - keyMap.put(keyData.getKeyMetadata(), keyData); - if (keyMap.size() != prevSize) { - ++nEntries; - } - } finally { - lock.unlock(); + private static class CacheKey { + private final byte[] custodian; + private final String namespace; + + public CacheKey(byte[] custodian, String namespace) { + this.custodian = custodian; + this.namespace = namespace; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null || getClass() != obj.getClass()) return false; + CacheKey cacheKey = (CacheKey) obj; + return Bytes.equals(custodian, cacheKey.custodian) && + Objects.equals(namespace, cacheKey.namespace); } + + @Override + public int hashCode() { + return Objects.hash(Bytes.hashCode(custodian), namespace); + } + } + + public ManagedKeyDataCache(Server server) { + this(server, null); + } + + public ManagedKeyDataCache(Server server, KeymetaTableAccessor keymetaAccessor) { + super(server); + this.keymetaAccessor = keymetaAccessor; + + Configuration conf = server.getConfiguration(); + int maxSize = conf.getInt(HConstants.CRYPTO_MANAGED_KEYS_CACHE_MAX_SIZE_CONF_KEY, + HConstants.CRYPTO_MANAGED_KEYS_CACHE_MAX_SIZE_DEFAULT); + int activeKeysMaxSize = conf.getInt(HConstants.CRYPTO_MANAGED_KEYS_ACTIVE_CACHE_MAX_SIZE_CONF_KEY, + HConstants.CRYPTO_MANAGED_KEYS_ACTIVE_CACHE_MAX_SIZE_DEFAULT); + + this.cache = Caffeine.newBuilder() + .maximumSize(maxSize) + .build(); + + this.activeKeysCache = Caffeine.newBuilder() + .maximumSize(activeKeysMaxSize) + .build(); } + + /** - * Retrieves an entry from the cache based on its key metadata. + * Retrieves an entry from the cache, loading it from KeymetaTableAccessor if not present. + * This method uses a lambda function to automatically load missing entries. * + * @param key_cust the key custodian + * @param keyNamespace the key namespace * @param keyMetadata the key metadata of the entry to be retrieved + * @param wrappedKey The DEK key material encrypted with the corresponding KEK, if available. * @return the corresponding ManagedKeyData entry, or null if not found + * @throws IOException if an error occurs while loading from KeymetaTableAccessor + * @throws KeyException if an error occurs while loading from KeymetaTableAccessor */ - public ManagedKeyData getEntry(String keyMetadata) { - lock.lock(); - try { - return cache.get(keyMetadata); - } finally { - lock.unlock(); + public ManagedKeyData getEntry(byte[] key_cust, String keyNamespace, String keyMetadata, byte[] wrappedKey) + throws IOException, KeyException { + return cache.get(keyMetadata, metadata -> { + // First check if it's in the active keys cache + ManagedKeyData activeKey = getFromActiveKeysCache(key_cust, keyNamespace, keyMetadata); + if (activeKey != null) { + // Found in active cache, add to main cache and return + cache.put(metadata, activeKey); + return activeKey; + } + + // First try to load from KeymetaTableAccessor + if (keymetaAccessor != null) { + try { + ManagedKeyData keyData = keymetaAccessor.getKey(key_cust, keyNamespace, metadata); + if (keyData != null) { + return keyData; + } + } catch (IOException | KeyException e) { + LOG.warn("Failed to load key from KeymetaTableAccessor for metadata: {}", metadata, e); + } + } + + // If not found in KeymetaTableAccessor and dynamic lookup is enabled, try with Key Provider + if (isDynamicLookupEnabled()) { + try { + ManagedKeyProvider provider = getKeyProvider(); + ManagedKeyData keyData = provider.unwrapKey(metadata, wrappedKey); + if (keyData != null) { + LOG.info("Got key data with status: {} and metadata: {} for prefix: {}", + keyData.getKeyState(), keyData.getKeyMetadata(), + ManagedKeyProvider.encodeToStr(key_cust)); + // Add to KeymetaTableAccessor for future L2 lookups + if (keymetaAccessor != null) { + try { + keymetaAccessor.addKey(keyData); + } catch (IOException e) { + LOG.warn("Failed to add key to KeymetaTableAccessor for metadata: {}", metadata, e); + } + } + return keyData; + } + } catch (Exception e) { + LOG.warn("Failed to load key from provider for metadata: {}", metadata, e); + } + } + + LOG.info("Failed to get key data with metadata: {} for prefix: {}", + metadata, ManagedKeyProvider.encodeToStr(key_cust)); + return null; + }); + } + + /** + * Retrieves a key from the active keys cache using 2-level lookup. + * + * @param key_cust the key custodian + * @param keyNamespace the key namespace + * @param keyMetadata the key metadata + * @return the ManagedKeyData if found, null otherwise + */ + private ManagedKeyData getFromActiveKeysCache(byte[] key_cust, String keyNamespace, String keyMetadata) { + CacheKey cacheKey = new CacheKey(key_cust, keyNamespace); + List keyList = activeKeysCache.getIfPresent(cacheKey); + if (keyList == null) { + return null; } + + for (ManagedKeyData keyData : keyList) { + if (keyData.getKeyMetadata().equals(keyMetadata)) { + return keyData; + } + } + return null; } /** * Removes an entry from the cache based on its key metadata. + * Removes from both the main cache and the active keys cache. * * @param keyMetadata the key metadata of the entry to be removed * @return the removed ManagedKeyData entry, or null if not found */ public ManagedKeyData removeEntry(String keyMetadata) { - lock.lock(); - try { - ManagedKeyData removedEntry = cache.remove(keyMetadata); + ManagedKeyData removedEntry = cache.asMap().remove(keyMetadata); + + // Also remove from active keys cache if present if (removedEntry != null) { - Bytes keyCust = new Bytes(removedEntry.getKeyCustodian()); - String keyNamespace = removedEntry.getKeyNamespace(); - Map> nsCache = cacheByNS.get(keyNamespace); - Map keyMap = nsCache.get(keyCust); - keyMap.remove(removedEntry.getKeyMetadata()); - if (keyMap.isEmpty()) { - nsCache.remove(keyCust); + CacheKey cacheKey = new CacheKey(removedEntry.getKeyCustodian(), removedEntry.getKeyNamespace()); + List keyList = activeKeysCache.getIfPresent(cacheKey); + if (keyList != null) { + keyList.removeIf(keyData -> keyData.getKeyMetadata().equals(keyMetadata)); + // If the list is now empty, remove the entire cache entry + if (keyList.isEmpty()) { + activeKeysCache.invalidate(cacheKey); } } - if (removedEntry != null) { - --nEntries; - } - return removedEntry; - } finally { - lock.unlock(); } + + return removedEntry; } /** - * @return the approximate number of entries in the cache. + * @return the approximate number of entries across both caches (main cache + active keys cache). + * This is an estimate and may include some double-counting if entries exist in both caches. */ public int getEntryCount() { - return nEntries; + int mainCacheCount = (int) cache.estimatedSize(); + int activeCacheCount = 0; + + // Count entries in active keys cache + for (List keyList : activeKeysCache.asMap().values()) { + activeCacheCount += keyList.size(); + } + + return mainCacheCount + activeCacheCount; + } + + /** + * Adds an entry to the cache directly. This method is primarily for testing purposes. + * + * @param keyData the ManagedKeyData entry to be added + */ + public void addEntryForTesting(ManagedKeyData keyData) { + cache.put(keyData.getKeyMetadata(), keyData); } /** * Retrieves a random entry from the cache based on its key custodian, key namespace, and filters - * out entries with a status other than ACTIVE. + * out entries with a status other than ACTIVE. This method also loads active keys from provider + * if not found in cache. * * @param key_cust The key custodian. * @param keyNamespace the key namespace to search for @@ -133,28 +257,51 @@ public int getEntryCount() { * not found */ public ManagedKeyData getRandomEntry(byte[] key_cust, String keyNamespace) { - lock.lock(); - try { + CacheKey cacheKey = new CacheKey(key_cust, keyNamespace); + + List keyList = activeKeysCache.get(cacheKey, key -> { + // On-demand loading of active keys from KeymetaTableAccessor only List activeEntries = new ArrayList<>(); - Bytes keyCust = new Bytes(key_cust); - Map> nsCache = cacheByNS.get(keyNamespace); - Map keyMap = nsCache != null ? nsCache.get(keyCust) : null; - if (keyMap != null) { - for (ManagedKeyData entry : keyMap.values()) { - if (entry.getKeyState() == ManagedKeyState.ACTIVE) { - activeEntries.add(entry); + // Try to load from KeymetaTableAccessor + if (keymetaAccessor != null) { + try { + List loadedKeys = keymetaAccessor.getActiveKeys(key_cust, keyNamespace); + for (ManagedKeyData keyData : loadedKeys) { + if (keyData.getKeyState() == ManagedKeyState.ACTIVE) { + activeEntries.add(keyData); } } + } catch (IOException | KeyException e) { + LOG.warn("Failed to load active keys from KeymetaTableAccessor for custodian: {} namespace: {}", + ManagedKeyProvider.encodeToStr(key_cust), keyNamespace, e); + } } - if (activeEntries.isEmpty()) { + return activeEntries; + }); + + // Return a random entry from active keys cache only + if (keyList == null || keyList.isEmpty()) { return null; } - return activeEntries.get((int) (Math.random() * activeEntries.size())); - } finally { - lock.unlock(); - } + return keyList.get((int) (Math.random() * keyList.size())); + } + + + + + + + + /** + * Invalidates all entries in the cache. + */ + public void invalidateAll() { + cache.invalidateAll(); + activeKeysCache.invalidateAll(); } + + } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index eeccb53f36b2..5ec51f083a9a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -120,7 +120,6 @@ import org.apache.hadoop.hbase.ipc.RpcServer; import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException; import org.apache.hadoop.hbase.ipc.ServerRpcController; -import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; import org.apache.hadoop.hbase.keymeta.ManagedKeyDataCache; import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.mob.MobFileCache; @@ -1451,7 +1450,7 @@ protected void handleReportForDutyResponse(final RegionServerStartupResponse c) } buildSystemKeyCache(); - managedKeyAccessor = new ManagedKeyAccessor(keymetaAdmin, new ManagedKeyDataCache()); + managedKeyDataCache = new ManagedKeyDataCache(this, keymetaAdmin); // hack! Maps DFSClient => RegionServer for logs. HDFS made this // config param for task trackers, but we can piggyback off of it. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java index 104daaf4e9bf..65e8aa5e66e2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java @@ -42,7 +42,7 @@ import org.apache.hadoop.hbase.client.AsyncClusterConnection; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; -import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; +import org.apache.hadoop.hbase.keymeta.ManagedKeyDataCache; import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.master.replication.OfflineTableReplicationQueueStorage; import org.apache.hadoop.hbase.replication.ReplicationException; @@ -373,7 +373,7 @@ public ChoreService getChoreService() { return null; } - @Override public ManagedKeyAccessor getManagedKeyAccessor() { + @Override public ManagedKeyDataCache getManagedKeyDataCache() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyAccessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyAccessor.java deleted file mode 100644 index 116b2bf6f4c4..000000000000 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyAccessor.java +++ /dev/null @@ -1,213 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hbase.keymeta; - -import static org.apache.hadoop.hbase.io.crypto.ManagedKeyData.KEY_SPACE_GLOBAL; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.clearInvocations; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import java.util.ArrayList; -import java.util.Arrays; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; -import org.apache.hadoop.hbase.HBaseConfiguration; -import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.Server; -import org.apache.hadoop.hbase.io.crypto.Encryption; -import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; -import org.apache.hadoop.hbase.io.crypto.MockManagedKeyProvider; -import org.apache.hadoop.hbase.testclassification.MasterTests; -import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; - -@Category({ MasterTests.class, SmallTests.class }) -public class TestManagedKeyAccessor { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestManagedKeyAccessor.class); - - private static final String ALIAS = "cust1"; - private static final byte[] CUST_ID = ALIAS.getBytes(); - - @Mock - private KeymetaTableAccessor keymetaAccessor; - @Mock - private ManagedKeyDataCache keyDataCache; - @Mock - private Server server; - - private ManagedKeyAccessor managedKeyAccessor; - private AutoCloseable closeableMocks; - private MockManagedKeyProvider managedKeyProvider; - protected Configuration conf = HBaseConfiguration.create(); - - @Before - public void setUp() { - conf.set(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, "true"); - conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockManagedKeyProvider.class.getName()); - - closeableMocks = MockitoAnnotations.openMocks(this); - managedKeyProvider = (MockManagedKeyProvider) Encryption.getKeyProvider(conf); - managedKeyProvider.initConfig(conf); - when(server.getConfiguration()).thenReturn(conf); - when(keymetaAccessor.getServer()).thenReturn(server); - managedKeyAccessor = new ManagedKeyAccessor(keymetaAccessor, keyDataCache); - } - - @After - public void tearDown() throws Exception { - closeableMocks.close(); - } - - @Test - public void testGetKeyNonExisting() throws Exception { - for (int i = 0; i < 2; ++i) { - ManagedKeyData keyData = managedKeyAccessor.getKey(CUST_ID, KEY_SPACE_GLOBAL, "abcd", - null); - verifyNonExisting(keyData); - } - } - - private void verifyNonExisting(ManagedKeyData keyData) throws Exception { - assertNull(keyData); - verify(keyDataCache).getEntry("abcd"); - verify(keymetaAccessor).getKey(CUST_ID, KEY_SPACE_GLOBAL, "abcd"); - verify(keymetaAccessor, never()).addKey(any()); - verify(keyDataCache, never()).addEntry(any()); - clearInvocations(keyDataCache, keymetaAccessor); - } - - @Test - public void testGetFromL1() throws Exception { - ManagedKeyData keyData = managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); - when(keyDataCache.getEntry(any())).thenReturn(keyData); - - ManagedKeyData result = - managedKeyAccessor.getKey(CUST_ID, KEY_SPACE_GLOBAL, keyData.getKeyMetadata(), null); - - assertEquals(keyData, result); - verify(keyDataCache).getEntry(keyData.getKeyMetadata()); - verify(keymetaAccessor, never()).getKey(any(), any(), any(String.class)); - verify(keymetaAccessor, never()).addKey(any()); - verify(keyDataCache, never()).addEntry(keyData); - } - - @Test - public void testGetFromL2() throws Exception { - ManagedKeyData keyData = managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); - when(keymetaAccessor.getKey(any(), any(), any(String.class))).thenReturn(keyData); - - ManagedKeyData result = - managedKeyAccessor.getKey(CUST_ID, KEY_SPACE_GLOBAL, keyData.getKeyMetadata(), null); - - assertEquals(keyData, result); - verify(keyDataCache).getEntry(keyData.getKeyMetadata()); - verify(keymetaAccessor).getKey(CUST_ID, KEY_SPACE_GLOBAL, keyData.getKeyMetadata()); - verify(keymetaAccessor, never()).addKey(any()); - verify(keyDataCache).addEntry(keyData); - } - - @Test - public void testGetFromProvider() throws Exception { - ManagedKeyData keyData = managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); - - ManagedKeyData result = - managedKeyAccessor.getKey(CUST_ID, KEY_SPACE_GLOBAL, keyData.getKeyMetadata(), null); - - assertEquals(keyData, result); - verify(keyDataCache).getEntry(keyData.getKeyMetadata()); - verify(keymetaAccessor).getKey(CUST_ID, KEY_SPACE_GLOBAL, keyData.getKeyMetadata()); - verify(keymetaAccessor).addKey(any()); - verify(keyDataCache).addEntry(keyData); - } - - @Test - public void testGetActiveKeyWhenMissing() throws Exception { - ManagedKeyData result = managedKeyAccessor.getAnActiveKey(CUST_ID, KEY_SPACE_GLOBAL); - - assertNull(result); - verify(keyDataCache).getRandomEntry(CUST_ID, KEY_SPACE_GLOBAL); - verify(keymetaAccessor).getActiveKeys(CUST_ID, KEY_SPACE_GLOBAL); - } - - @Test - public void testGetActiveKeyFromL1() throws Exception { - ManagedKeyData keyData = managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); - when(keyDataCache.getRandomEntry(any(), any())).thenReturn(keyData); - - ManagedKeyData result = managedKeyAccessor.getAnActiveKey(CUST_ID, KEY_SPACE_GLOBAL); - - assertEquals(keyData, result); - verify(keyDataCache).getRandomEntry(CUST_ID, KEY_SPACE_GLOBAL); - verify(keymetaAccessor, never()).getActiveKeys(any(), any()); - } - - @Test - public void testGetActiveKeyFromL2WithNoResults() throws Exception { - when(keymetaAccessor.getActiveKeys(any(), any())).thenReturn(new ArrayList<>()); - - ManagedKeyData result = managedKeyAccessor.getAnActiveKey(CUST_ID, KEY_SPACE_GLOBAL); - - assertNull(result); - verify(keyDataCache).getRandomEntry(CUST_ID, KEY_SPACE_GLOBAL); - verify(keymetaAccessor).getActiveKeys(CUST_ID, KEY_SPACE_GLOBAL); - } - - @Test - public void testGetActiveKeyFromL2WithSingleResult() throws Exception { - ManagedKeyData keyData = managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); - when(keyDataCache.getRandomEntry(any(), any())).thenReturn(null, keyData); - when(keymetaAccessor.getActiveKeys(any(), any())).thenReturn(Arrays.asList(keyData)); - - ManagedKeyData result = managedKeyAccessor.getAnActiveKey(CUST_ID, KEY_SPACE_GLOBAL); - - assertEquals(keyData, result); - verify(keyDataCache, times(2)).getRandomEntry(CUST_ID, KEY_SPACE_GLOBAL); - verify(keymetaAccessor).getActiveKeys(CUST_ID, KEY_SPACE_GLOBAL); - verify(keyDataCache).addEntry(keyData); - } - - @Test - public void testGetActiveKeyFromL2WithMultipleResults() throws Exception { - managedKeyProvider.setMultikeyGenMode(true); - ManagedKeyData keyData1 = managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); - ManagedKeyData keyData2 = managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); - when(keyDataCache.getRandomEntry(any(), any())).thenReturn(null, keyData1); - when(keymetaAccessor.getActiveKeys(any(), any())).thenReturn(Arrays.asList(keyData1, keyData2)); - - ManagedKeyData result = managedKeyAccessor.getAnActiveKey(CUST_ID, KEY_SPACE_GLOBAL); - - assertEquals(keyData1, result); - verify(keyDataCache, times(2)).getRandomEntry(CUST_ID, KEY_SPACE_GLOBAL); - verify(keymetaAccessor).getActiveKeys(CUST_ID, KEY_SPACE_GLOBAL); - verify(keyDataCache, times(2)).addEntry(any()); - } -} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java index 08a5da59311b..c3edd42263c2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java @@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.hadoop.hbase.io.crypto.MockManagedKeyProvider; import org.apache.hadoop.hbase.testclassification.MasterTests; @@ -40,6 +41,9 @@ import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import static org.mockito.Mockito.when; @Category({ MasterTests.class, SmallTests.class }) public class TestManagedKeyDataCache { @@ -50,16 +54,24 @@ public class TestManagedKeyDataCache { private static final String ALIAS = "cust1"; private static final byte[] CUST_ID = ALIAS.getBytes(); + @Mock + private Server server; + private final MockManagedKeyProvider managedKeyProvider = new MockManagedKeyProvider(); private ManagedKeyDataCache cache; protected Configuration conf = HBaseConfiguration.create(); @Before public void setUp() { + MockitoAnnotations.openMocks(this); + conf.set(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, "true"); conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockManagedKeyProvider.class.getName()); - cache = new ManagedKeyDataCache(); + // Configure the server mock to return the configuration + when(server.getConfiguration()).thenReturn(conf); + + cache = new ManagedKeyDataCache(server); managedKeyProvider.initConfig(conf); managedKeyProvider.setMultikeyGenMode(true); } @@ -70,70 +82,67 @@ public void testOperations() throws Exception { KEY_SPACE_GLOBAL); assertEquals(0, cache.getEntryCount()); - assertNull(cache.getEntry(globalKey1.getKeyMetadata())); + assertNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, globalKey1.getKeyMetadata(), null)); assertNull(cache.removeEntry(globalKey1.getKeyMetadata())); - cache.addEntry(globalKey1); + cache.addEntryForTesting(globalKey1); assertEntries(globalKey1); - cache.addEntry(globalKey1); + cache.addEntryForTesting(globalKey1); assertEntries(globalKey1); ManagedKeyData nsKey1 = managedKeyProvider.getManagedKey(CUST_ID, "namespace1"); - assertNull(cache.getEntry(nsKey1.getKeyMetadata())); - cache.addEntry(nsKey1); - assertEquals(nsKey1, cache.getEntry(nsKey1.getKeyMetadata())); - assertEquals(globalKey1, cache.getEntry(globalKey1.getKeyMetadata())); + assertNull(cache.getEntry(CUST_ID, "namespace1", nsKey1.getKeyMetadata(), null)); + cache.addEntryForTesting(nsKey1); + assertEquals(nsKey1, cache.getEntry(CUST_ID, "namespace1", nsKey1.getKeyMetadata(), null)); + assertEquals(globalKey1, cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, globalKey1.getKeyMetadata(), null)); assertEntries(nsKey1, globalKey1); ManagedKeyData globalKey2 = managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); - assertNull(cache.getEntry(globalKey2.getKeyMetadata())); - cache.addEntry(globalKey2); + assertNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, globalKey2.getKeyMetadata(), null)); + cache.addEntryForTesting(globalKey2); assertEntries(globalKey2, nsKey1, globalKey1); ManagedKeyData nsKey2 = managedKeyProvider.getManagedKey(CUST_ID, "namespace1"); - assertNull(cache.getEntry(nsKey2.getKeyMetadata())); - cache.addEntry(nsKey2); + assertNull(cache.getEntry(CUST_ID, "namespace1", nsKey2.getKeyMetadata(), null)); + cache.addEntryForTesting(nsKey2); assertEntries(nsKey2, globalKey2, nsKey1, globalKey1); assertEquals(globalKey1, cache.removeEntry(globalKey1.getKeyMetadata())); - assertNull(cache.getEntry(globalKey1.getKeyMetadata())); + assertNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, globalKey1.getKeyMetadata(), null)); assertEntries(nsKey2, globalKey2, nsKey1); assertEquals(nsKey2, cache.removeEntry(nsKey2.getKeyMetadata())); - assertNull(cache.getEntry(nsKey2.getKeyMetadata())); + assertNull(cache.getEntry(CUST_ID, "namespace1", nsKey2.getKeyMetadata(), null)); assertEntries(globalKey2, nsKey1); assertEquals(nsKey1, cache.removeEntry(nsKey1.getKeyMetadata())); - assertNull(cache.getEntry(nsKey1.getKeyMetadata())); + assertNull(cache.getEntry(CUST_ID, "namespace1", nsKey1.getKeyMetadata(), null)); assertEntries(globalKey2); assertEquals(globalKey2, cache.removeEntry(globalKey2.getKeyMetadata())); - assertNull(cache.getEntry(globalKey2.getKeyMetadata())); + assertNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, globalKey2.getKeyMetadata(), null)); } @Test public void testRandomKeyGet() throws Exception{ assertNull(cache.getRandomEntry(CUST_ID, KEY_SPACE_GLOBAL)); + + // Since getRandomEntry only looks at active keys cache, and we don't have a way to add directly to it, + // we'll test that it returns null when no active keys are available List allKeys = new ArrayList<>(); for (int i = 0; i < 20; ++i) { ManagedKeyData keyData; keyData = managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); - cache.addEntry(keyData); + cache.addEntryForTesting(keyData); allKeys.add(keyData); keyData = managedKeyProvider.getManagedKey(CUST_ID, "namespace"); - cache.addEntry(keyData); + cache.addEntryForTesting(keyData); allKeys.add(keyData); } - Set keys = new HashSet<>(); - for (int i = 0; i < 10; ++i) { - keys.add(cache.getRandomEntry(CUST_ID, KEY_SPACE_GLOBAL)); - } - assertTrue(keys.size() > 1); - assertTrue(keys.size() <= 10); - for (ManagedKeyData key: keys) { - assertEquals(KEY_SPACE_GLOBAL, key.getKeyNamespace()); - } + + // getRandomEntry should return null since no keys are in the active keys cache + assertNull(cache.getRandomEntry(CUST_ID, KEY_SPACE_GLOBAL)); for(ManagedKeyData key: allKeys) { assertEquals(key, cache.removeEntry(key.getKeyMetadata())); @@ -145,15 +154,15 @@ public void testRandomKeyGet() throws Exception{ public void testRandomKeyGetNoActive() throws Exception { managedKeyProvider.setMockedKeyState(ALIAS, FAILED); for (int i = 0; i < 20; ++i) { - cache.addEntry(managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL)); + cache.addEntryForTesting(managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL)); } assertNull(cache.getRandomEntry(CUST_ID, KEY_SPACE_GLOBAL)); } - private void assertEntries(ManagedKeyData... keys) { + private void assertEntries(ManagedKeyData... keys) throws Exception { assertEquals(keys.length, cache.getEntryCount()); for (ManagedKeyData key: keys) { - assertEquals(key, cache.getEntry(key.getKeyMetadata())); + assertEquals(key, cache.getEntry(key.getKeyCustodian(), key.getKeyNamespace(), key.getKeyMetadata(), null)); } } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java index 936068ff8b3d..013648d41c4d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java @@ -39,7 +39,7 @@ import org.apache.hadoop.hbase.executor.ExecutorService; import org.apache.hadoop.hbase.favored.FavoredNodesManager; import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; -import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; +import org.apache.hadoop.hbase.keymeta.ManagedKeyDataCache; import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.master.assignment.AssignmentManager; import org.apache.hadoop.hbase.master.hbck.HbckChore; @@ -123,7 +123,7 @@ public ChoreService getChoreService() { return null; } - @Override public ManagedKeyAccessor getManagedKeyAccessor() { + @Override public ManagedKeyDataCache getManagedKeyDataCache() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java index cb7c1ffb7e00..b63bbbaac8be 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java @@ -53,7 +53,7 @@ import org.apache.hadoop.hbase.ipc.HBaseRpcController; import org.apache.hadoop.hbase.ipc.RpcServerInterface; import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; -import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; +import org.apache.hadoop.hbase.keymeta.ManagedKeyDataCache; import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.mob.MobFileCache; import org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager; @@ -563,7 +563,7 @@ public ChoreService getChoreService() { return null; } - @Override public ManagedKeyAccessor getManagedKeyAccessor() { + @Override public ManagedKeyDataCache getManagedKeyDataCache() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java index fc48cd57077d..ed11d69420ac 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java @@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; -import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; +import org.apache.hadoop.hbase.keymeta.ManagedKeyDataCache; import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.monitoring.MonitoredTask; import org.apache.hadoop.hbase.monitoring.TaskGroup; @@ -335,7 +335,7 @@ public ActiveMasterManager getActiveMasterManager() { return null; } - @Override public ManagedKeyAccessor getManagedKeyAccessor() { + @Override public ManagedKeyDataCache getManagedKeyDataCache() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java index 10241520be2d..ae507f32fd58 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java @@ -39,7 +39,7 @@ import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; -import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; +import org.apache.hadoop.hbase.keymeta.ManagedKeyDataCache; import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.replication.ReplicationException; @@ -222,7 +222,7 @@ public Connection getConnection() { return null; } - @Override public ManagedKeyAccessor getManagedKeyAccessor() { + @Override public ManagedKeyDataCache getManagedKeyDataCache() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStorePerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStorePerformanceEvaluation.java index f7e5a5e56cf4..18b7744e17cb 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStorePerformanceEvaluation.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStorePerformanceEvaluation.java @@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.io.util.MemorySizeUtil; import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; -import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; +import org.apache.hadoop.hbase.keymeta.ManagedKeyDataCache; import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.master.region.MasterRegion; import org.apache.hadoop.hbase.master.region.MasterRegionFactory; @@ -65,7 +65,7 @@ public ServerName getServerName() { return null; } - @Override public ManagedKeyAccessor getManagedKeyAccessor() { + @Override public ManagedKeyDataCache getManagedKeyDataCache() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java index 7f299f0f9842..6ed289ab96d1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java @@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.io.hfile.ResizableBlockCache; import org.apache.hadoop.hbase.io.util.MemorySizeUtil; import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; -import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; +import org.apache.hadoop.hbase.keymeta.ManagedKeyDataCache; import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.regionserver.HeapMemoryManager.TunerContext; import org.apache.hadoop.hbase.regionserver.HeapMemoryManager.TunerResult; @@ -844,7 +844,7 @@ public ChoreService getChoreService() { return null; } - @Override public ManagedKeyAccessor getManagedKeyAccessor() { + @Override public ManagedKeyDataCache getManagedKeyDataCache() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java index bf4f8c40ff02..adc420409527 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java @@ -56,7 +56,7 @@ import org.apache.hadoop.hbase.ipc.ServerRpcController; import org.apache.hadoop.hbase.ipc.SimpleRpcServer; import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; -import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; +import org.apache.hadoop.hbase.keymeta.ManagedKeyDataCache; import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.regionserver.RegionServerServices; @@ -357,7 +357,7 @@ public ChoreService getChoreService() { return null; } - @Override public ManagedKeyAccessor getManagedKeyAccessor() { + @Override public ManagedKeyDataCache getManagedKeyDataCache() { return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java index 9181a30e9d50..9257b78d6ce7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java @@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.client.AsyncClusterConnection; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.keymeta.KeymetaAdmin; -import org.apache.hadoop.hbase.keymeta.ManagedKeyAccessor; +import org.apache.hadoop.hbase.keymeta.ManagedKeyDataCache; import org.apache.hadoop.hbase.keymeta.SystemKeyCache; import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; @@ -107,7 +107,7 @@ public ChoreService getChoreService() { return null; } - @Override public ManagedKeyAccessor getManagedKeyAccessor() { + @Override public ManagedKeyDataCache getManagedKeyDataCache() { return null; } From 670ba17f7a4ace7a7f57280b1b6e0db2581d4ec3 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Wed, 9 Jul 2025 15:15:08 +0530 Subject: [PATCH 62/70] Improvements to prior Cursor changes. --- .../org/apache/hadoop/hbase/HConstants.java | 12 +- .../hadoop/hbase/io/crypto/Encryption.java | 7 + .../hbase/io/crypto/ManagedKeyProvider.java | 3 +- .../hbase/io/crypto/ManagedKeyState.java | 10 + .../io/crypto/MockManagedKeyProvider.java | 9 +- .../apache/hadoop/hbase/HBaseServerBase.java | 3 +- .../java/org/apache/hadoop/hbase/Server.java | 13 +- .../hbase/keymeta/KeyManagementBase.java | 117 +++- .../hbase/keymeta/KeymetaAdminImpl.java | 29 +- .../hbase/keymeta/ManagedKeyDataCache.java | 297 ++++----- .../hbase/regionserver/HRegionServer.java | 2 +- .../hadoop/hbase/security/SecurityUtil.java | 12 + .../ManagedKeyProviderInterceptor.java | 75 +++ .../keymeta/TestManagedKeyDataCache.java | 570 +++++++++++++++--- 14 files changed, 862 insertions(+), 297 deletions(-) create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyProviderInterceptor.java diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java index 42e4b61defdb..08ed56a33e4c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java @@ -1333,14 +1333,14 @@ public enum OperationStatusCode { public static final boolean CRYPTO_MANAGED_KEYS_DYNAMIC_LOOKUP_DEFAULT_ENABLED = true; /** Maximum number of entries in the managed key data cache. */ - public static final String CRYPTO_MANAGED_KEYS_CACHE_MAX_SIZE_CONF_KEY = - "hbase.crypto.managed_keys.cache.max_size"; - public static final int CRYPTO_MANAGED_KEYS_CACHE_MAX_SIZE_DEFAULT = 1000; + public static final String CRYPTO_MANAGED_KEYS_L1_CACHE_MAX_ENTRIES_CONF_KEY = + "hbase.crypto.managed_keys.l1_cache.max_entries"; + public static final int CRYPTO_MANAGED_KEYS_L1_CACHE_MAX_ENTRIES_DEFAULT = 1000; /** Maximum number of entries in the managed key active keys cache. */ - public static final String CRYPTO_MANAGED_KEYS_ACTIVE_CACHE_MAX_SIZE_CONF_KEY = - "hbase.crypto.managed_keys.active_cache.max_size"; - public static final int CRYPTO_MANAGED_KEYS_ACTIVE_CACHE_MAX_SIZE_DEFAULT = 100; + public static final String CRYPTO_MANAGED_KEYS_L1_ACTIVE_CACHE_MAX_NS_ENTRIES_CONF_KEY = + "hbase.crypto.managed_keys.l1_active_cache.max_ns_entries"; + public static final int CRYPTO_MANAGED_KEYS_L1_ACTIVE_CACHE_MAX_NS_ENTRIES_DEFAULT = 100; /** Configuration key for setting RPC codec class name */ public static final String RPC_CODEC_CONF_KEY = "hbase.client.rpc.codec"; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java index 175df69011f0..336c440c4493 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java @@ -33,8 +33,10 @@ import javax.crypto.spec.PBEKeySpec; import javax.crypto.spec.SecretKeySpec; import org.apache.commons.io.IOUtils; +import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.io.crypto.aes.AES; import org.apache.hadoop.hbase.util.Bytes; @@ -590,6 +592,11 @@ public static KeyProvider getKeyProvider(Configuration conf) { } } + @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.UNITTEST) + public static void clearKeyProviderCache() { + keyProviderCache.clear(); + } + public static void incrementIv(byte[] iv) { incrementIv(iv, 1); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java index 99f08d7aca5f..27cd91380d6e 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyProvider.java @@ -19,6 +19,7 @@ import java.io.IOException; import java.util.Base64; +import edu.umd.cs.findbugs.annotations.NonNull; import org.apache.hadoop.conf.Configuration; import org.apache.yetus.audience.InterfaceAudience; @@ -70,7 +71,7 @@ public interface ManagedKeyProvider extends KeyProvider { * {@code null} * @throws IOException if an error occurs while generating the key */ - ManagedKeyData unwrapKey(String keyMetaData, byte[] wrappedKey) throws IOException; + @NonNull ManagedKeyData unwrapKey(String keyMetaData, byte[] wrappedKey) throws IOException; /** * Decode the given key custodian which is encoded as Base64 string. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyState.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyState.java index 7c2fdbccc687..ea64355fc56b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyState.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyState.java @@ -67,4 +67,14 @@ public static ManagedKeyState forValue(byte val) { } return lookupByVal.get(val); } + + /** + * This is used to determine if a key is usable for encryption/decryption. + * + * @param state The key state to check + * @return true if the key state is ACTIVE or INACTIVE, false otherwise + */ + public static boolean isUsable(ManagedKeyState state) { + return state == ACTIVE || state == INACTIVE; + } } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java index 561c66a559e3..a3397f96df70 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockManagedKeyProvider.java @@ -64,16 +64,17 @@ public ManagedKeyData getManagedKey(byte[] key_cust, String key_namespace) @Override public ManagedKeyData unwrapKey(String keyMetadata, byte[] wrappedKey) throws IOException { + String[] meta_toks = keyMetadata.split(":"); if (allGeneratedKeys.containsKey(keyMetadata)) { - String[] meta_toks = keyMetadata.split(":"); ManagedKeyState keyState = this.keyState.get(meta_toks[1]); ManagedKeyData managedKeyData = - new ManagedKeyData(meta_toks[0].getBytes(), ManagedKeyData.KEY_SPACE_GLOBAL, + new ManagedKeyData(meta_toks[0].getBytes(), meta_toks[2], allGeneratedKeys.get(keyMetadata), keyState == null ? ManagedKeyState.ACTIVE : keyState, keyMetadata); return registerKeyData(meta_toks[1], managedKeyData); } - return null; + return new ManagedKeyData(meta_toks[0].getBytes(), meta_toks[2], + null, ManagedKeyState.FAILED, keyMetadata); } public ManagedKeyData getLastGeneratedKeyData(String alias, String keyNamespace) { @@ -159,7 +160,7 @@ private ManagedKeyData getKey(byte[] key_cust, String alias, String key_namespac allGeneratedKeys.put(partialMetadata, key); allGeneratedKeys.put(keyMetadata, key); ManagedKeyData managedKeyData = - new ManagedKeyData(key_cust, ManagedKeyData.KEY_SPACE_GLOBAL, key, + new ManagedKeyData(key_cust, key_namespace, key, keyState == null ? ManagedKeyState.ACTIVE : keyState, keyMetadata); return registerKeyData(alias, managedKeyData); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java index 1d770db087eb..12cc7433e7be 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java @@ -65,6 +65,7 @@ import org.apache.hadoop.hbase.regionserver.MemStoreLAB; import org.apache.hadoop.hbase.regionserver.RegionServerCoprocessorHost; import org.apache.hadoop.hbase.regionserver.ShutdownHook; +import org.apache.hadoop.hbase.security.SecurityUtil; import org.apache.hadoop.hbase.security.Superusers; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.UserProvider; @@ -430,7 +431,7 @@ public SystemKeyCache getSystemKeyCache() { } protected void buildSystemKeyCache() throws IOException { - if (systemKeyCache == null && Server.isKeyManagementEnabled(this)) { + if (systemKeyCache == null && SecurityUtil.isKeyManagementEnabled(conf)) { systemKeyCache = SystemKeyCache.createCache(new SystemKeyAccessor(this)); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java index 8ca25419472f..c0ddad9109ad 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java @@ -122,15 +122,4 @@ default FileSystem getFileSystem() { default boolean isStopping() { return false; } - - /** - * From the given server, determine if key management is enabbled. - * @return true if key management is enabled - */ - static boolean isKeyManagementEnabled(Server server) { - return server.getConfiguration() - .getBoolean(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, - HConstants.CRYPTO_MANAGED_KEYS_DEFAULT_ENABLED); - } - -} +} \ No newline at end of file diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java index 27d1e0c1e0cf..a9aea5e8fef2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java @@ -18,12 +18,19 @@ package org.apache.hadoop.hbase.keymeta; import java.io.IOException; +import java.security.KeyException; +import java.util.HashSet; +import java.util.Set; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.io.crypto.Encryption; import org.apache.hadoop.hbase.io.crypto.KeyProvider; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; +import org.apache.hadoop.hbase.security.SecurityUtil; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -35,19 +42,43 @@ public abstract class KeyManagementBase { protected static final Logger LOG = LoggerFactory.getLogger(KeyManagementBase.class); - private final Server server; + private Server server; + private final Configuration configuration; - private Boolean keyManagementEnabled; + private Boolean isDynamicLookupEnabled; + private Boolean isKeyManagementEnabled; private Integer perCustNamespaceActiveKeyCount; + /** + * Construct with a server instance. Configuration is derived from the server. + * + * @param server the server instance + */ public KeyManagementBase(Server server) { + this(server.getConfiguration()); this.server = server; } + /** + * Construct with a custom configuration and no server. + * + * @param configuration the configuration instance + */ + public KeyManagementBase(Configuration configuration) { + if (configuration == null) { + throw new IllegalArgumentException("Configuration must be non-null"); + } + this.configuration = configuration; + } + protected Server getServer() { return server; } + protected Configuration getConfiguration() { + return configuration; + } + /** * A utility method for getting the managed key provider. * @return the key provider @@ -55,7 +86,7 @@ protected Server getServer() { * instance of ManagedKeyProvider */ protected ManagedKeyProvider getKeyProvider() { - KeyProvider provider = Encryption.getKeyProvider(getServer().getConfiguration()); + KeyProvider provider = Encryption.getKeyProvider(getConfiguration()); if (!(provider instanceof ManagedKeyProvider)) { throw new RuntimeException("KeyProvider: " + provider.getClass().getName() + " expected to be of type ManagedKeyProvider"); @@ -63,25 +94,17 @@ protected ManagedKeyProvider getKeyProvider() { return (ManagedKeyProvider) provider; } - /** - * A utility method for checking if key management is enabled. - * @return true if key management is enabled - */ - protected boolean isKeyManagementEnabled() { - if (keyManagementEnabled == null) { - keyManagementEnabled = Server.isKeyManagementEnabled(getServer()); - } - return keyManagementEnabled; - } - /** * A utility method for checking if dynamic lookup is enabled. * @return true if dynamic lookup is enabled */ protected boolean isDynamicLookupEnabled() { - return getServer().getConfiguration().getBoolean( - HConstants.CRYPTO_MANAGED_KEYS_DYNAMIC_LOOKUP_ENABLED_CONF_KEY, - HConstants.CRYPTO_MANAGED_KEYS_DYNAMIC_LOOKUP_DEFAULT_ENABLED); + if (isDynamicLookupEnabled == null) { + isDynamicLookupEnabled = getConfiguration().getBoolean( + HConstants.CRYPTO_MANAGED_KEYS_DYNAMIC_LOOKUP_ENABLED_CONF_KEY, + HConstants.CRYPTO_MANAGED_KEYS_DYNAMIC_LOOKUP_DEFAULT_ENABLED); + } + return isDynamicLookupEnabled; } /** @@ -89,14 +112,21 @@ protected boolean isDynamicLookupEnabled() { * @throws IOException if key management is not enabled. */ protected void assertKeyManagementEnabled() throws IOException { - if (! isKeyManagementEnabled()) { + if (!isKeyManagementEnabled()) { throw new IOException("Key manage is currently not enabled in HBase configuration"); } } + protected boolean isKeyManagementEnabled() { + if (isKeyManagementEnabled == null) { + isKeyManagementEnabled = SecurityUtil.isKeyManagementEnabled(getConfiguration()); + } + return isKeyManagementEnabled; + } + protected int getPerCustodianNamespaceActiveKeyConfCount() throws IOException { if (perCustNamespaceActiveKeyCount == null) { - perCustNamespaceActiveKeyCount = getServer().getConfiguration().getInt( + perCustNamespaceActiveKeyCount = getConfiguration().getInt( HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_COUNT, HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_DEFAULT_COUNT); } @@ -106,4 +136,53 @@ protected int getPerCustodianNamespaceActiveKeyConfCount() throws IOException { } return perCustNamespaceActiveKeyCount; } + + /** + * Retrieves specified number of managed keys from the key provider. An attempt is made to + * retrieve the specified number of keys, but the real number of keys retrieved may be less than + * the specified number if the key provider is not capable of producing multiple active keys. If + * existing keys are provided, it will be used to ensure that keys retrieved are not the same as + * those that are already retrieved. + * + * @param encKeyCust the encoded key custodian + * @param key_cust the key custodian + * @param keyNamespace the key namespace + * @param nKeysToRetrieve the number of keys to retrieve + * @param existingKeys the existing keys, typically the active keys already retrieved from the + * key provider. + * @return the retrieved keys + * @throws IOException if an error occurs + * @throws KeyException if an error occurs + */ + protected Set retrieveManagedKeys(String encKeyCust, byte[] key_cust, + String keyNamespace, int nKeysToRetrieve, Set existingKeys) + throws IOException, KeyException { + Set retrievedKeys = new HashSet<>(nKeysToRetrieve); + ManagedKeyProvider provider = getKeyProvider(); + for (int i = 0; i < nKeysToRetrieve; ++i) { + ManagedKeyData pbeKey = provider.getManagedKey(key_cust, keyNamespace); + if (pbeKey == null) { + throw new IOException("Invalid null managed key received from key provider"); + } + if (retrievedKeys.contains(pbeKey) || existingKeys.contains(pbeKey)) { + // This typically means, the key provider is not capable of producing multiple active keys. + LOG.info("enableManagedKeys: specified (custodian: {}, namespace: {}) is configured " + + " to have {} active keys, but received only {} unique keys.", + encKeyCust, keyNamespace, existingKeys.size() + nKeysToRetrieve, + existingKeys.size() + retrievedKeys.size()); + break; + } + retrievedKeys.add(pbeKey); + LOG.info("enableManagedKeys: got managed key with status: {} and metadata: {} for " + + "(custodian: {}, namespace: {})", pbeKey.getKeyState(), pbeKey.getKeyMetadata(), + encKeyCust, keyNamespace); + if (pbeKey.getKeyState() != ManagedKeyState.ACTIVE) { + LOG.info("enableManagedKeys: received non-ACTIVE key with status: {} with metadata: {} for " + + "(custodian: {}, namespace: {})", + pbeKey.getKeyState(), pbeKey.getKeyMetadata(), encKeyCust, keyNamespace); + break; + } + } + return retrievedKeys; + } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java index fa54b05478e1..61bb2062e9e7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java @@ -58,34 +58,11 @@ public List enableKeyManagement(String keyCust, String keyNamesp return activeKeys; } - Set existingKeys = new HashSet<>(activeKeys); int nKeysToRetrieve = perCustNamespaceActiveKeyConfCount - activeKeys.size(); - Set retrievedKeys = new HashSet<>(nKeysToRetrieve); - ManagedKeyProvider provider = getKeyProvider(); - for (int i = 0; i < nKeysToRetrieve; ++i) { - ManagedKeyData pbeKey = provider.getManagedKey(key_cust, keyNamespace); - if (pbeKey == null) { - throw new IOException("Invalid null managed key received from key provider"); - } - if (retrievedKeys.contains(pbeKey) || existingKeys.contains(pbeKey)) { - // This typically means, the key provider is not capable of producing multiple active keys. - LOG.info("enableManagedKeys: specified (custodian: {}, namespace: {}) is configured " - + " to have {} active keys, but received only {} unique keys.", - keyCust, keyNamespace, perCustNamespaceActiveKeyConfCount, - activeKeys.size() + retrievedKeys.size()); - break; - } - retrievedKeys.add(pbeKey); - LOG.info("enableManagedKeys: got managed key with status: {} and metadata: {} for " - + "(custodian: {}, namespace: {})", pbeKey.getKeyState(), pbeKey.getKeyMetadata(), - keyCust, keyNamespace); + Set retrievedKeys = retrieveManagedKeys( + keyCust, key_cust, keyNamespace, nKeysToRetrieve, new HashSet<>(activeKeys)); + for (ManagedKeyData pbeKey : retrievedKeys) { addKey(pbeKey); - if (pbeKey.getKeyState() != ManagedKeyState.ACTIVE) { - LOG.info("enableManagedKeys: received non-ACTIVE key with status: {} with metadata: {} for " - + "(custodian: {}, namespace: {})", - pbeKey.getKeyState(), pbeKey.getKeyMetadata(), keyCust, keyNamespace); - break; - } } return retrievedKeys.stream().toList(); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java index 5853019719a8..3359d57ca1ef 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java @@ -20,17 +20,14 @@ import java.io.IOException; import java.security.KeyException; import java.util.ArrayList; -import java.util.HashMap; +import java.util.HashSet; import java.util.List; -import java.util.Map; import java.util.Objects; -import java.util.function.Function; +import java.util.Set; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.Server; -import org.apache.hadoop.hbase.io.crypto.Encryption; -import org.apache.hadoop.hbase.io.crypto.KeyProvider; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; @@ -43,37 +40,41 @@ import com.github.benmanes.caffeine.cache.Caffeine; /** - * In-memory cache for ManagedKeyData entries, using key metadata as the cache key. - * Uses two independent Caffeine caches: one for general key data and one for active keys only - * with hierarchical structure for efficient random key selection. + * In-memory cache for ManagedKeyData entries, using key metadata as the cache key. Uses two + * independent Caffeine caches: one for general key data and one for active keys only with + * hierarchical structure for efficient random key selection. */ @InterfaceAudience.Private public class ManagedKeyDataCache extends KeyManagementBase { private static final Logger LOG = LoggerFactory.getLogger(ManagedKeyDataCache.class); - private final Cache cache; - private final Cache> activeKeysCache; + private Cache cache; + private Cache> activeKeysCache; private final KeymetaTableAccessor keymetaAccessor; /** * Composite key for active keys cache containing custodian and namespace. + * NOTE: Pair won't work out of the box because it won't work with byte[] as is. */ - private static class CacheKey { + @InterfaceAudience.LimitedPrivate({ HBaseInterfaceAudience.UNITTEST }) + public static class ActiveKeysCacheKey { private final byte[] custodian; private final String namespace; - public CacheKey(byte[] custodian, String namespace) { + public ActiveKeysCacheKey(byte[] custodian, String namespace) { this.custodian = custodian; this.namespace = namespace; } @Override public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null || getClass() != obj.getClass()) return false; - CacheKey cacheKey = (CacheKey) obj; + if (this == obj) + return true; + if (obj == null || getClass() != obj.getClass()) + return false; + ActiveKeysCacheKey cacheKey = (ActiveKeysCacheKey) obj; return Bytes.equals(custodian, cacheKey.custodian) && - Objects.equals(namespace, cacheKey.namespace); + Objects.equals(namespace, cacheKey.namespace); } @Override @@ -82,219 +83,247 @@ public int hashCode() { } } - public ManagedKeyDataCache(Server server) { - this(server, null); - } - - public ManagedKeyDataCache(Server server, KeymetaTableAccessor keymetaAccessor) { - super(server); + /** + * Constructs the ManagedKeyDataCache with the given configuration and keymeta accessor. When + * keymetaAccessor is null, L2 lookup is disabled and dynamic lookup is enabled. + * + * @param conf The configuration, can't be null. + * @param keymetaAccessor The keymeta accessor, can be null. + */ + public ManagedKeyDataCache(Configuration conf, KeymetaTableAccessor keymetaAccessor) { + super(conf); this.keymetaAccessor = keymetaAccessor; + if (keymetaAccessor == null) { + conf.setBoolean(HConstants.CRYPTO_MANAGED_KEYS_DYNAMIC_LOOKUP_ENABLED_CONF_KEY, true); + } - Configuration conf = server.getConfiguration(); - int maxSize = conf.getInt(HConstants.CRYPTO_MANAGED_KEYS_CACHE_MAX_SIZE_CONF_KEY, - HConstants.CRYPTO_MANAGED_KEYS_CACHE_MAX_SIZE_DEFAULT); - int activeKeysMaxSize = conf.getInt(HConstants.CRYPTO_MANAGED_KEYS_ACTIVE_CACHE_MAX_SIZE_CONF_KEY, - HConstants.CRYPTO_MANAGED_KEYS_ACTIVE_CACHE_MAX_SIZE_DEFAULT); - + int maxEntries = conf.getInt( + HConstants.CRYPTO_MANAGED_KEYS_L1_CACHE_MAX_ENTRIES_CONF_KEY, + HConstants.CRYPTO_MANAGED_KEYS_L1_CACHE_MAX_ENTRIES_DEFAULT); + int activeKeysMaxEntries = conf.getInt( + HConstants.CRYPTO_MANAGED_KEYS_L1_ACTIVE_CACHE_MAX_NS_ENTRIES_CONF_KEY, + HConstants.CRYPTO_MANAGED_KEYS_L1_ACTIVE_CACHE_MAX_NS_ENTRIES_DEFAULT); this.cache = Caffeine.newBuilder() - .maximumSize(maxSize) + .maximumSize(maxEntries) .build(); - this.activeKeysCache = Caffeine.newBuilder() - .maximumSize(activeKeysMaxSize) + .maximumSize(activeKeysMaxEntries) .build(); } - - /** - * Retrieves an entry from the cache, loading it from KeymetaTableAccessor if not present. - * This method uses a lambda function to automatically load missing entries. + * Retrieves an entry from the cache, loading it from L2 if KeymetaTableAccessor is available. + * When L2 is not available, it will try to load from provider, unless dynamic lookup is disabled. * - * @param key_cust the key custodian + * @param key_cust the key custodian * @param keyNamespace the key namespace - * @param keyMetadata the key metadata of the entry to be retrieved - * @param wrappedKey The DEK key material encrypted with the corresponding KEK, if available. + * @param keyMetadata the key metadata of the entry to be retrieved + * @param wrappedKey The DEK key material encrypted with the corresponding + * KEK, if available. * @return the corresponding ManagedKeyData entry, or null if not found - * @throws IOException if an error occurs while loading from KeymetaTableAccessor + * @throws IOException if an error occurs while loading from KeymetaTableAccessor * @throws KeyException if an error occurs while loading from KeymetaTableAccessor */ public ManagedKeyData getEntry(byte[] key_cust, String keyNamespace, String keyMetadata, byte[] wrappedKey) throws IOException, KeyException { - return cache.get(keyMetadata, metadata -> { + ManagedKeyData entry = cache.get(keyMetadata, metadata -> { // First check if it's in the active keys cache - ManagedKeyData activeKey = getFromActiveKeysCache(key_cust, keyNamespace, keyMetadata); - if (activeKey != null) { - // Found in active cache, add to main cache and return - cache.put(metadata, activeKey); - return activeKey; - } + ManagedKeyData keyData = getFromActiveKeysCache(key_cust, keyNamespace, keyMetadata); - // First try to load from KeymetaTableAccessor - if (keymetaAccessor != null) { + // Try to load from L2 + if (keyData == null && keymetaAccessor != null) { try { - ManagedKeyData keyData = keymetaAccessor.getKey(key_cust, keyNamespace, metadata); - if (keyData != null) { - return keyData; - } - } catch (IOException | KeyException e) { + keyData = keymetaAccessor.getKey(key_cust, keyNamespace, metadata); + } catch (IOException | KeyException | RuntimeException e) { LOG.warn("Failed to load key from KeymetaTableAccessor for metadata: {}", metadata, e); } } - // If not found in KeymetaTableAccessor and dynamic lookup is enabled, try with Key Provider - if (isDynamicLookupEnabled()) { + // If not found in L2 and dynamic lookup is enabled, try with Key Provider + if (keyData == null && isDynamicLookupEnabled()) { try { ManagedKeyProvider provider = getKeyProvider(); - ManagedKeyData keyData = provider.unwrapKey(metadata, wrappedKey); - if (keyData != null) { - LOG.info("Got key data with status: {} and metadata: {} for prefix: {}", + keyData = provider.unwrapKey(metadata, wrappedKey); + LOG.info("Got key data with status: {} and metadata: {} for prefix: {}", keyData.getKeyState(), keyData.getKeyMetadata(), ManagedKeyProvider.encodeToStr(key_cust)); - // Add to KeymetaTableAccessor for future L2 lookups - if (keymetaAccessor != null) { - try { - keymetaAccessor.addKey(keyData); - } catch (IOException e) { - LOG.warn("Failed to add key to KeymetaTableAccessor for metadata: {}", metadata, e); - } + // Add to KeymetaTableAccessor for future L2 lookups + if (keymetaAccessor != null) { + try { + keymetaAccessor.addKey(keyData); + } catch (IOException | RuntimeException e) { + LOG.warn("Failed to add key to KeymetaTableAccessor for metadata: {}", metadata, e); } - return keyData; } - } catch (Exception e) { + } catch (IOException | RuntimeException e) { LOG.warn("Failed to load key from provider for metadata: {}", metadata, e); } } - LOG.info("Failed to get key data with metadata: {} for prefix: {}", - metadata, ManagedKeyProvider.encodeToStr(key_cust)); - return null; + if (keyData == null) { + keyData = new ManagedKeyData(key_cust, keyNamespace, null, ManagedKeyState.FAILED, keyMetadata); + } + + if (ManagedKeyState.isUsable(keyData.getKeyState())) { + LOG.info("Failed to get usable key data with metadata: {} for prefix: {}", + metadata, ManagedKeyProvider.encodeToStr(key_cust)); + } + return keyData; }); + if (ManagedKeyState.isUsable(entry.getKeyState())) { + return entry; + } + return null; } /** - * Retrieves a key from the active keys cache using 2-level lookup. + * Retrieves an existing key from the active keys. * - * @param key_cust the key custodian + * @param key_cust the key custodian * @param keyNamespace the key namespace - * @param keyMetadata the key metadata + * @param keyMetadata the key metadata * @return the ManagedKeyData if found, null otherwise */ private ManagedKeyData getFromActiveKeysCache(byte[] key_cust, String keyNamespace, String keyMetadata) { - CacheKey cacheKey = new CacheKey(key_cust, keyNamespace); + ActiveKeysCacheKey cacheKey = new ActiveKeysCacheKey(key_cust, keyNamespace); List keyList = activeKeysCache.getIfPresent(cacheKey); - if (keyList == null) { - return null; - } - - for (ManagedKeyData keyData : keyList) { - if (keyData.getKeyMetadata().equals(keyMetadata)) { - return keyData; + if (keyList != null) { + for (ManagedKeyData keyData : keyList) { + if (keyData.getKeyMetadata().equals(keyMetadata)) { + return keyData; + } } } return null; } /** - * Removes an entry from the cache based on its key metadata. - * Removes from both the main cache and the active keys cache. + * Removes an entry from generic cache based on its key metadata. * * @param keyMetadata the key metadata of the entry to be removed * @return the removed ManagedKeyData entry, or null if not found */ public ManagedKeyData removeEntry(String keyMetadata) { - ManagedKeyData removedEntry = cache.asMap().remove(keyMetadata); + return cache.asMap().remove(keyMetadata); + } - // Also remove from active keys cache if present - if (removedEntry != null) { - CacheKey cacheKey = new CacheKey(removedEntry.getKeyCustodian(), removedEntry.getKeyNamespace()); - List keyList = activeKeysCache.getIfPresent(cacheKey); - if (keyList != null) { - keyList.removeIf(keyData -> keyData.getKeyMetadata().equals(keyMetadata)); - // If the list is now empty, remove the entire cache entry - if (keyList.isEmpty()) { - activeKeysCache.invalidate(cacheKey); + public ManagedKeyData removeFromActiveKeys(byte[] key_cust, String key_namespace, + String keyMetadata) { + ActiveKeysCacheKey cacheKey = new ActiveKeysCacheKey(key_cust, key_namespace); + List keyList = activeKeysCache.getIfPresent(cacheKey); + if (keyList != null) { + // Find and remove the matching key + ManagedKeyData removedEntry = null; + for (int i = 0; i < keyList.size(); i++) { + if (keyList.get(i).getKeyMetadata().equals(keyMetadata)) { + removedEntry = keyList.remove(i); + break; } } + // If the list is now empty, remove the entire cache entry + if (keyList.isEmpty()) { + activeKeysCache.invalidate(cacheKey); + } + return removedEntry; } + return null; + } - return removedEntry; + /** + * @return the approximate number of entries in the main cache which is meant for general lookup + * by key metadata. + */ + public int getGenericCacheEntryCount() { + return (int) cache.estimatedSize(); } /** - * @return the approximate number of entries across both caches (main cache + active keys cache). - * This is an estimate and may include some double-counting if entries exist in both caches. + * @return the approximate number of entries in the active keys cache which is meant for random + * key selection. */ - public int getEntryCount() { - int mainCacheCount = (int) cache.estimatedSize(); + public int getActiveCacheEntryCount() { int activeCacheCount = 0; - - // Count entries in active keys cache for (List keyList : activeKeysCache.asMap().values()) { activeCacheCount += keyList.size(); } - - return mainCacheCount + activeCacheCount; - } - - /** - * Adds an entry to the cache directly. This method is primarily for testing purposes. - * - * @param keyData the ManagedKeyData entry to be added - */ - public void addEntryForTesting(ManagedKeyData keyData) { - cache.put(keyData.getKeyMetadata(), keyData); + return activeCacheCount; } /** - * Retrieves a random entry from the cache based on its key custodian, key namespace, and filters - * out entries with a status other than ACTIVE. This method also loads active keys from provider - * if not found in cache. + * Retrieves a random active entry from the cache based on its key custodian, key namespace, and + * filters out entries with a status other than ACTIVE. This method also loads active keys from + * provider if not found in cache. * * @param key_cust The key custodian. * @param keyNamespace the key namespace to search for * @return a random ManagedKeyData entry with the given custodian and ACTIVE status, or null if * not found */ - public ManagedKeyData getRandomEntry(byte[] key_cust, String keyNamespace) { - CacheKey cacheKey = new CacheKey(key_cust, keyNamespace); + public ManagedKeyData getAnActiveEntry(byte[] key_cust, String keyNamespace) { + ActiveKeysCacheKey cacheKey = new ActiveKeysCacheKey(key_cust, keyNamespace); List keyList = activeKeysCache.get(cacheKey, key -> { - // On-demand loading of active keys from KeymetaTableAccessor only List activeEntries = new ArrayList<>(); // Try to load from KeymetaTableAccessor if (keymetaAccessor != null) { try { List loadedKeys = keymetaAccessor.getActiveKeys(key_cust, keyNamespace); - for (ManagedKeyData keyData : loadedKeys) { - if (keyData.getKeyState() == ManagedKeyState.ACTIVE) { + activeEntries.addAll(loadedKeys); + } catch (IOException | KeyException | RuntimeException e) { + LOG.warn("Failed to load active keys from KeymetaTableAccessor for custodian: {} namespace: {}", + ManagedKeyProvider.encodeToStr(key_cust), keyNamespace, e); + } + } + + // If this happens, it means there were no keys in L2, which shouldn't happpen if L2 is + // enabled and keys were injected using control path for this custodian and namespace. In + // this case, we need to retrieve the keys from provider, but before that as a quick + // optimization, we check if there are any active keys in the other cache, which should be + // suitable for standalone tools. + if (activeEntries.isEmpty()) { + this.cache.asMap().values().stream() + .filter(keyData -> Bytes.equals(keyData.getKeyCustodian(), key_cust) + && keyData.getKeyNamespace().equals(keyNamespace) + && keyData.getKeyState() == ManagedKeyState.ACTIVE) + .forEach(keyData -> { activeEntries.add(keyData); + }); + } + + // As a last ditch effort, load active keys from provider. This typically happens for + // standalone tools. + if (activeEntries.isEmpty() && isDynamicLookupEnabled()) { + try { + String keyCust = ManagedKeyProvider.encodeToStr(key_cust); + Set retrievedKeys = retrieveManagedKeys(keyCust, key_cust, keyNamespace, + getPerCustodianNamespaceActiveKeyConfCount(), new HashSet<>()); + if (keymetaAccessor != null) { + for (ManagedKeyData keyData : retrievedKeys) { + keymetaAccessor.addKey(keyData); + } } - } - } catch (IOException | KeyException e) { - LOG.warn("Failed to load active keys from KeymetaTableAccessor for custodian: {} namespace: {}", - ManagedKeyProvider.encodeToStr(key_cust), keyNamespace, e); + retrievedKeys.stream().filter(keyData -> keyData.getKeyState() == ManagedKeyState.ACTIVE) + .forEach(activeEntries::add); + } catch (IOException | KeyException | RuntimeException e) { + LOG.warn("Failed to load active keys from provider for custodian: {} namespace: {}", + ManagedKeyProvider.encodeToStr(key_cust), keyNamespace, e); } } + // We don't mind returning an empty list here because it will help prevent future L2/provider + // lookups. return activeEntries; }); // Return a random entry from active keys cache only - if (keyList == null || keyList.isEmpty()) { - return null; - } + if (keyList.isEmpty()) { + return null; + } return keyList.get((int) (Math.random() * keyList.size())); } - - - - - - /** * Invalidates all entries in the cache. */ @@ -302,6 +331,4 @@ public void invalidateAll() { cache.invalidateAll(); activeKeysCache.invalidateAll(); } - - } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index 5ec51f083a9a..c9f231a5adef 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -1450,7 +1450,7 @@ protected void handleReportForDutyResponse(final RegionServerStartupResponse c) } buildSystemKeyCache(); - managedKeyDataCache = new ManagedKeyDataCache(this, keymetaAdmin); + managedKeyDataCache = new ManagedKeyDataCache(this.getConfiguration(), keymetaAdmin); // hack! Maps DFSClient => RegionServer for logs. HDFS made this // config param for task trackers, but we can piggyback off of it. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/SecurityUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/SecurityUtil.java index 5f9433a3f141..92b5f340a610 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/SecurityUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/SecurityUtil.java @@ -17,6 +17,8 @@ */ package org.apache.hadoop.hbase.security; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HConstants; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; @@ -45,4 +47,14 @@ public static String getPrincipalWithoutRealm(final String principal) { int i = principal.indexOf("@"); return (i > -1) ? principal.substring(0, i) : principal; } + + /** + * From the given configuration, determine if key management is enabled. + * @param conf the configuration to check + * @return true if key management is enabled + */ + public static boolean isKeyManagementEnabled(Configuration conf) { + return conf.getBoolean(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, + HConstants.CRYPTO_MANAGED_KEYS_DEFAULT_ENABLED); + } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyProviderInterceptor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyProviderInterceptor.java new file mode 100644 index 000000000000..b7549007b371 --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyProviderInterceptor.java @@ -0,0 +1,75 @@ +package org.apache.hadoop.hbase.keymeta; + +import java.io.IOException; +import java.security.Key; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; +import org.apache.hadoop.hbase.io.crypto.MockManagedKeyProvider; +import org.mockito.Mockito; + +public class ManagedKeyProviderInterceptor extends MockManagedKeyProvider { + public final MockManagedKeyProvider delegate; + public final MockManagedKeyProvider spy; + + public ManagedKeyProviderInterceptor() { + this.delegate = new MockManagedKeyProvider(); + this.spy = Mockito.spy(delegate); + } + + @Override + public void initConfig(Configuration conf) { + spy.initConfig(conf); + } + + @Override + public ManagedKeyData getManagedKey(byte[] custodian, String namespace) throws IOException { + return spy.getManagedKey(custodian, namespace); + } + + @Override + public ManagedKeyData getSystemKey(byte[] systemId) throws IOException { + return spy.getSystemKey(systemId); + } + + @Override + public ManagedKeyData unwrapKey(String keyMetadata, byte[] wrappedKey) throws IOException { + return spy.unwrapKey(keyMetadata, wrappedKey); + } + + @Override + public void init(String params) { + spy.init(params); + } + + @Override + public Key getKey(String alias) { + return spy.getKey(alias); + } + + @Override + public Key[] getKeys(String[] aliases) { + return spy.getKeys(aliases); + } + + @Override + public void setMockedKeyState(String alias, ManagedKeyState state) { + delegate.setMockedKeyState(alias, state); + } + + @Override + public void setMultikeyGenMode(boolean multikeyGenMode) { + delegate.setMultikeyGenMode(multikeyGenMode); + } + + @Override + public ManagedKeyData getLastGeneratedKeyData(String alias, String keyNamespace) { + return delegate.getLastGeneratedKeyData(alias, keyNamespace); + } + + @Override + public void setMockedKey(String alias, java.security.Key key, String keyNamespace) { + delegate.setMockedKey(alias, key, keyNamespace); + } +} \ No newline at end of file diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java index c3edd42263c2..c6d33ab05dc9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java @@ -18,151 +18,537 @@ package org.apache.hadoop.hbase.keymeta; import static org.apache.hadoop.hbase.io.crypto.ManagedKeyData.KEY_SPACE_GLOBAL; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.ACTIVE; import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.FAILED; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertNotNull; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.clearInvocations; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; -import java.util.ArrayList; +import java.io.IOException; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; import java.util.HashSet; import java.util.List; import java.util.Set; +import net.bytebuddy.ByteBuddy; +import net.bytebuddy.dynamic.loading.ClassLoadingStrategy; +import net.bytebuddy.implementation.MethodDelegation; +import net.bytebuddy.implementation.bind.annotation.AllArguments; +import net.bytebuddy.implementation.bind.annotation.Origin; +import net.bytebuddy.implementation.bind.annotation.RuntimeType; +import net.bytebuddy.matcher.ElementMatchers; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.junit.runners.BlockJUnit4ClassRunner; +import org.junit.runners.Suite; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.mockito.Spy; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.Server; +import org.apache.hadoop.hbase.io.crypto.Encryption; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.hadoop.hbase.io.crypto.MockManagedKeyProvider; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import static org.mockito.Mockito.when; +@RunWith(Suite.class) +@Suite.SuiteClasses({ + TestManagedKeyDataCache.TestGeneric.class, + TestManagedKeyDataCache.TestWithoutL2Cache.class, + TestManagedKeyDataCache.TestWithL2CacheAndNoDynamicLookup.class, + TestManagedKeyDataCache.TestWithL2CacheAndDynamicLookup.class, +}) @Category({ MasterTests.class, SmallTests.class }) public class TestManagedKeyDataCache { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestManagedKeyDataCache.class); - private static final String ALIAS = "cust1"; private static final byte[] CUST_ID = ALIAS.getBytes(); + private static Class providerClass; @Mock private Server server; - - private final MockManagedKeyProvider managedKeyProvider = new MockManagedKeyProvider(); - private ManagedKeyDataCache cache; + @Spy + protected MockManagedKeyProvider testProvider; + protected ManagedKeyDataCache cache; protected Configuration conf = HBaseConfiguration.create(); + public static class ForwardingInterceptor { + static ThreadLocal delegate = new ThreadLocal<>(); + + static void setDelegate(MockManagedKeyProvider d) { + delegate.set(d); + } + + @RuntimeType + public Object intercept(@Origin Method method, @AllArguments Object[] args) throws Throwable { + // Translate the InvocationTargetException that results when the provider throws an exception. + // This is actually not needed if the intercept is delegated directly to the spy. + try { + return method.invoke(delegate.get(), args); // calls the spy, triggering Mockito + } catch (InvocationTargetException e) { + throw e.getCause(); + } + } + } + + @BeforeClass + public static synchronized void setUpInterceptor() { + if (providerClass != null) { + return; + } + providerClass = new ByteBuddy() + .subclass(MockManagedKeyProvider.class) + .name("org.apache.hadoop.hbase.io.crypto.MockManagedKeyProviderSpy") + .method(ElementMatchers.any()) // Intercept all methods + // Using a delegator instead of directly forwarding to testProvider to + // facilitate switching the testProvider instance. Besides, it + .intercept(MethodDelegation.to(new ForwardingInterceptor())) + .make() + .load(MockManagedKeyProvider.class.getClassLoader(), ClassLoadingStrategy.Default.INJECTION) + .getLoaded(); + } + @Before public void setUp() { MockitoAnnotations.openMocks(this); + ForwardingInterceptor.setDelegate(testProvider); + + Encryption.clearKeyProviderCache(); conf.set(HConstants.CRYPTO_MANAGED_KEYS_ENABLED_CONF_KEY, "true"); - conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockManagedKeyProvider.class.getName()); + conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, providerClass.getName()); // Configure the server mock to return the configuration when(server.getConfiguration()).thenReturn(conf); - cache = new ManagedKeyDataCache(server); - managedKeyProvider.initConfig(conf); - managedKeyProvider.setMultikeyGenMode(true); + testProvider.setMultikeyGenMode(true); } - @Test - public void testOperations() throws Exception { - ManagedKeyData globalKey1 = managedKeyProvider.getManagedKey(CUST_ID, - KEY_SPACE_GLOBAL); - - assertEquals(0, cache.getEntryCount()); - assertNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, globalKey1.getKeyMetadata(), null)); - assertNull(cache.removeEntry(globalKey1.getKeyMetadata())); - - cache.addEntryForTesting(globalKey1); - assertEntries(globalKey1); - cache.addEntryForTesting(globalKey1); - assertEntries(globalKey1); - - ManagedKeyData nsKey1 = managedKeyProvider.getManagedKey(CUST_ID, - "namespace1"); - - assertNull(cache.getEntry(CUST_ID, "namespace1", nsKey1.getKeyMetadata(), null)); - cache.addEntryForTesting(nsKey1); - assertEquals(nsKey1, cache.getEntry(CUST_ID, "namespace1", nsKey1.getKeyMetadata(), null)); - assertEquals(globalKey1, cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, globalKey1.getKeyMetadata(), null)); - assertEntries(nsKey1, globalKey1); - - ManagedKeyData globalKey2 = managedKeyProvider.getManagedKey(CUST_ID, - KEY_SPACE_GLOBAL); - assertNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, globalKey2.getKeyMetadata(), null)); - cache.addEntryForTesting(globalKey2); - assertEntries(globalKey2, nsKey1, globalKey1); - - ManagedKeyData nsKey2 = managedKeyProvider.getManagedKey(CUST_ID, - "namespace1"); - assertNull(cache.getEntry(CUST_ID, "namespace1", nsKey2.getKeyMetadata(), null)); - cache.addEntryForTesting(nsKey2); - assertEntries(nsKey2, globalKey2, nsKey1, globalKey1); - - assertEquals(globalKey1, cache.removeEntry(globalKey1.getKeyMetadata())); - assertNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, globalKey1.getKeyMetadata(), null)); - assertEntries(nsKey2, globalKey2, nsKey1); - assertEquals(nsKey2, cache.removeEntry(nsKey2.getKeyMetadata())); - assertNull(cache.getEntry(CUST_ID, "namespace1", nsKey2.getKeyMetadata(), null)); - assertEntries(globalKey2, nsKey1); - assertEquals(nsKey1, cache.removeEntry(nsKey1.getKeyMetadata())); - assertNull(cache.getEntry(CUST_ID, "namespace1", nsKey1.getKeyMetadata(), null)); - assertEntries(globalKey2); - assertEquals(globalKey2, cache.removeEntry(globalKey2.getKeyMetadata())); - assertNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, globalKey2.getKeyMetadata(), null)); + @Category({ MasterTests.class, SmallTests.class }) + public static class TestGeneric { + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestGeneric.class); + + @Test + public void testEmptyCache() throws Exception { + ManagedKeyDataCache cache = new ManagedKeyDataCache(HBaseConfiguration.create(), null); + assertEquals(0, cache.getGenericCacheEntryCount()); + assertEquals(0, cache.getActiveCacheEntryCount()); + } + + @Test + public void testActiveKeysCacheKeyEqualsAndHashCode() { + byte[] custodian1 = new byte[] {1, 2, 3}; + byte[] custodian2 = new byte[] {1, 2, 3}; + byte[] custodian3 = new byte[] {4, 5, 6}; + String namespace1 = "ns1"; + String namespace2 = "ns2"; + + // Reflexive + ManagedKeyDataCache.ActiveKeysCacheKey key1 = + new ManagedKeyDataCache.ActiveKeysCacheKey(custodian1, namespace1); + assertTrue(key1.equals(key1)); + + // Symmetric and consistent for equal content + ManagedKeyDataCache.ActiveKeysCacheKey key2 = + new ManagedKeyDataCache.ActiveKeysCacheKey(custodian2, namespace1); + assertTrue(key1.equals(key2)); + assertTrue(key2.equals(key1)); + assertEquals(key1.hashCode(), key2.hashCode()); + + // Different custodian + ManagedKeyDataCache.ActiveKeysCacheKey key3 = + new ManagedKeyDataCache.ActiveKeysCacheKey(custodian3, namespace1); + assertFalse(key1.equals(key3)); + assertFalse(key3.equals(key1)); + + // Different namespace + ManagedKeyDataCache.ActiveKeysCacheKey key4 = + new ManagedKeyDataCache.ActiveKeysCacheKey(custodian1, namespace2); + assertFalse(key1.equals(key4)); + assertFalse(key4.equals(key1)); + + // Null and different class + assertFalse(key1.equals(null)); + assertFalse(key1.equals("not a key")); + + // Both fields different + ManagedKeyDataCache.ActiveKeysCacheKey key5 = + new ManagedKeyDataCache.ActiveKeysCacheKey(custodian3, namespace2); + assertFalse(key1.equals(key5)); + assertFalse(key5.equals(key1)); + } } - @Test - public void testRandomKeyGet() throws Exception{ - assertNull(cache.getRandomEntry(CUST_ID, KEY_SPACE_GLOBAL)); + @RunWith(BlockJUnit4ClassRunner.class) + @Category({ MasterTests.class, SmallTests.class }) + public static class TestWithoutL2Cache extends TestManagedKeyDataCache { + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestWithoutL2Cache.class); + + @Before + public void setUp() { + super.setUp(); + cache = new ManagedKeyDataCache(conf, null); + } - // Since getRandomEntry only looks at active keys cache, and we don't have a way to add directly to it, - // we'll test that it returns null when no active keys are available - List allKeys = new ArrayList<>(); - for (int i = 0; i < 20; ++i) { - ManagedKeyData keyData; - keyData = managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); - cache.addEntryForTesting(keyData); - allKeys.add(keyData); - keyData = managedKeyProvider.getManagedKey(CUST_ID, "namespace"); - cache.addEntryForTesting(keyData); - allKeys.add(keyData); + @Test + public void testGenericCacheForNonExistentKey() throws Exception { + assertNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, "test-metadata", null)); + verify(testProvider).unwrapKey(any(String.class), any()); } - // getRandomEntry should return null since no keys are in the active keys cache - assertNull(cache.getRandomEntry(CUST_ID, KEY_SPACE_GLOBAL)); + public void testWithInvalidProvider() throws Exception { + ManagedKeyData globalKey1 = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); + doThrow(new IOException("Test exception")).when(testProvider).unwrapKey(any(String.class), any()); + assertNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, globalKey1.getKeyMetadata(), null)); + verify(testProvider).unwrapKey(any(String.class), any()); + // A second call to getEntry should not result in a call to the provider due to -ve entry. + clearInvocations(testProvider); + verify(testProvider, never()).unwrapKey(any(String.class), any()); + assertNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, globalKey1.getKeyMetadata(), null)); + doThrow(new IOException("Test exception")).when(testProvider).getManagedKey(any(), any(String.class)); + assertNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + verify(testProvider).getManagedKey(any(), any(String.class)); + // A second call to getRandomEntry should not result in a call to the provider due to -ve entry. + clearInvocations(testProvider); + assertNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + verify(testProvider, never()).getManagedKey(any(), any(String.class)); + } - for(ManagedKeyData key: allKeys) { - assertEquals(key, cache.removeEntry(key.getKeyMetadata())); + @Test + public void testGenericCache() throws Exception { + ManagedKeyData globalKey1 = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); + assertEquals(globalKey1, cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, globalKey1.getKeyMetadata(), null)); + verify(testProvider).getManagedKey(any(), any(String.class)); + clearInvocations(testProvider); + ManagedKeyData globalKey2 = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); + assertEquals(globalKey2, cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, globalKey2.getKeyMetadata(), null)); + verify(testProvider).getManagedKey(any(), any(String.class)); + clearInvocations(testProvider); + ManagedKeyData globalKey3 = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); + assertEquals(globalKey3, cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, globalKey3.getKeyMetadata(), null)); + verify(testProvider).getManagedKey(any(), any(String.class)); + } + + @Test + public void testActiveKeysCache() throws Exception { + conf.setInt(HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_COUNT, 10); + assertNotNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + verify(testProvider, times(10)).getManagedKey(any(), any(String.class)); + clearInvocations(testProvider); + Set activeKeys = new HashSet<>(); + for (int i = 0; i < 10; ++i) { + activeKeys.add(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + } + assertTrue(activeKeys.size() > 1); + verify(testProvider, never()).getManagedKey(any(), any(String.class)); + } + + @Test + public void testGenericCacheOperations() throws Exception { + ManagedKeyData globalKey1 = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); + assertNull(cache.removeEntry(globalKey1.getKeyMetadata())); + assertGenericCacheEntries(globalKey1); + ManagedKeyData nsKey1 = testProvider.getManagedKey(CUST_ID, "namespace1"); + assertGenericCacheEntries(nsKey1, globalKey1); + ManagedKeyData globalKey2 = testProvider.getManagedKey(CUST_ID, + KEY_SPACE_GLOBAL); + assertGenericCacheEntries(globalKey2, nsKey1, globalKey1); + ManagedKeyData nsKey2 = testProvider.getManagedKey(CUST_ID, + "namespace1"); + assertGenericCacheEntries(nsKey2, globalKey2, nsKey1, globalKey1); + + assertEquals(globalKey1, cache.removeEntry(globalKey1.getKeyMetadata())); + assertGenericCacheEntries(nsKey2, globalKey2, nsKey1); + assertNull(cache.removeEntry(globalKey1.getKeyMetadata())); + // It should be able to retrieve the once removed. + assertGenericCacheEntries(nsKey2, globalKey2, nsKey1, globalKey1); + assertEquals(globalKey1, cache.removeEntry(globalKey1.getKeyMetadata())); + assertEquals(nsKey2, cache.removeEntry(nsKey2.getKeyMetadata())); + assertGenericCacheEntries(globalKey2, nsKey1); + assertEquals(nsKey1, cache.removeEntry(nsKey1.getKeyMetadata())); + assertGenericCacheEntries(globalKey2); + assertEquals(globalKey2, cache.removeEntry(globalKey2.getKeyMetadata())); + cache.invalidateAll(); + assertEquals(0, cache.getGenericCacheEntryCount()); + // Sholld be functional after innvalidation. + assertGenericCacheEntries(globalKey1); + } + + @Test + public void testRandomKeyGetNoActive() throws Exception { + testProvider.setMockedKeyState(ALIAS, FAILED); + assertNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + verify(testProvider).getManagedKey(any(), any(String.class)); + clearInvocations(testProvider); + assertNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + verify(testProvider, never()).getManagedKey(any(), any(String.class)); + } + + private void removeFromActiveKeys(ManagedKeyData key) { + cache.removeFromActiveKeys(key.getKeyCustodian(), key.getKeyNamespace(), + key.getKeyMetadata()); + } + + @Test + public void testActiveKeysCacheOperations() throws Exception { + ManagedKeyData key = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); + assertNull(cache.removeFromActiveKeys(CUST_ID, KEY_SPACE_GLOBAL, key.getKeyMetadata())); + + conf.setInt(HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_COUNT, 2); + assertNotNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + assertNotNull(cache.getAnActiveEntry(CUST_ID, "namespace1")); + assertEquals(4, cache.getActiveCacheEntryCount()); + + key = cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL); + removeFromActiveKeys(key); + assertEquals(3, cache.getActiveCacheEntryCount()); + assertNull(cache.removeFromActiveKeys(CUST_ID, KEY_SPACE_GLOBAL, key.getKeyMetadata())); + assertEquals(3, cache.getActiveCacheEntryCount()); + removeFromActiveKeys(cache.getAnActiveEntry(CUST_ID, "namespace1")); + assertEquals(2, cache.getActiveCacheEntryCount()); + removeFromActiveKeys(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + assertEquals(1, cache.getActiveCacheEntryCount()); + removeFromActiveKeys(cache.getAnActiveEntry(CUST_ID, "namespace1")); + assertEquals(0, cache.getActiveCacheEntryCount()); + // It should be able to retrieve the keys again + assertNotNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + assertEquals(2, cache.getActiveCacheEntryCount()); + + cache.invalidateAll(); + assertEquals(0, cache.getActiveCacheEntryCount()); + assertNotNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + assertEquals(2, cache.getActiveCacheEntryCount()); + } + + @Test + public void testGenericCacheUsingActiveKeysCacheOverProvider() throws Exception { + conf.setInt(HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_COUNT, 3); + ManagedKeyData key = cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL); + assertNotNull(key); + assertEquals(key, cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, key.getKeyMetadata(), null)); + verify(testProvider, never()).unwrapKey(any(String.class), any()); + } + + @Test + public void testActiveKeysCacheSkippingProviderWhenGenericCacheEntriesExist() throws Exception { + ManagedKeyData key1 = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); + assertEquals(key1, cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, key1.getKeyMetadata(), null)); + ManagedKeyData key2 = testProvider.getManagedKey(CUST_ID, "namespace1"); + assertEquals(key2, cache.getEntry(CUST_ID, "namespace1", key2.getKeyMetadata(), null)); + verify(testProvider, times(2)).getManagedKey(any(), any(String.class)); + clearInvocations(testProvider); + assertEquals(key1, cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + // In this case, the provider is not called because the existing keys in generic cache are + // used. + verify(testProvider, never()).getManagedKey(any(), any(String.class)); + assertEquals(1, cache.getActiveCacheEntryCount()); + cache.invalidateAll(); + assertEquals(0, cache.getActiveCacheEntryCount()); + } + + @Test + public void testActiveKeysCacheIgnnoreFailedKeyInGenericCache() throws Exception { + testProvider.setMockedKeyState(ALIAS, FAILED); + ManagedKeyData key = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); + assertNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, key.getKeyMetadata(), null)); + clearInvocations(testProvider); + testProvider.setMockedKeyState(ALIAS, ACTIVE); + assertNotNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + verify(testProvider).getManagedKey(any(), any(String.class)); + } + + @Test + public void testActiveKeysCacheWithMultipleCustodiansInGenericCache() throws Exception { + ManagedKeyData key1 = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); + assertNotNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, key1.getKeyMetadata(), null)); + String alias2 = "cust2"; + byte[] cust_id2 = alias2.getBytes(); + ManagedKeyData key2 = testProvider.getManagedKey(cust_id2, KEY_SPACE_GLOBAL); + assertNotNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, key2.getKeyMetadata(), null)); + assertNotNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + assertEquals(1, cache.getActiveCacheEntryCount()); + } + + @Test + public void testActiveKeysCacheWithMultipleNamespaces() throws Exception { + ManagedKeyData key1 = cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL); + assertNotNull(key1); + assertEquals(key1, cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + ManagedKeyData key2 = cache.getAnActiveEntry(CUST_ID, "namespace1"); + assertNotNull(key2); + assertEquals(key2, cache.getAnActiveEntry(CUST_ID, "namespace1")); + ManagedKeyData key3 = cache.getAnActiveEntry(CUST_ID, "namespace2"); + assertNotNull(key3); + assertEquals(key3, cache.getAnActiveEntry(CUST_ID, "namespace2")); + verify(testProvider, times(3)).getManagedKey(any(), any(String.class)); + assertEquals(3, cache.getActiveCacheEntryCount()); } - assertNull(cache.getRandomEntry(CUST_ID, KEY_SPACE_GLOBAL)); } - @Test - public void testRandomKeyGetNoActive() throws Exception { - managedKeyProvider.setMockedKeyState(ALIAS, FAILED); - for (int i = 0; i < 20; ++i) { - cache.addEntryForTesting(managedKeyProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL)); + @RunWith(BlockJUnit4ClassRunner.class) + @Category({ MasterTests.class, SmallTests.class }) + public static class TestWithL2CacheAndNoDynamicLookup extends TestManagedKeyDataCache { + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestWithL2CacheAndNoDynamicLookup.class); + private KeymetaTableAccessor mockL2 = mock(KeymetaTableAccessor.class); + + @Before + public void setUp() { + super.setUp(); + conf.setBoolean(HConstants.CRYPTO_MANAGED_KEYS_DYNAMIC_LOOKUP_ENABLED_CONF_KEY, false); + cache = new ManagedKeyDataCache(conf, mockL2); + } + + @Test + public void testGenericCacheNonExistentKeyInL2Cache() throws Exception { + assertNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, "test-metadata", null)); + verify(mockL2).getKey(any(), any(String.class), any(String.class)); + clearInvocations(mockL2); + assertNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, "test-metadata", null)); + verify(mockL2, never()).getKey(any(), any(String.class), any(String.class)); + } + + @Test + public void testGenericCacheRetrievalFromL2Cache() throws Exception { + ManagedKeyData key = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); + when(mockL2.getKey(CUST_ID, KEY_SPACE_GLOBAL, key.getKeyMetadata())) + .thenReturn(key); + assertEquals(key, cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, key.getKeyMetadata(), null)); + verify(mockL2).getKey(any(), any(String.class), any(String.class)); + } + + @Test + public void testActiveKeysCacheNonExistentKeyInL2Cache() throws Exception { + assertNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + verify(mockL2).getActiveKeys(any(), any(String.class)); + clearInvocations(mockL2); + assertNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + verify(mockL2, never()).getActiveKeys(any(), any(String.class)); + } + + @Test + public void testActiveKeysCacheRetrievalFromL2Cache() throws Exception { + ManagedKeyData key = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); + when(mockL2.getActiveKeys(CUST_ID, KEY_SPACE_GLOBAL)) + .thenReturn(List.of(key)); + assertEquals(key, cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + verify(mockL2).getActiveKeys(any(), any(String.class)); + } + + @Test + public void testGenericCacheWithKeymetaAccessorException() throws Exception { + when(mockL2.getKey(CUST_ID, KEY_SPACE_GLOBAL, "test-metadata")) + .thenThrow(new IOException("Test exception")); + assertNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, "test-metadata", null)); + verify(mockL2).getKey(any(), any(String.class), any(String.class)); + clearInvocations(mockL2); + assertNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, "test-metadata", null)); + verify(mockL2, never()).getKey(any(), any(String.class), any(String.class)); + } + + @Test + public void testGetRandomEntryWithKeymetaAccessorException() throws Exception { + when(mockL2.getActiveKeys(CUST_ID, KEY_SPACE_GLOBAL)) + .thenThrow(new IOException("Test exception")); + assertNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + verify(mockL2).getActiveKeys(any(), any(String.class)); + clearInvocations(mockL2); + assertNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + verify(mockL2, never()).getActiveKeys(any(), any(String.class)); + } + } + + @RunWith(BlockJUnit4ClassRunner.class) + @Category({ MasterTests.class, SmallTests.class }) + public static class TestWithL2CacheAndDynamicLookup extends TestManagedKeyDataCache { + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestWithL2CacheAndDynamicLookup.class); + private KeymetaTableAccessor mockL2 = mock(KeymetaTableAccessor.class); + + @Before + public void setUp() { + super.setUp(); + conf.setBoolean(HConstants.CRYPTO_MANAGED_KEYS_DYNAMIC_LOOKUP_ENABLED_CONF_KEY, true); + cache = new ManagedKeyDataCache(conf, mockL2); + } + + @Test + public void testGenericCacheRetrivalFromProviderWhenKeyNotFoundInL2Cache() throws Exception { + ManagedKeyData key = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); + doReturn(key).when(testProvider).unwrapKey(any(String.class), any()); + assertEquals(key, cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, key.getKeyMetadata(), null)); + verify(mockL2).getKey(any(), any(String.class), any(String.class)); + verify(mockL2).addKey(any(ManagedKeyData.class)); + } + + @Test + public void testAddKeyFailure() throws Exception { + ManagedKeyData key = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); + doReturn(key).when(testProvider).unwrapKey(any(String.class), any()); + doThrow(new IOException("Test exception")).when(mockL2).addKey(any(ManagedKeyData.class)); + assertEquals(key, cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, key.getKeyMetadata(), null)); + verify(mockL2).addKey(any(ManagedKeyData.class)); + } + + @Test + public void testGenericCacheDynamicLookupUnexpectedException() throws Exception { + doThrow(new RuntimeException("Test exception")).when(testProvider).unwrapKey(any(String.class), any()); + assertNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, "test-metadata", null)); + assertNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, "test-metadata", null)); + verify(mockL2).getKey(any(), any(String.class), any(String.class)); + verify(mockL2, never()).addKey(any(ManagedKeyData.class)); + } + + @Test + public void testActiveKeysCacheDynamicLookupUnexpectedException() throws Exception { + doThrow(new RuntimeException("Test exception")).when(testProvider).getManagedKey(any(), any(String.class)); + assertNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + verify(testProvider).getManagedKey(any(), any(String.class)); + clearInvocations(testProvider); + assertNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + verify(testProvider, never()).getManagedKey(any(), any(String.class)); + } + + @Test + public void testActiveKeysCacheRetrivalFromProviderWhenKeyNotFoundInL2Cache() throws Exception { + ManagedKeyData key = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); + doReturn(key).when(testProvider).getManagedKey(any(), any(String.class)); + assertEquals(key, cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + verify(mockL2).getActiveKeys(any(), any(String.class)); } - assertNull(cache.getRandomEntry(CUST_ID, KEY_SPACE_GLOBAL)); } - private void assertEntries(ManagedKeyData... keys) throws Exception { - assertEquals(keys.length, cache.getEntryCount()); + protected void assertGenericCacheEntries(ManagedKeyData... keys) throws Exception { for (ManagedKeyData key: keys) { assertEquals(key, cache.getEntry(key.getKeyCustodian(), key.getKeyNamespace(), key.getKeyMetadata(), null)); } + assertEquals(keys.length, cache.getGenericCacheEntryCount()); + assertEquals(0, cache.getActiveCacheEntryCount()); } } From 1f44bc201bc88eb2cb84b8918012281952b411c6 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Sat, 12 Jul 2025 19:57:54 +0530 Subject: [PATCH 63/70] Split long lines --- .../hbase/keymeta/ManagedKeyDataCache.java | 14 +++++----- .../keymeta/TestManagedKeyDataCache.java | 27 ++++++++++++------- 2 files changed, 26 insertions(+), 15 deletions(-) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java index 3359d57ca1ef..c6e4d0b5e133 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java @@ -124,8 +124,8 @@ public ManagedKeyDataCache(Configuration conf, KeymetaTableAccessor keymetaAcces * @throws IOException if an error occurs while loading from KeymetaTableAccessor * @throws KeyException if an error occurs while loading from KeymetaTableAccessor */ - public ManagedKeyData getEntry(byte[] key_cust, String keyNamespace, String keyMetadata, byte[] wrappedKey) - throws IOException, KeyException { + public ManagedKeyData getEntry(byte[] key_cust, String keyNamespace, String keyMetadata, + byte[] wrappedKey) throws IOException, KeyException { ManagedKeyData entry = cache.get(keyMetadata, metadata -> { // First check if it's in the active keys cache ManagedKeyData keyData = getFromActiveKeysCache(key_cust, keyNamespace, keyMetadata); @@ -161,7 +161,8 @@ public ManagedKeyData getEntry(byte[] key_cust, String keyNamespace, String keyM } if (keyData == null) { - keyData = new ManagedKeyData(key_cust, keyNamespace, null, ManagedKeyState.FAILED, keyMetadata); + keyData = new ManagedKeyData(key_cust, keyNamespace, null, ManagedKeyState.FAILED, + keyMetadata); } if (ManagedKeyState.isUsable(keyData.getKeyState())) { @@ -184,7 +185,8 @@ public ManagedKeyData getEntry(byte[] key_cust, String keyNamespace, String keyM * @param keyMetadata the key metadata * @return the ManagedKeyData if found, null otherwise */ - private ManagedKeyData getFromActiveKeysCache(byte[] key_cust, String keyNamespace, String keyMetadata) { + private ManagedKeyData getFromActiveKeysCache(byte[] key_cust, String keyNamespace, + String keyMetadata) { ActiveKeysCacheKey cacheKey = new ActiveKeysCacheKey(key_cust, keyNamespace); List keyList = activeKeysCache.getIfPresent(cacheKey); if (keyList != null) { @@ -271,8 +273,8 @@ public ManagedKeyData getAnActiveEntry(byte[] key_cust, String keyNamespace) { List loadedKeys = keymetaAccessor.getActiveKeys(key_cust, keyNamespace); activeEntries.addAll(loadedKeys); } catch (IOException | KeyException | RuntimeException e) { - LOG.warn("Failed to load active keys from KeymetaTableAccessor for custodian: {} namespace: {}", - ManagedKeyProvider.encodeToStr(key_cust), keyNamespace, e); + LOG.warn("Failed to load active keys from KeymetaTableAccessor for custodian: {} " + + "namespace: {}", ManagedKeyProvider.encodeToStr(key_cust), keyNamespace, e); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java index c6d33ab05dc9..ebf58ddd5a96 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java @@ -222,17 +222,20 @@ public void testGenericCacheForNonExistentKey() throws Exception { public void testWithInvalidProvider() throws Exception { ManagedKeyData globalKey1 = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); - doThrow(new IOException("Test exception")).when(testProvider).unwrapKey(any(String.class), any()); + doThrow(new IOException("Test exception")).when(testProvider).unwrapKey(any(String.class), + any()); assertNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, globalKey1.getKeyMetadata(), null)); verify(testProvider).unwrapKey(any(String.class), any()); // A second call to getEntry should not result in a call to the provider due to -ve entry. clearInvocations(testProvider); verify(testProvider, never()).unwrapKey(any(String.class), any()); assertNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, globalKey1.getKeyMetadata(), null)); - doThrow(new IOException("Test exception")).when(testProvider).getManagedKey(any(), any(String.class)); + doThrow(new IOException("Test exception")).when(testProvider).getManagedKey(any(), + any(String.class)); assertNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); verify(testProvider).getManagedKey(any(), any(String.class)); - // A second call to getRandomEntry should not result in a call to the provider due to -ve entry. + // A second call to getRandomEntry should not result in a call to the provider due to -ve + // entry. clearInvocations(testProvider); assertNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); verify(testProvider, never()).getManagedKey(any(), any(String.class)); @@ -241,15 +244,18 @@ public void testWithInvalidProvider() throws Exception { @Test public void testGenericCache() throws Exception { ManagedKeyData globalKey1 = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); - assertEquals(globalKey1, cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, globalKey1.getKeyMetadata(), null)); + assertEquals(globalKey1, cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, + globalKey1.getKeyMetadata(), null)); verify(testProvider).getManagedKey(any(), any(String.class)); clearInvocations(testProvider); ManagedKeyData globalKey2 = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); - assertEquals(globalKey2, cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, globalKey2.getKeyMetadata(), null)); + assertEquals(globalKey2, cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, + globalKey2.getKeyMetadata(), null)); verify(testProvider).getManagedKey(any(), any(String.class)); clearInvocations(testProvider); ManagedKeyData globalKey3 = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); - assertEquals(globalKey3, cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, globalKey3.getKeyMetadata(), null)); + assertEquals(globalKey3, cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, + globalKey3.getKeyMetadata(), null)); verify(testProvider).getManagedKey(any(), any(String.class)); } @@ -518,7 +524,8 @@ public void testAddKeyFailure() throws Exception { @Test public void testGenericCacheDynamicLookupUnexpectedException() throws Exception { - doThrow(new RuntimeException("Test exception")).when(testProvider).unwrapKey(any(String.class), any()); + doThrow(new RuntimeException("Test + exception")).when(testProvider).unwrapKey(any(String.class), any()); assertNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, "test-metadata", null)); assertNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, "test-metadata", null)); verify(mockL2).getKey(any(), any(String.class), any(String.class)); @@ -527,7 +534,8 @@ public void testGenericCacheDynamicLookupUnexpectedException() throws Exception @Test public void testActiveKeysCacheDynamicLookupUnexpectedException() throws Exception { - doThrow(new RuntimeException("Test exception")).when(testProvider).getManagedKey(any(), any(String.class)); + doThrow(new RuntimeException("Test exception")).when(testProvider).getManagedKey(any(), + any(String.class)); assertNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); verify(testProvider).getManagedKey(any(), any(String.class)); clearInvocations(testProvider); @@ -546,7 +554,8 @@ public void testActiveKeysCacheRetrivalFromProviderWhenKeyNotFoundInL2Cache() th protected void assertGenericCacheEntries(ManagedKeyData... keys) throws Exception { for (ManagedKeyData key: keys) { - assertEquals(key, cache.getEntry(key.getKeyCustodian(), key.getKeyNamespace(), key.getKeyMetadata(), null)); + assertEquals(key, cache.getEntry(key.getKeyCustodian(), key.getKeyNamespace(), + key.getKeyMetadata(), null)); } assertEquals(keys.length, cache.getGenericCacheEntryCount()); assertEquals(0, cache.getActiveCacheEntryCount()); From 00b2542bfe912d250405613c770107766c96f784 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Fri, 18 Jul 2025 20:24:49 +0530 Subject: [PATCH 64/70] Updated per the new design to have a single active key Rolling back the feature to have multiple active keys per (cust, ns) and be able to select one at random. --- .../hbase/keymeta/KeymetaAdminClient.java | 2 +- .../org/apache/hadoop/hbase/HConstants.java | 5 - .../hbase/io/crypto/ManagedKeyData.java | 58 ++--- .../hbase/io/crypto/TestManagedKeyData.java | 24 +- .../main/protobuf/server/ManagedKeys.proto | 2 - .../hbase/keymeta/KeyManagementBase.java | 73 ++----- .../hbase/keymeta/KeymetaAdminImpl.java | 29 +-- .../hbase/keymeta/KeymetaServiceEndpoint.java | 2 - .../hbase/keymeta/KeymetaTableAccessor.java | 79 +++---- .../hbase/keymeta/ManagedKeyDataCache.java | 130 ++++------- .../keymeta/TestKeymetaTableAccessor.java | 106 +++------ .../keymeta/TestManagedKeyDataCache.java | 142 ++++++------ .../hbase/keymeta/TestSystemKeyCache.java | 10 +- .../hbase/master/TestKeymetaAdminImpl.java | 206 ++++-------------- .../TestSystemKeyAccessorAndManager.java | 2 +- .../shell/commands/keymeta_command_base.rb | 7 +- 16 files changed, 300 insertions(+), 577 deletions(-) diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java index dddbbdfeae87..8092dee1fc37 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminClient.java @@ -79,7 +79,7 @@ private static List generateKeyDataList( state.getKeyNamespace(), null, ManagedKeyState.forValue((byte) state.getKeyState().getNumber()), state.getKeyMetadata(), - state.getRefreshTimestamp(), state.getReadOpCount(), state.getWriteOpCount())); + state.getRefreshTimestamp())); } return keyStates; } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java index 08ed56a33e4c..74a47a0b48d7 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java @@ -1321,11 +1321,6 @@ public enum OperationStatusCode { "hbase.crypto.managed_keys.enabled"; public static final boolean CRYPTO_MANAGED_KEYS_DEFAULT_ENABLED = false; - /** The number of keys to retrieve from Key Provider per each custodian and namespace - * combination. */ - public static final String CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_COUNT = - "hbase.crypto.managed_keys.per_cust_namespace.active_count"; - public static final int CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_DEFAULT_COUNT = 1; /** Enables or disables key lookup during data path as an alternative to static injection of keys * using control path. */ public static final String CRYPTO_MANAGED_KEYS_DYNAMIC_LOOKUP_ENABLED_CONF_KEY = diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java index 9d2710fc5a21..ca8d55f97faa 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/ManagedKeyData.java @@ -71,8 +71,6 @@ public class ManagedKeyData { private final ManagedKeyState keyState; private final String keyMetadata; private final long refreshTimestamp; - private final long readOpCount; - private final long writeOpCount; private volatile long keyChecksum = 0; private byte[] keyMetadataHash; @@ -88,24 +86,21 @@ public class ManagedKeyData { public ManagedKeyData(byte[] key_cust, String key_namespace, Key theKey, ManagedKeyState keyState, String keyMetadata) { this(key_cust, key_namespace, theKey, keyState, keyMetadata, - EnvironmentEdgeManager.currentTime(), 0, 0); + EnvironmentEdgeManager.currentTime()); } /** - * Constructs a new instance with the given parameters. + * Constructs a new instance with the given parameters including refresh timestamp. * - * @param key_cust The key custodian. - * @param theKey The actual key, can be {@code null}. - * @param keyState The state of the key. - * @param keyMetadata The metadata associated with the key. - * @param refreshTimestamp The timestamp when this key was last refreshed. - * @param readOpCount The current number of read operations for this key. - * @param writeOpCount The current number of write operations for this key. + * @param key_cust The key custodian. + * @param theKey The actual key, can be {@code null}. + * @param keyState The state of the key. + * @param keyMetadata The metadata associated with the key. + * @param refreshTimestamp The refresh timestamp for the key. * @throws NullPointerException if any of key_cust, keyState or keyMetadata is null. */ public ManagedKeyData(byte[] key_cust, String key_namespace, Key theKey, ManagedKeyState keyState, - String keyMetadata, long refreshTimestamp, long readOpCount, - long writeOpCount) { + String keyMetadata, long refreshTimestamp) { Preconditions.checkNotNull(key_cust, "key_cust should not be null"); Preconditions.checkNotNull(key_namespace, "key_namespace should not be null"); Preconditions.checkNotNull(keyState, "keyState should not be null"); @@ -113,10 +108,6 @@ public ManagedKeyData(byte[] key_cust, String key_namespace, Key theKey, Managed if (keyState != ManagedKeyState.FAILED) { Preconditions.checkNotNull(keyMetadata, "keyMetadata should not be null"); } - Preconditions.checkArgument(readOpCount >= 0, "readOpCount: " + readOpCount + - " should be >= 0"); - Preconditions.checkArgument(writeOpCount >= 0, "writeOpCount: " + writeOpCount + - " should be >= 0"); this.keyCustodian = key_cust; this.keyNamespace = key_namespace; @@ -124,14 +115,12 @@ public ManagedKeyData(byte[] key_cust, String key_namespace, Key theKey, Managed this.keyState = keyState; this.keyMetadata = keyMetadata; this.refreshTimestamp = refreshTimestamp; - this.readOpCount = readOpCount; - this.writeOpCount = writeOpCount; } @InterfaceAudience.Private public ManagedKeyData cloneWithoutKey() { return new ManagedKeyData(keyCustodian, keyNamespace, null, keyState, keyMetadata, - refreshTimestamp, readOpCount, writeOpCount); + refreshTimestamp); } /** @@ -188,6 +177,15 @@ public String getKeyMetadata() { return keyMetadata; } + /** + * Returns the refresh timestamp of the key. + * + * @return The refresh timestamp as a long value. + */ + public long getRefreshTimestamp() { + return refreshTimestamp; + } + @Override public String toString() { return "ManagedKeyData{" + @@ -200,26 +198,6 @@ public String toString() { '}'; } - public long getRefreshTimestamp() { - return refreshTimestamp; - } - - /** - * @return the number of times this key has been used for read operations as of the time this - * key data was initialized. - */ - public long getReadOpCount() { - return readOpCount; - } - - /** - * @return the number of times this key has been used for write operations as of the time this - * key data was initialized. - */ - public long getWriteOpCount() { - return writeOpCount; - } - /** * Computes the checksum of the key. If the checksum has already been computed, this method * returns the previously computed value. The checksum is computed using the CRC32C algorithm. diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyData.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyData.java index d4b4fd019a64..96b58a17b8e0 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyData.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestManagedKeyData.java @@ -74,20 +74,6 @@ public void testConstructor() { assertEquals(keyMetadata, managedKeyData.getKeyMetadata()); } - @Test - public void testConstructorWithCounts() { - long refreshTimestamp = System.currentTimeMillis(); - long readOpCount = 10; - long writeOpCount = 5; - ManagedKeyData keyDataWithCounts = - new ManagedKeyData(keyCust, keyNamespace, theKey, keyState, keyMetadata, refreshTimestamp, - readOpCount, writeOpCount); - - assertEquals(refreshTimestamp, keyDataWithCounts.getRefreshTimestamp()); - assertEquals(readOpCount, keyDataWithCounts.getReadOpCount()); - assertEquals(writeOpCount, keyDataWithCounts.getWriteOpCount()); - } - @Test public void testConstructorNullChecks() { assertThrows(NullPointerException.class, @@ -111,11 +97,11 @@ public void testConstructorWithFailedStateAndNullMetadata() { } @Test - public void testConstructorNegativeCountChecks() { - assertThrows(IllegalArgumentException.class, - () -> new ManagedKeyData(keyCust, keyNamespace, theKey, keyState, keyMetadata, 0, -1, 0)); - assertThrows(IllegalArgumentException.class, - () -> new ManagedKeyData(keyCust, keyNamespace, theKey, keyState, keyMetadata, 0, 0, -1)); + public void testConstructorWithRefreshTimestamp() { + long refreshTimestamp = System.currentTimeMillis(); + ManagedKeyData keyDataWithTimestamp = new ManagedKeyData(keyCust, keyNamespace, theKey, + keyState, keyMetadata, refreshTimestamp); + assertEquals(refreshTimestamp, keyDataWithTimestamp.getRefreshTimestamp()); } @Test diff --git a/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto b/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto index 452151b98628..c6a3a31f6183 100644 --- a/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto +++ b/hbase-protocol-shaded/src/main/protobuf/server/ManagedKeys.proto @@ -42,8 +42,6 @@ message ManagedKeysResponse { required ManagedKeyState key_state = 3; optional string key_metadata = 4; optional int64 refresh_timestamp = 5; - optional int64 read_op_count = 6; - optional int64 write_op_count = 7; } message GetManagedKeysResponse { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java index a9aea5e8fef2..e67b73f7c877 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java @@ -19,8 +19,6 @@ import java.io.IOException; import java.security.KeyException; -import java.util.HashSet; -import java.util.Set; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; @@ -124,65 +122,40 @@ protected boolean isKeyManagementEnabled() { return isKeyManagementEnabled; } - protected int getPerCustodianNamespaceActiveKeyConfCount() throws IOException { - if (perCustNamespaceActiveKeyCount == null) { - perCustNamespaceActiveKeyCount = getConfiguration().getInt( - HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_COUNT, - HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_DEFAULT_COUNT); - } - if (perCustNamespaceActiveKeyCount <= 0) { - throw new IOException("Invalid value: " + perCustNamespaceActiveKeyCount + " configured for: " - + HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_COUNT); - } - return perCustNamespaceActiveKeyCount; - } - /** - * Retrieves specified number of managed keys from the key provider. An attempt is made to - * retrieve the specified number of keys, but the real number of keys retrieved may be less than - * the specified number if the key provider is not capable of producing multiple active keys. If - * existing keys are provided, it will be used to ensure that keys retrieved are not the same as - * those that are already retrieved. + * Utility function to retrieves a managed key from the key provider. If an existing key is + * provided and the retrieved key is the same as the existing key, it will be ignored. * * @param encKeyCust the encoded key custodian * @param key_cust the key custodian * @param keyNamespace the key namespace - * @param nKeysToRetrieve the number of keys to retrieve - * @param existingKeys the existing keys, typically the active keys already retrieved from the - * key provider. - * @return the retrieved keys + * @param accessor the accessor to use to persist the key. If null, the key will not be persisted. + * @param existingActiveKey the existing key, typically the active key already retrieved from the + * key provider, can be null. + * @return the retrieved key, or null if no key could be retrieved * @throws IOException if an error occurs * @throws KeyException if an error occurs */ - protected Set retrieveManagedKeys(String encKeyCust, byte[] key_cust, - String keyNamespace, int nKeysToRetrieve, Set existingKeys) + protected ManagedKeyData retrieveActiveKey(String encKeyCust, byte[] key_cust, + String keyNamespace, KeymetaTableAccessor accessor, ManagedKeyData existingActiveKey) throws IOException, KeyException { - Set retrievedKeys = new HashSet<>(nKeysToRetrieve); ManagedKeyProvider provider = getKeyProvider(); - for (int i = 0; i < nKeysToRetrieve; ++i) { - ManagedKeyData pbeKey = provider.getManagedKey(key_cust, keyNamespace); - if (pbeKey == null) { - throw new IOException("Invalid null managed key received from key provider"); - } - if (retrievedKeys.contains(pbeKey) || existingKeys.contains(pbeKey)) { - // This typically means, the key provider is not capable of producing multiple active keys. - LOG.info("enableManagedKeys: specified (custodian: {}, namespace: {}) is configured " - + " to have {} active keys, but received only {} unique keys.", - encKeyCust, keyNamespace, existingKeys.size() + nKeysToRetrieve, - existingKeys.size() + retrievedKeys.size()); - break; - } - retrievedKeys.add(pbeKey); - LOG.info("enableManagedKeys: got managed key with status: {} and metadata: {} for " - + "(custodian: {}, namespace: {})", pbeKey.getKeyState(), pbeKey.getKeyMetadata(), + ManagedKeyData pbeKey = provider.getManagedKey(key_cust, keyNamespace); + if (pbeKey == null) { + throw new IOException("Invalid null managed key received from key provider"); + } + if (existingActiveKey != null && existingActiveKey.equals(pbeKey)) { + LOG.info("retrieveManagedKey: no change in key for (custodian: {}, namespace: {}", encKeyCust, keyNamespace); - if (pbeKey.getKeyState() != ManagedKeyState.ACTIVE) { - LOG.info("enableManagedKeys: received non-ACTIVE key with status: {} with metadata: {} for " - + "(custodian: {}, namespace: {})", - pbeKey.getKeyState(), pbeKey.getKeyMetadata(), encKeyCust, keyNamespace); - break; - } + return null; + } + LOG.info("retrieveManagedKey: got managed key with status: {} and metadata: {} for " + + "(custodian: {}, namespace: {})", pbeKey.getKeyState(), pbeKey.getKeyMetadata(), + encKeyCust, keyNamespace); + if (accessor != null) { + // TODO: If existingActiveKey is not null, we should update the key state to INACTIVE. + accessor.addKey(pbeKey); } - return retrievedKeys; + return pbeKey; } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java index 61bb2062e9e7..d8b47fb434d1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java @@ -19,14 +19,12 @@ import java.io.IOException; import java.security.KeyException; -import java.util.HashSet; +import java.util.Collections; import java.util.List; -import java.util.Set; import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; import org.apache.hadoop.hbase.io.crypto.ManagedKeyProvider; -import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -46,25 +44,22 @@ public List enableKeyManagement(String keyCust, String keyNamesp LOG.info("Trying to enable key management on custodian: {} under namespace: {}", keyCust, keyNamespace); byte[] key_cust = ManagedKeyProvider.decodeToBytes(keyCust); - int perCustNamespaceActiveKeyConfCount = getPerCustodianNamespaceActiveKeyConfCount(); - // Check if (cust, namespace) pair is already enabled and if there are enough number of - // active keys. - List activeKeys = getActiveKeys(key_cust, keyNamespace); - if (activeKeys.size() >= perCustNamespaceActiveKeyConfCount) { + // Check if (cust, namespace) pair is already enabled and has an active key. + ManagedKeyData activeKey = getActiveKey(key_cust, keyNamespace); + if (activeKey != null) { LOG.info("enableManagedKeys: specified (custodian: {}, namespace: {}) already has " - + " {} number of managed keys active, which satisfies the configured minimum: {}", - keyCust, keyNamespace, activeKeys.size(), perCustNamespaceActiveKeyConfCount); - return activeKeys; + + "an active managed key with metadata: {}", keyCust, keyNamespace, + activeKey.getKeyMetadata()); + return Collections.singletonList(activeKey); } - int nKeysToRetrieve = perCustNamespaceActiveKeyConfCount - activeKeys.size(); - Set retrievedKeys = retrieveManagedKeys( - keyCust, key_cust, keyNamespace, nKeysToRetrieve, new HashSet<>(activeKeys)); - for (ManagedKeyData pbeKey : retrievedKeys) { - addKey(pbeKey); + // Retrieve a single key from provider + ManagedKeyData retrievedKey = retrieveActiveKey(keyCust, key_cust, keyNamespace, this, null); + if (retrievedKey != null) { + return Collections.singletonList(retrievedKey); } - return retrievedKeys.stream().toList(); + return Collections.emptyList(); } @Override diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java index 067f98340575..fde42b8dd295 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaServiceEndpoint.java @@ -160,8 +160,6 @@ public static GetManagedKeysResponse generateKeyStateResponse( keyData.getKeyState().getVal())) .setKeyMetadata(keyData.getKeyMetadata()) .setRefreshTimestamp(keyData.getRefreshTimestamp()) - .setReadOpCount(keyData.getReadOpCount()) - .setWriteOpCount(keyData.getWriteOpCount()) ; responseBuilder.addState(builder.build()); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java index 6efb24a57fff..c4330a95f761 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java @@ -21,6 +21,7 @@ import java.security.Key; import java.security.KeyException; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import org.apache.hadoop.hbase.HBaseInterfaceAudience; @@ -31,7 +32,6 @@ import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Get; -import org.apache.hadoop.hbase.client.Increment; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -76,12 +76,6 @@ public class KeymetaTableAccessor extends KeyManagementBase { public static final String KEY_STATE_QUAL_NAME = "k"; public static final byte[] KEY_STATE_QUAL_BYTES = Bytes.toBytes(KEY_STATE_QUAL_NAME); - public static final String READ_OP_COUNT_QUAL_NAME = "R"; - public static final byte[] READ_OP_COUNT_QUAL_BYTES = Bytes.toBytes(READ_OP_COUNT_QUAL_NAME); - - public static final String WRITE_OP_COUNT_QUAL_NAME = "W"; - public static final byte[] WRITE_OP_COUNT_QUAL_BYTES = Bytes.toBytes(WRITE_OP_COUNT_QUAL_NAME); - public KeymetaTableAccessor(Server server) { super(server); } @@ -93,11 +87,17 @@ public KeymetaTableAccessor(Server server) { */ public void addKey(ManagedKeyData keyData) throws IOException { assertKeyManagementEnabled(); + List puts = new ArrayList<>(2); + if (keyData.getKeyState() == ManagedKeyState.ACTIVE) { + puts.add(addMutationColumns(new Put(constructRowKeyForCustNamespace(keyData)), + keyData)); + } final Put putForMetadata = addMutationColumns(new Put(constructRowKeyForMetadata(keyData)), keyData); + puts.add(putForMetadata); Connection connection = getServer().getConnection(); try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { - table.put(putForMetadata); + table.put(puts); } } @@ -136,24 +136,26 @@ public List getAllKeys(byte[] key_cust, String keyNamespace) } /** - * Get all the active keys for the specified key_cust and key_namespace. + * Get the active key for the specified key_cust and key_namespace. * * @param key_cust The prefix * @param keyNamespace The namespace - * @return a list of key data, one for each active key, can be empty when none were found. + * @return the active key data, or null if no active key found * @throws IOException when there is an underlying IOException. * @throws KeyException when there is an underlying KeyException. */ - public List getActiveKeys(byte[] key_cust, String keyNamespace) + public ManagedKeyData getActiveKey(byte[] key_cust, String keyNamespace) throws IOException, KeyException { assertKeyManagementEnabled(); - List activeKeys = new ArrayList<>(); - for (ManagedKeyData keyData : getAllKeys(key_cust, keyNamespace)) { - if (keyData.getKeyState() == ManagedKeyState.ACTIVE) { - activeKeys.add(keyData); - } + Connection connection = getServer().getConnection(); + byte[] rowkeyForGet = constructRowKeyForCustNamespace(key_cust, keyNamespace); + Get get = new Get(rowkeyForGet); + get.addColumn(KEY_META_INFO_FAMILY, KEY_STATE_QUAL_BYTES); + + try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { + Result result = table.get(get); + return parseFromResult(getServer(), key_cust, keyNamespace, result); } - return activeKeys; } /** @@ -208,30 +210,6 @@ private ManagedKeyData getKeyInternal(byte[] key_cust, String keyNamespace, } } - /** - * Report read or write operation count on the specific key identified by key_cust, keyNamespace - * and keyMetadata. The reported value is added to the existing operation count using the - * Increment mutation. - * @param key_cust The prefix. - * @param keyNamespace The namespace. - * @param keyMetadata The metadata. - * @throws IOException when there is an underlying IOException. - */ - public void reportOperation(byte[] key_cust, String keyNamespace, String keyMetadata, long count, - boolean isReadOperation) throws IOException { - assertKeyManagementEnabled(); - Connection connection = getServer().getConnection(); - try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { - byte[] rowKey = constructRowKeyForMetadata(key_cust, keyNamespace, - ManagedKeyData.constructMetadataHash(keyMetadata)); - Increment incr = new Increment(rowKey) - .addColumn(KEY_META_INFO_FAMILY, - isReadOperation ? READ_OP_COUNT_QUAL_BYTES : WRITE_OP_COUNT_QUAL_BYTES, - count); - table.increment(incr); - } - } - /** * Add the mutation columns to the given Put that are derived from the keyData. */ @@ -280,9 +258,18 @@ public static byte[] constructRowKeyForMetadata(ManagedKeyData keyData) { @InterfaceAudience.Private public static byte[] constructRowKeyForMetadata(byte[] key_cust, String keyNamespace, byte[] keyMetadataHash) { + return Bytes.add(constructRowKeyForCustNamespace(key_cust, keyNamespace), keyMetadataHash); + } + + @InterfaceAudience.Private + public static byte[] constructRowKeyForCustNamespace(ManagedKeyData keyData) { + return constructRowKeyForCustNamespace(keyData.getKeyCustodian(), keyData.getKeyNamespace()); + } + + @InterfaceAudience.Private + public static byte[] constructRowKeyForCustNamespace(byte[] key_cust, String keyNamespace) { int custLength = key_cust.length; - return Bytes.add(Bytes.toBytes(custLength), key_cust, Bytes.toBytesBinary(keyNamespace), - keyMetadataHash); + return Bytes.add(Bytes.toBytes(custLength), key_cust, Bytes.toBytesBinary(keyNamespace)); } @InterfaceAudience.Private @@ -315,13 +302,9 @@ public static ManagedKeyData parseFromResult(Server server, byte[] key_cust, Str } long refreshedTimestamp = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY, REFRESHED_TIMESTAMP_QUAL_BYTES)); - byte[] readOpValue = result.getValue(KEY_META_INFO_FAMILY, READ_OP_COUNT_QUAL_BYTES); - long readOpCount = readOpValue != null ? Bytes.toLong(readOpValue) : 0; - byte[] writeOpValue = result.getValue(KEY_META_INFO_FAMILY, WRITE_OP_COUNT_QUAL_BYTES); - long writeOpCount = writeOpValue != null ? Bytes.toLong(writeOpValue) : 0; ManagedKeyData dekKeyData = new ManagedKeyData(key_cust, keyNamespace, dek, keyState, dekMetadata, - refreshedTimestamp, readOpCount, writeOpCount); + refreshedTimestamp); if (dek != null) { long dekChecksum = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY, DEK_CHECKSUM_QUAL_BYTES)); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java index c6e4d0b5e133..7fb7f8572833 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java @@ -19,11 +19,7 @@ import java.io.IOException; import java.security.KeyException; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; import java.util.Objects; -import java.util.Set; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseInterfaceAudience; @@ -42,14 +38,14 @@ /** * In-memory cache for ManagedKeyData entries, using key metadata as the cache key. Uses two * independent Caffeine caches: one for general key data and one for active keys only with - * hierarchical structure for efficient random key selection. + * hierarchical structure for efficient single key retrieval. */ @InterfaceAudience.Private public class ManagedKeyDataCache extends KeyManagementBase { private static final Logger LOG = LoggerFactory.getLogger(ManagedKeyDataCache.class); private Cache cache; - private Cache> activeKeysCache; + private Cache activeKeysCache; private final KeymetaTableAccessor keymetaAccessor; /** @@ -165,7 +161,7 @@ public ManagedKeyData getEntry(byte[] key_cust, String keyNamespace, String keyM keyMetadata); } - if (ManagedKeyState.isUsable(keyData.getKeyState())) { + if (!ManagedKeyState.isUsable(keyData.getKeyState())) { LOG.info("Failed to get usable key data with metadata: {} for prefix: {}", metadata, ManagedKeyProvider.encodeToStr(key_cust)); } @@ -178,7 +174,7 @@ public ManagedKeyData getEntry(byte[] key_cust, String keyNamespace, String keyM } /** - * Retrieves an existing key from the active keys. + * Retrieves an existing key from the active keys cache. * * @param key_cust the key custodian * @param keyNamespace the key namespace @@ -188,13 +184,9 @@ public ManagedKeyData getEntry(byte[] key_cust, String keyNamespace, String keyM private ManagedKeyData getFromActiveKeysCache(byte[] key_cust, String keyNamespace, String keyMetadata) { ActiveKeysCacheKey cacheKey = new ActiveKeysCacheKey(key_cust, keyNamespace); - List keyList = activeKeysCache.getIfPresent(cacheKey); - if (keyList != null) { - for (ManagedKeyData keyData : keyList) { - if (keyData.getKeyMetadata().equals(keyMetadata)) { - return keyData; - } - } + ManagedKeyData keyData = activeKeysCache.getIfPresent(cacheKey); + if (keyData != null && keyData.getKeyMetadata().equals(keyMetadata)) { + return keyData; } return null; } @@ -212,21 +204,10 @@ public ManagedKeyData removeEntry(String keyMetadata) { public ManagedKeyData removeFromActiveKeys(byte[] key_cust, String key_namespace, String keyMetadata) { ActiveKeysCacheKey cacheKey = new ActiveKeysCacheKey(key_cust, key_namespace); - List keyList = activeKeysCache.getIfPresent(cacheKey); - if (keyList != null) { - // Find and remove the matching key - ManagedKeyData removedEntry = null; - for (int i = 0; i < keyList.size(); i++) { - if (keyList.get(i).getKeyMetadata().equals(keyMetadata)) { - removedEntry = keyList.remove(i); - break; - } - } - // If the list is now empty, remove the entire cache entry - if (keyList.isEmpty()) { - activeKeysCache.invalidate(cacheKey); - } - return removedEntry; + ManagedKeyData keyData = activeKeysCache.getIfPresent(cacheKey); + if (keyData != null && keyData.getKeyMetadata().equals(keyMetadata)) { + activeKeysCache.invalidate(cacheKey); + return keyData; } return null; } @@ -240,90 +221,73 @@ public int getGenericCacheEntryCount() { } /** - * @return the approximate number of entries in the active keys cache which is meant for random - * key selection. + * @return the approximate number of entries in the active keys cache */ public int getActiveCacheEntryCount() { - int activeCacheCount = 0; - for (List keyList : activeKeysCache.asMap().values()) { - activeCacheCount += keyList.size(); - } - return activeCacheCount; + return (int) activeKeysCache.estimatedSize(); } /** - * Retrieves a random active entry from the cache based on its key custodian, key namespace, and - * filters out entries with a status other than ACTIVE. This method also loads active keys from - * provider if not found in cache. + * Retrieves the active entry from the cache based on its key custodian and key namespace. + * This method also loads active keys from provider if not found in cache. * * @param key_cust The key custodian. * @param keyNamespace the key namespace to search for - * @return a random ManagedKeyData entry with the given custodian and ACTIVE status, or null if + * @return the ManagedKeyData entry with the given custodian and ACTIVE status, or null if * not found */ - public ManagedKeyData getAnActiveEntry(byte[] key_cust, String keyNamespace) { + public ManagedKeyData getActiveEntry(byte[] key_cust, String keyNamespace) { ActiveKeysCacheKey cacheKey = new ActiveKeysCacheKey(key_cust, keyNamespace); - List keyList = activeKeysCache.get(cacheKey, key -> { - List activeEntries = new ArrayList<>(); + ManagedKeyData keyData = activeKeysCache.get(cacheKey, key -> { + ManagedKeyData retrievedKey = null; - // Try to load from KeymetaTableAccessor - if (keymetaAccessor != null) { + // First check if there are any active keys in the generic cache, which should be + // suitable for standalone tools. + retrievedKey = this.cache.asMap().values().stream() + .filter(cachedKeyData -> Bytes.equals(cachedKeyData.getKeyCustodian(), key_cust) + && cachedKeyData.getKeyNamespace().equals(keyNamespace) + && cachedKeyData.getKeyState() == ManagedKeyState.ACTIVE) + .findFirst() + .orElse(null); + + // Try to load from KeymetaTableAccessor if not found in cache + if (retrievedKey == null && keymetaAccessor != null) { try { - List loadedKeys = keymetaAccessor.getActiveKeys(key_cust, keyNamespace); - activeEntries.addAll(loadedKeys); + retrievedKey = keymetaAccessor.getActiveKey(key_cust, keyNamespace); } catch (IOException | KeyException | RuntimeException e) { - LOG.warn("Failed to load active keys from KeymetaTableAccessor for custodian: {} " + LOG.warn("Failed to load active key from KeymetaTableAccessor for custodian: {} " + "namespace: {}", ManagedKeyProvider.encodeToStr(key_cust), keyNamespace, e); } } - // If this happens, it means there were no keys in L2, which shouldn't happpen if L2 is - // enabled and keys were injected using control path for this custodian and namespace. In - // this case, we need to retrieve the keys from provider, but before that as a quick - // optimization, we check if there are any active keys in the other cache, which should be - // suitable for standalone tools. - if (activeEntries.isEmpty()) { - this.cache.asMap().values().stream() - .filter(keyData -> Bytes.equals(keyData.getKeyCustodian(), key_cust) - && keyData.getKeyNamespace().equals(keyNamespace) - && keyData.getKeyState() == ManagedKeyState.ACTIVE) - .forEach(keyData -> { - activeEntries.add(keyData); - }); - } - - // As a last ditch effort, load active keys from provider. This typically happens for + // As a last ditch effort, load active key from provider. This typically happens for // standalone tools. - if (activeEntries.isEmpty() && isDynamicLookupEnabled()) { + if (retrievedKey == null && isDynamicLookupEnabled()) { try { String keyCust = ManagedKeyProvider.encodeToStr(key_cust); - Set retrievedKeys = retrieveManagedKeys(keyCust, key_cust, keyNamespace, - getPerCustodianNamespaceActiveKeyConfCount(), new HashSet<>()); - if (keymetaAccessor != null) { - for (ManagedKeyData keyData : retrievedKeys) { - keymetaAccessor.addKey(keyData); - } - } - retrievedKeys.stream().filter(keyData -> keyData.getKeyState() == ManagedKeyState.ACTIVE) - .forEach(activeEntries::add); + retrievedKey = retrieveActiveKey(keyCust, key_cust, keyNamespace, keymetaAccessor, null); } catch (IOException | KeyException | RuntimeException e) { - LOG.warn("Failed to load active keys from provider for custodian: {} namespace: {}", + LOG.warn("Failed to load active key from provider for custodian: {} namespace: {}", ManagedKeyProvider.encodeToStr(key_cust), keyNamespace, e); } } - // We don't mind returning an empty list here because it will help prevent future L2/provider - // lookups. - return activeEntries; + return retrievedKey; }); - // Return a random entry from active keys cache only - if (keyList.isEmpty()) { - return null; + if (keyData != null && keyData.getKeyState() == ManagedKeyState.ACTIVE) { + return keyData; } + return null; + } - return keyList.get((int) (Math.random() * keyList.size())); + /** + * @deprecated Use {@link #getActiveEntry(byte[], String)} instead. + */ + @Deprecated + public ManagedKeyData getAnActiveEntry(byte[] key_cust, String keyNamespace) { + return getActiveEntry(key_cust, keyNamespace); } /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java index 3c80f928c9e1..3b3c4c23dc7d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestKeymetaTableAccessor.java @@ -27,10 +27,9 @@ import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.DEK_WRAPPED_BY_STK_QUAL_BYTES; import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.KEY_META_INFO_FAMILY; import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.KEY_STATE_QUAL_BYTES; -import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.READ_OP_COUNT_QUAL_BYTES; import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.REFRESHED_TIMESTAMP_QUAL_BYTES; import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.STK_CHECKSUM_QUAL_BYTES; -import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.WRITE_OP_COUNT_QUAL_BYTES; +import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.constructRowKeyForCustNamespace; import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.constructRowKeyForMetadata; import static org.apache.hadoop.hbase.keymeta.KeymetaTableAccessor.parseFromResult; import static org.junit.Assert.assertEquals; @@ -46,6 +45,7 @@ import static org.mockito.Mockito.when; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; @@ -62,7 +62,6 @@ import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Get; -import org.apache.hadoop.hbase.client.Increment; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -94,7 +93,6 @@ TestKeymetaTableAccessor.TestAdd.class, TestKeymetaTableAccessor.TestAddWithNullableFields.class, TestKeymetaTableAccessor.TestGet.class, - TestKeymetaTableAccessor.TestOps.class, }) @Category({ MasterTests.class, SmallTests.class }) public class TestKeymetaTableAccessor { @@ -171,10 +169,17 @@ public void testAddKey() throws Exception { accessor.addKey(keyData); - ArgumentCaptor putCaptor = ArgumentCaptor.forClass(Put.class); + ArgumentCaptor> putCaptor = ArgumentCaptor.forClass(ArrayList.class); verify(table).put(putCaptor.capture()); - Put put = putCaptor.getValue(); - assertPut(keyData, put); + List puts = putCaptor.getValue(); + assertEquals(keyState == ACTIVE ? 2 : 1, puts.size()); + if (keyState == ACTIVE) { + assertPut(keyData, puts.get(0), constructRowKeyForCustNamespace(keyData)); + assertPut(keyData, puts.get(1), constructRowKeyForMetadata(keyData)); + } + else { + assertPut(keyData, puts.get(0), constructRowKeyForMetadata(keyData)); + } } } @@ -192,9 +197,11 @@ public void testAddKeyWithFailedStateAndNullMetadata() throws Exception { accessor.addKey(keyData); - ArgumentCaptor putCaptor = ArgumentCaptor.forClass(Put.class); + ArgumentCaptor> putCaptor = ArgumentCaptor.forClass(ArrayList.class); verify(table).put(putCaptor.capture()); - Put put = putCaptor.getValue(); + List puts = putCaptor.getValue(); + assertEquals(1, puts.size()); + Put put = puts.get(0); // Verify the row key uses state value for metadata hash byte[] expectedRowKey = constructRowKeyForMetadata(CUST_ID, KEY_SPACE_GLOBAL, @@ -295,7 +302,7 @@ public void testGetKeyWithWrappedKey() throws Exception { assertNotNull(result); assertEquals(0, Bytes.compareTo(CUST_ID, result.getKeyCustodian())); assertEquals(KEY_NAMESPACE, result.getKeyNamespace()); - assertEquals(KEY_METADATA, result.getKeyMetadata()); + assertEquals(keyData.getKeyMetadata(), result.getKeyMetadata()); assertEquals(0, Bytes.compareTo(keyData.getTheKey().getEncoded(), result.getTheKey().getEncoded())); assertEquals(ACTIVE, result.getKeyState()); @@ -346,24 +353,6 @@ public void testGetKeyWithoutWrappedKey() throws Exception { assertEquals(FAILED, result.getKeyState()); } - @Test - public void testGetKeyWithOps() throws Exception { - long readCnt = 5; - long writeCnt = 10; - when(result2.getValue(eq(KEY_META_INFO_FAMILY), eq(READ_OP_COUNT_QUAL_BYTES))) - .thenReturn(Bytes.toBytes(readCnt)); - when(result2.getValue(eq(KEY_META_INFO_FAMILY), eq(WRITE_OP_COUNT_QUAL_BYTES))) - .thenReturn(Bytes.toBytes(writeCnt)); - when(table.get(any(Get.class))).thenReturn(result2); - - ManagedKeyData result = accessor.getKey(CUST_ID, KEY_NAMESPACE, KEY_METADATA); - - verify(table).get(any(Get.class)); - assertNotNull(result); - assertEquals(readCnt, result.getReadOpCount()); - assertEquals(writeCnt, result.getWriteOpCount()); - } - @Test public void testGetAllKeys() throws Exception { ManagedKeyData keyData = setupActiveKey(CUST_ID, result1); @@ -374,23 +363,23 @@ public void testGetAllKeys() throws Exception { List allKeys = accessor.getAllKeys(CUST_ID, KEY_NAMESPACE); assertEquals(2, allKeys.size()); - assertEquals(KEY_METADATA, allKeys.get(0).getKeyMetadata()); + assertEquals(keyData.getKeyMetadata(), allKeys.get(0).getKeyMetadata()); assertEquals(keyMetadata2, allKeys.get(1).getKeyMetadata()); verify(table).getScanner(any(Scan.class)); } @Test - public void testGetActiveKeys() throws Exception { + public void testGetActiveKey() throws Exception { ManagedKeyData keyData = setupActiveKey(CUST_ID, result1); - when(scanner.iterator()).thenReturn(List.of(result1, result2).iterator()); - when(table.getScanner(any(Scan.class))).thenReturn(scanner); + when(scanner.iterator()).thenReturn(List.of(result1).iterator()); + when(table.get(any(Get.class))).thenReturn(result1); - List allKeys = accessor.getActiveKeys(CUST_ID, KEY_NAMESPACE); + ManagedKeyData activeKey = accessor.getActiveKey(CUST_ID, KEY_NAMESPACE); - assertEquals(1, allKeys.size()); - assertEquals(KEY_METADATA, allKeys.get(0).getKeyMetadata()); - verify(table).getScanner(any(Scan.class)); + assertNotNull(activeKey); + assertEquals(keyData, activeKey); + verify(table).get(any(Get.class)); } private ManagedKeyData setupActiveKey(byte[] custId, Result result) throws Exception { @@ -401,53 +390,18 @@ private ManagedKeyData setupActiveKey(byte[] custId, Result result) throws Excep .thenReturn(dekWrappedBySTK); when(result.getValue(eq(KEY_META_INFO_FAMILY), eq(DEK_CHECKSUM_QUAL_BYTES))) .thenReturn(Bytes.toBytes(keyData.getKeyChecksum()), Bytes.toBytes(0L)); + // Update the mock to return the correct metadata from the keyData + when(result.getValue(eq(KEY_META_INFO_FAMILY), eq(DEK_METADATA_QUAL_BYTES))) + .thenReturn(keyData.getKeyMetadata().getBytes()); when(table.get(any(Get.class))).thenReturn(result); return keyData; } } - - @RunWith(Parameterized.class) - @Category({ MasterTests.class, SmallTests.class }) - public static class TestOps extends TestKeymetaTableAccessor { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestOps.class); - - @Parameter(0) - public boolean isReadonly; - - @Parameterized.Parameters(name = "{index},isReadonly={0}") - public static Collection data() { - return Arrays.asList( - new Object[][] { { true }, { false } }); - } - - @Test - public void testReportOperation() throws Exception { - long count = 5; - - accessor.reportOperation(CUST_ID, KEY_NAMESPACE, KEY_METADATA, count, isReadonly); - - ArgumentCaptor incrementCaptor = ArgumentCaptor.forClass(Increment.class); - verify(table).increment(incrementCaptor.capture()); - Increment increment = incrementCaptor.getValue(); - NavigableMap> familyCellMap = increment.getFamilyCellMap(); - List cells = familyCellMap.get(KEY_META_INFO_FAMILY); - assertEquals(1, cells.size()); - Cell cell = cells.get(0); - assertEquals(new Bytes(isReadonly ? READ_OP_COUNT_QUAL_BYTES : WRITE_OP_COUNT_QUAL_BYTES), - new Bytes(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength())); - assertEquals(new Bytes(Bytes.toBytes(count)), - new Bytes(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength())); - } - } - - protected void assertPut(ManagedKeyData keyData, Put put) { + protected void assertPut(ManagedKeyData keyData, Put put, byte[] rowKey) { assertEquals(Durability.SKIP_WAL, put.getDurability()); assertEquals(HConstants.SYSTEMTABLE_QOS, put.getPriority()); - assertTrue(Bytes.compareTo(constructRowKeyForMetadata(keyData), - put.getRow()) == 0); + assertTrue(Bytes.compareTo(rowKey, put.getRow()) == 0); Map valueMap = getValueMap(put); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java index ebf58ddd5a96..5c665351f17e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java @@ -38,9 +38,6 @@ import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; -import java.util.HashSet; -import java.util.List; -import java.util.Set; import net.bytebuddy.ByteBuddy; import net.bytebuddy.dynamic.loading.ClassLoadingStrategy; @@ -232,12 +229,12 @@ public void testWithInvalidProvider() throws Exception { assertNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, globalKey1.getKeyMetadata(), null)); doThrow(new IOException("Test exception")).when(testProvider).getManagedKey(any(), any(String.class)); - assertNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + assertNull(cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); verify(testProvider).getManagedKey(any(), any(String.class)); // A second call to getRandomEntry should not result in a call to the provider due to -ve // entry. clearInvocations(testProvider); - assertNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + assertNull(cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); verify(testProvider, never()).getManagedKey(any(), any(String.class)); } @@ -261,15 +258,12 @@ public void testGenericCache() throws Exception { @Test public void testActiveKeysCache() throws Exception { - conf.setInt(HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_COUNT, 10); - assertNotNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); - verify(testProvider, times(10)).getManagedKey(any(), any(String.class)); + assertNotNull(cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + verify(testProvider).getManagedKey(any(), any(String.class)); clearInvocations(testProvider); - Set activeKeys = new HashSet<>(); - for (int i = 0; i < 10; ++i) { - activeKeys.add(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); - } - assertTrue(activeKeys.size() > 1); + ManagedKeyData activeKey = cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL); + assertNotNull(activeKey); + assertEquals(activeKey, cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); verify(testProvider, never()).getManagedKey(any(), any(String.class)); } @@ -305,12 +299,12 @@ public void testGenericCacheOperations() throws Exception { } @Test - public void testRandomKeyGetNoActive() throws Exception { + public void testActiveKeyGetNoActive() throws Exception { testProvider.setMockedKeyState(ALIAS, FAILED); - assertNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + assertNull(cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); verify(testProvider).getManagedKey(any(), any(String.class)); clearInvocations(testProvider); - assertNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + assertNull(cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); verify(testProvider, never()).getManagedKey(any(), any(String.class)); } @@ -324,36 +318,30 @@ public void testActiveKeysCacheOperations() throws Exception { ManagedKeyData key = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); assertNull(cache.removeFromActiveKeys(CUST_ID, KEY_SPACE_GLOBAL, key.getKeyMetadata())); - conf.setInt(HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_COUNT, 2); - assertNotNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); - assertNotNull(cache.getAnActiveEntry(CUST_ID, "namespace1")); - assertEquals(4, cache.getActiveCacheEntryCount()); + assertNotNull(cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + assertNotNull(cache.getActiveEntry(CUST_ID, "namespace1")); + assertEquals(2, cache.getActiveCacheEntryCount()); - key = cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL); + key = cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL); removeFromActiveKeys(key); - assertEquals(3, cache.getActiveCacheEntryCount()); + assertEquals(1, cache.getActiveCacheEntryCount()); assertNull(cache.removeFromActiveKeys(CUST_ID, KEY_SPACE_GLOBAL, key.getKeyMetadata())); - assertEquals(3, cache.getActiveCacheEntryCount()); - removeFromActiveKeys(cache.getAnActiveEntry(CUST_ID, "namespace1")); - assertEquals(2, cache.getActiveCacheEntryCount()); - removeFromActiveKeys(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); assertEquals(1, cache.getActiveCacheEntryCount()); - removeFromActiveKeys(cache.getAnActiveEntry(CUST_ID, "namespace1")); + removeFromActiveKeys(cache.getActiveEntry(CUST_ID, "namespace1")); assertEquals(0, cache.getActiveCacheEntryCount()); // It should be able to retrieve the keys again - assertNotNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); - assertEquals(2, cache.getActiveCacheEntryCount()); + assertNotNull(cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + assertEquals(1, cache.getActiveCacheEntryCount()); cache.invalidateAll(); assertEquals(0, cache.getActiveCacheEntryCount()); - assertNotNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); - assertEquals(2, cache.getActiveCacheEntryCount()); + assertNotNull(cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + assertEquals(1, cache.getActiveCacheEntryCount()); } @Test public void testGenericCacheUsingActiveKeysCacheOverProvider() throws Exception { - conf.setInt(HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_COUNT, 3); - ManagedKeyData key = cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL); + ManagedKeyData key = cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL); assertNotNull(key); assertEquals(key, cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, key.getKeyMetadata(), null)); verify(testProvider, never()).unwrapKey(any(String.class), any()); @@ -367,9 +355,9 @@ public void testActiveKeysCacheSkippingProviderWhenGenericCacheEntriesExist() th assertEquals(key2, cache.getEntry(CUST_ID, "namespace1", key2.getKeyMetadata(), null)); verify(testProvider, times(2)).getManagedKey(any(), any(String.class)); clearInvocations(testProvider); - assertEquals(key1, cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + assertEquals(key1, cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); // In this case, the provider is not called because the existing keys in generic cache are - // used. + // used first (before checking keymetaAccessor). verify(testProvider, never()).getManagedKey(any(), any(String.class)); assertEquals(1, cache.getActiveCacheEntryCount()); cache.invalidateAll(); @@ -383,7 +371,7 @@ public void testActiveKeysCacheIgnnoreFailedKeyInGenericCache() throws Exception assertNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, key.getKeyMetadata(), null)); clearInvocations(testProvider); testProvider.setMockedKeyState(ALIAS, ACTIVE); - assertNotNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + assertNotNull(cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); verify(testProvider).getManagedKey(any(), any(String.class)); } @@ -395,21 +383,21 @@ public void testActiveKeysCacheWithMultipleCustodiansInGenericCache() throws Exc byte[] cust_id2 = alias2.getBytes(); ManagedKeyData key2 = testProvider.getManagedKey(cust_id2, KEY_SPACE_GLOBAL); assertNotNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, key2.getKeyMetadata(), null)); - assertNotNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + assertNotNull(cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); assertEquals(1, cache.getActiveCacheEntryCount()); } @Test public void testActiveKeysCacheWithMultipleNamespaces() throws Exception { - ManagedKeyData key1 = cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL); + ManagedKeyData key1 = cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL); assertNotNull(key1); - assertEquals(key1, cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); - ManagedKeyData key2 = cache.getAnActiveEntry(CUST_ID, "namespace1"); + assertEquals(key1, cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + ManagedKeyData key2 = cache.getActiveEntry(CUST_ID, "namespace1"); assertNotNull(key2); - assertEquals(key2, cache.getAnActiveEntry(CUST_ID, "namespace1")); - ManagedKeyData key3 = cache.getAnActiveEntry(CUST_ID, "namespace2"); + assertEquals(key2, cache.getActiveEntry(CUST_ID, "namespace1")); + ManagedKeyData key3 = cache.getActiveEntry(CUST_ID, "namespace2"); assertNotNull(key3); - assertEquals(key3, cache.getAnActiveEntry(CUST_ID, "namespace2")); + assertEquals(key3, cache.getActiveEntry(CUST_ID, "namespace2")); verify(testProvider, times(3)).getManagedKey(any(), any(String.class)); assertEquals(3, cache.getActiveCacheEntryCount()); } @@ -450,20 +438,20 @@ public void testGenericCacheRetrievalFromL2Cache() throws Exception { @Test public void testActiveKeysCacheNonExistentKeyInL2Cache() throws Exception { - assertNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); - verify(mockL2).getActiveKeys(any(), any(String.class)); + assertNull(cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + verify(mockL2).getActiveKey(any(), any(String.class)); clearInvocations(mockL2); - assertNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); - verify(mockL2, never()).getActiveKeys(any(), any(String.class)); + assertNull(cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + verify(mockL2, never()).getActiveKey(any(), any(String.class)); } @Test public void testActiveKeysCacheRetrievalFromL2Cache() throws Exception { ManagedKeyData key = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); - when(mockL2.getActiveKeys(CUST_ID, KEY_SPACE_GLOBAL)) - .thenReturn(List.of(key)); - assertEquals(key, cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); - verify(mockL2).getActiveKeys(any(), any(String.class)); + when(mockL2.getActiveKey(CUST_ID, KEY_SPACE_GLOBAL)) + .thenReturn(key); + assertEquals(key, cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + verify(mockL2).getActiveKey(any(), any(String.class)); } @Test @@ -478,14 +466,29 @@ public void testGenericCacheWithKeymetaAccessorException() throws Exception { } @Test - public void testGetRandomEntryWithKeymetaAccessorException() throws Exception { - when(mockL2.getActiveKeys(CUST_ID, KEY_SPACE_GLOBAL)) + public void testGetActiveEntryWithKeymetaAccessorException() throws Exception { + when(mockL2.getActiveKey(CUST_ID, KEY_SPACE_GLOBAL)) .thenThrow(new IOException("Test exception")); - assertNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); - verify(mockL2).getActiveKeys(any(), any(String.class)); + assertNull(cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + verify(mockL2).getActiveKey(any(), any(String.class)); clearInvocations(mockL2); - assertNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); - verify(mockL2, never()).getActiveKeys(any(), any(String.class)); + assertNull(cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + verify(mockL2, never()).getActiveKey(any(), any(String.class)); + } + + @Test + public void testActiveKeysCacheUsesKeymetaAccessorWhenGenericCacheEmpty() throws Exception { + // Ensure generic cache is empty + cache.invalidateAll(); + + // Mock the keymetaAccessor to return a key + ManagedKeyData key = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); + when(mockL2.getActiveKey(CUST_ID, KEY_SPACE_GLOBAL)) + .thenReturn(key); + + // Get the active entry - it should call keymetaAccessor since generic cache is empty + assertEquals(key, cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + verify(mockL2).getActiveKey(any(), any(String.class)); } } @@ -524,8 +527,7 @@ public void testAddKeyFailure() throws Exception { @Test public void testGenericCacheDynamicLookupUnexpectedException() throws Exception { - doThrow(new RuntimeException("Test - exception")).when(testProvider).unwrapKey(any(String.class), any()); + doThrow(new RuntimeException("Test exception")).when(testProvider).unwrapKey(any(String.class), any()); assertNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, "test-metadata", null)); assertNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, "test-metadata", null)); verify(mockL2).getKey(any(), any(String.class), any(String.class)); @@ -536,10 +538,10 @@ public void testGenericCacheDynamicLookupUnexpectedException() throws Exception public void testActiveKeysCacheDynamicLookupUnexpectedException() throws Exception { doThrow(new RuntimeException("Test exception")).when(testProvider).getManagedKey(any(), any(String.class)); - assertNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + assertNull(cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); verify(testProvider).getManagedKey(any(), any(String.class)); clearInvocations(testProvider); - assertNull(cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + assertNull(cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); verify(testProvider, never()).getManagedKey(any(), any(String.class)); } @@ -547,8 +549,22 @@ public void testActiveKeysCacheDynamicLookupUnexpectedException() throws Excepti public void testActiveKeysCacheRetrivalFromProviderWhenKeyNotFoundInL2Cache() throws Exception { ManagedKeyData key = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); doReturn(key).when(testProvider).getManagedKey(any(), any(String.class)); - assertEquals(key, cache.getAnActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); - verify(mockL2).getActiveKeys(any(), any(String.class)); + assertEquals(key, cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + verify(mockL2).getActiveKey(any(), any(String.class)); + } + + @Test + public void testActiveKeysCacheUsesGenericCacheFirst() throws Exception { + // First populate the generic cache with an active key + ManagedKeyData key = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); + assertEquals(key, cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, key.getKeyMetadata(), null)); + + // Clear invocations to reset the mock state + clearInvocations(mockL2); + + // Now get the active entry - it should use the generic cache first, not call keymetaAccessor + assertEquals(key, cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + verify(mockL2, never()).getActiveKey(any(), any(String.class)); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestSystemKeyCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestSystemKeyCache.java index 555b5b47c726..a92818f8aada 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestSystemKeyCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestSystemKeyCache.java @@ -87,11 +87,11 @@ public void setUp() { // Create test key data with different checksums keyData1 = new ManagedKeyData(TEST_CUSTODIAN, TEST_NAMESPACE, testKey1, - ManagedKeyState.ACTIVE, TEST_METADATA_1, 1000L, 0, 0); + ManagedKeyState.ACTIVE, TEST_METADATA_1, 1000L); keyData2 = new ManagedKeyData(TEST_CUSTODIAN, TEST_NAMESPACE, testKey2, - ManagedKeyState.ACTIVE, TEST_METADATA_2, 2000L, 0, 0); + ManagedKeyState.ACTIVE, TEST_METADATA_2, 2000L); keyData3 = new ManagedKeyData(TEST_CUSTODIAN, TEST_NAMESPACE, testKey3, - ManagedKeyState.ACTIVE, TEST_METADATA_3, 3000L, 0, 0); + ManagedKeyState.ACTIVE, TEST_METADATA_3, 3000L); // Create test paths keyPath1 = new Path("/system/keys/key1"); @@ -273,9 +273,9 @@ public void testCacheWithKeysHavingSameChecksum() throws Exception { Key sameKey2 = new SecretKeySpec("identical-bytes".getBytes(), "AES"); ManagedKeyData sameManagedKey1 = new ManagedKeyData(TEST_CUSTODIAN, TEST_NAMESPACE, - sameKey1, ManagedKeyState.ACTIVE, "metadata-A", 1000L, 0, 0); + sameKey1, ManagedKeyState.ACTIVE, "metadata-A", 1000L); ManagedKeyData sameManagedKey2 = new ManagedKeyData(TEST_CUSTODIAN, TEST_NAMESPACE, - sameKey2, ManagedKeyState.ACTIVE, "metadata-B", 2000L, 0, 0); + sameKey2, ManagedKeyState.ACTIVE, "metadata-B", 2000L); // Verify they have the same checksum assertEquals(sameManagedKey1.getKeyChecksum(), sameManagedKey2.getKeyChecksum()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java index 6733ad2eb959..165ff9795dd9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java @@ -26,11 +26,10 @@ import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThrows; -import static org.mockito.ArgumentMatchers.any; +import static org.junit.Assume.assumeTrue; import static org.mockito.ArgumentMatchers.argThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.reset; -import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -58,7 +57,6 @@ import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.After; import org.junit.Before; import org.junit.ClassRule; import org.junit.Rule; @@ -76,8 +74,6 @@ @Suite.SuiteClasses({ TestKeymetaAdminImpl.TestWhenDisabled.class, TestKeymetaAdminImpl.TestAdminImpl.class, TestKeymetaAdminImpl.TestForKeyProviderNullReturn.class, - TestKeymetaAdminImpl.TestMultiKeyGen.class, - TestKeymetaAdminImpl.TestForInvalidKeyCountConfig.class, }) @Category({ MasterTests.class, SmallTests.class }) public class TestKeymetaAdminImpl { @@ -96,7 +92,7 @@ public class TestKeymetaAdminImpl { protected FileSystem mockFileSystem = mock(FileSystem.class); protected Server mockServer = mock(Server.class); - protected DummyKeymetaAdminImpl keymetaAdmin; + protected KeymetaAdminImplForTest keymetaAdmin; KeymetaTableAccessor keymetaAccessor = mock(KeymetaTableAccessor.class); @Before @@ -110,7 +106,7 @@ public void setUp() throws Exception { when(mockServer.getFileSystem()).thenReturn(mockFileSystem); when(mockServer.getConfiguration()).thenReturn(conf); - keymetaAdmin = new DummyKeymetaAdminImpl(mockServer, keymetaAccessor); + keymetaAdmin = new KeymetaAdminImplForTest(mockServer, keymetaAccessor); } @RunWith(BlockJUnit4ClassRunner.class) @@ -145,141 +141,69 @@ public static class TestAdminImpl extends TestKeymetaAdminImpl { HBaseClassTestRule.forClass(TestAdminImpl.class); @Parameter(0) - public int nKeys; - @Parameter(1) public String keySpace; - @Parameter(2) + @Parameter(1) public ManagedKeyState keyState; - @Parameter(3) + @Parameter(2) public boolean isNullKey; - @Parameters(name = "{index},nKeys={0},keySpace={1},keyState={2}") + @Parameters(name = "{index},keySpace={1},keyState={2}") public static Collection data() { return Arrays.asList( new Object[][] { - { 1, KEY_SPACE_GLOBAL, ACTIVE, false }, - { 1, "ns1", ACTIVE, false }, - { 1, KEY_SPACE_GLOBAL, FAILED, true }, - { 1, KEY_SPACE_GLOBAL, INACTIVE, false }, - { 1, KEY_SPACE_GLOBAL, DISABLED, true }, - { 2, KEY_SPACE_GLOBAL, ACTIVE, false }, + { KEY_SPACE_GLOBAL, ACTIVE, false }, + { "ns1", ACTIVE, false }, + { KEY_SPACE_GLOBAL, FAILED, true }, + { KEY_SPACE_GLOBAL, INACTIVE, false }, + { KEY_SPACE_GLOBAL, DISABLED, true }, }); } - @Override - public void setUp() throws Exception { - super.setUp(); - conf.set(HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_COUNT, - Integer.toString(nKeys)); - } - @Test public void testEnableAndGet() throws Exception { MockManagedKeyProvider managedKeyProvider = (MockManagedKeyProvider) Encryption.getKeyProvider(conf); - String cust = "cust1"; - managedKeyProvider.setMockedKeyState(cust, keyState); - String encodedCust = ManagedKeyProvider.encodeToStr(cust.getBytes()); + managedKeyProvider.setMockedKeyState(CUST, keyState); List managedKeyStates = - keymetaAdmin.enableKeyManagement(encodedCust, keySpace); + keymetaAdmin.enableKeyManagement(ENCODED_CUST, keySpace); assertNotNull(managedKeyStates); assertEquals(1, managedKeyStates.size()); assertEquals(keyState, managedKeyStates.get(0).getKeyState()); verify(keymetaAccessor).addKey(argThat( (ManagedKeyData keyData) -> assertKeyData(keyData, keyState, - isNullKey ? null : managedKeyProvider.getMockedKey(cust, + isNullKey ? null : managedKeyProvider.getMockedKey(CUST, keySpace)))); - verify(keymetaAccessor).getAllKeys(cust.getBytes(), keySpace); + verify(keymetaAccessor).getActiveKey(CUST.getBytes(), keySpace); reset(keymetaAccessor); - keymetaAdmin.getManagedKeys(encodedCust, keySpace); - verify(keymetaAccessor).getAllKeys(cust.getBytes(), keySpace); - } - } - - @RunWith(Parameterized.class) - @Category({ MasterTests.class, SmallTests.class }) - public static class TestMultiKeyGen extends TestKeymetaAdminImpl { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestKeymetaAdminImpl.TestMultiKeyGen.class); - - @Parameter(0) - public String keySpace; - - private MockManagedKeyProvider managedKeyProvider; - - @Parameters(name = "{index},keySpace={0}") - public static Collection data() { - return Arrays.asList( - new Object[][] { - { KEY_SPACE_GLOBAL }, - { "ns1" }, - }); - } - - @Override - public void setUp() throws Exception { - super.setUp(); - conf.set(HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_COUNT, "3"); - managedKeyProvider = (MockManagedKeyProvider) Encryption.getKeyProvider(conf); - managedKeyProvider.setMultikeyGenMode(true); - } - - @After - public void tearDown() { - // Reset as this instance gets reused for more than 1 test. - managedKeyProvider.setMockedKeyState(CUST, ACTIVE); + keymetaAdmin.getManagedKeys(ENCODED_CUST, keySpace); + verify(keymetaAccessor).getAllKeys(CUST.getBytes(), keySpace); } @Test - public void testEnable() throws Exception { - List managedKeyStates; - // Test 1: Enable key management with 3 keys - managedKeyStates = keymetaAdmin.enableKeyManagement(ENCODED_CUST, keySpace); - assertKeys(managedKeyStates, 3); - verify(keymetaAccessor).getAllKeys(CUST.getBytes(), keySpace); - verify(keymetaAccessor, times(3)).addKey(any()); - - // Test 2: Enable key management with 3 keys, but already enabled - reset(keymetaAccessor); - when(keymetaAccessor.getAllKeys(CUST.getBytes(), keySpace)).thenReturn(managedKeyStates); - managedKeyStates = keymetaAdmin.enableKeyManagement(ENCODED_CUST, keySpace); - assertKeys(managedKeyStates, 3); - verify(keymetaAccessor, times(0)).addKey(any()); - - // Test 3: Enable key management with 4 keys, but only 1 key is added - reset(keymetaAccessor); - when(keymetaAccessor.getAllKeys(CUST.getBytes(), keySpace)).thenReturn(managedKeyStates); - keymetaAdmin.activeKeyCountOverride = 4; - managedKeyStates = keymetaAdmin.enableKeyManagement(ENCODED_CUST, keySpace); - assertKeys(managedKeyStates, 1); - verify(keymetaAccessor, times(1)).addKey(any()); - - // Test 4: Enable key management when key provider is not able to generate any new keys - reset(keymetaAccessor); - when(keymetaAccessor.getAllKeys(CUST.getBytes(), keySpace)).thenReturn(managedKeyStates); - managedKeyProvider.setMultikeyGenMode(false); - managedKeyStates = keymetaAdmin.enableKeyManagement(ENCODED_CUST, keySpace); - assertKeys(managedKeyStates, 0); - verify(keymetaAccessor, times(0)).addKey(any()); - - // Test 5: Enable key management when key provider is not able to generate any new keys - reset(keymetaAccessor); - managedKeyProvider.setMockedKeyState(CUST, FAILED); - managedKeyStates = keymetaAdmin.enableKeyManagement(ENCODED_CUST, keySpace); - assertNotNull(managedKeyStates); - assertEquals(1, managedKeyStates.size()); - assertEquals(FAILED, managedKeyStates.get(0).getKeyState()); - verify(keymetaAccessor, times(1)).addKey(any()); + public void testEnableKeyManagement() throws Exception { + assumeTrue(keyState == ACTIVE); + List keys = keymetaAdmin.enableKeyManagement(ENCODED_CUST, "namespace1"); + assertEquals(1, keys.size()); + assertEquals(ManagedKeyState.ACTIVE, keys.get(0).getKeyState()); + assertEquals(ENCODED_CUST, keys.get(0).getKeyCustodianEncoded()); + assertEquals("namespace1", keys.get(0).getKeyNamespace()); + + // Second call should return the same keys since our mock key provider returns the same key + List keys2 = keymetaAdmin.enableKeyManagement(ENCODED_CUST, "namespace1"); + assertEquals(1, keys2.size()); + assertEquals(keys.get(0), keys2.get(0)); } - private static void assertKeys(List managedKeyStates, int expectedCnt) { - assertNotNull(managedKeyStates); - assertEquals(expectedCnt, managedKeyStates.size()); - for (int i = 0; i < managedKeyStates.size(); ++i) { - assertEquals(ACTIVE, managedKeyStates.get(i).getKeyState()); - } + @Test + public void testEnableKeyManagementWithMultipleNamespaces() throws Exception { + List keys = keymetaAdmin.enableKeyManagement(ENCODED_CUST, "namespace1"); + assertEquals(1, keys.size()); + assertEquals("namespace1", keys.get(0).getKeyNamespace()); + + List keys2 = keymetaAdmin.enableKeyManagement(ENCODED_CUST, "namespace2"); + assertEquals(1, keys2.size()); + assertEquals("namespace2", keys2.get(0).getKeyNamespace()); } } @@ -315,55 +239,11 @@ public void test() throws Exception { } } - @RunWith(Parameterized.class) - @Category({ MasterTests.class, SmallTests.class }) - public static class TestForInvalidKeyCountConfig extends TestKeymetaAdminImpl { - @ClassRule public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestForInvalidKeyCountConfig.class); - - @Parameter(0) - public String keyCount;; - @Parameter(1) - public String keySpace; - @Parameter(2) - public Class expectedExType; - @Parameters(name = "{index},keyCount={0},keySpace={1}expectedExType={2}") - public static Collection data() { - return Arrays.asList(new Object[][] { - { "0", KEY_SPACE_GLOBAL, IOException.class }, - { "-1", KEY_SPACE_GLOBAL, IOException.class }, - { "abc", KEY_SPACE_GLOBAL, NumberFormatException.class }, - { "0", "ns1", IOException.class }, - { "-1", "ns1", IOException.class }, - { "abc", "ns1", NumberFormatException.class }, - }); - } - - @Test - public void test() throws Exception { - conf.set(HConstants.CRYPTO_MANAGED_KEYS_PER_CUST_NAMESPACE_ACTIVE_KEY_COUNT, keyCount); - String cust = "cust1"; - String encodedCust = ManagedKeyProvider.encodeToStr(cust.getBytes()); - assertThrows(expectedExType, () -> - keymetaAdmin.enableKeyManagement(encodedCust, keySpace)); - } - } - - private class DummyKeymetaAdminImpl extends KeymetaAdminImpl { - public DummyKeymetaAdminImpl(Server mockServer, KeymetaTableAccessor mockAccessor) { + private class KeymetaAdminImplForTest extends KeymetaAdminImpl { + public KeymetaAdminImplForTest(Server mockServer, KeymetaTableAccessor mockAccessor) { super(mockServer); } - public Integer activeKeyCountOverride; - - @Override - protected int getPerCustodianNamespaceActiveKeyConfCount() throws IOException { - if (activeKeyCountOverride != null) { - return activeKeyCountOverride; - } - return super.getPerCustodianNamespaceActiveKeyConfCount(); - } - @Override public void addKey(ManagedKeyData keyData) throws IOException { keymetaAccessor.addKey(keyData); @@ -374,6 +254,12 @@ public List getAllKeys(byte[] key_cust, String keyNamespace) throws IOException, KeyException { return keymetaAccessor.getAllKeys(key_cust, keyNamespace); } + + @Override + public ManagedKeyData getActiveKey(byte[] key_cust, String keyNamespace) + throws IOException, KeyException { + return keymetaAccessor.getActiveKey(key_cust, keyNamespace); + } } protected boolean assertKeyData(ManagedKeyData keyData, ManagedKeyState expKeyState, diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java index 2e62dbee0007..d7045b245616 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSystemKeyAccessorAndManager.java @@ -369,7 +369,7 @@ public void testLoadSystemKeySuccess() throws Exception { Key testKey = new SecretKeySpec("test-key-bytes".getBytes(), "AES"); ManagedKeyData testKeyData = new ManagedKeyData( "custodian".getBytes(), "namespace", testKey, - ManagedKeyState.ACTIVE, testMetadata, 1000L, 0, 0); + ManagedKeyState.ACTIVE, testMetadata, 1000L); // Mock key provider ManagedKeyProvider realProvider = mock(ManagedKeyProvider.class); diff --git a/hbase-shell/src/main/ruby/shell/commands/keymeta_command_base.rb b/hbase-shell/src/main/ruby/shell/commands/keymeta_command_base.rb index 92b52f5e4be5..e2af5f524cc3 100644 --- a/hbase-shell/src/main/ruby/shell/commands/keymeta_command_base.rb +++ b/hbase-shell/src/main/ruby/shell/commands/keymeta_command_base.rb @@ -23,8 +23,7 @@ module Commands # KeymetaCommandBase is a base class for all key management commands. class KeymetaCommandBase < Command def print_key_statuses(statuses) - formatter.header(%w[ENCODED-KEY NAMESPACE STATUS METADATA METADATA-HASH - REFRESH-TIMESTAMP READ-OP-COUNT WRITE-OP-COUNT]) + formatter.header(%w[ENCODED-KEY NAMESPACE STATUS METADATA METADATA-HASH REFRESH-TIMESTAMP]) statuses.each { |status| formatter.row(format_status_row(status)) } formatter.footer(statuses.size) end @@ -38,9 +37,7 @@ def format_status_row(status) status.getKeyStatus.toString, status.getKeyMetadata, status.getKeyMetadataHashEncoded, - status.getRefreshTimestamp, - status.getReadOpCount, - status.getWriteOpCount + status.getRefreshTimestamp ] end end From 7b6c310f1210298c4c304ec00279c8685ce4ea13 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Sun, 20 Jul 2025 20:50:25 +0530 Subject: [PATCH 65/70] Fix test failures --- .../hadoop/hbase/keymeta/ManagedKeyDataCache.java | 13 +++++-------- .../hbase/keymeta/TestManagedKeyDataCache.java | 3 ++- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java index 7fb7f8572833..27294230effa 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java @@ -273,6 +273,11 @@ public ManagedKeyData getActiveEntry(byte[] key_cust, String keyNamespace) { } } + if (retrievedKey == null) { + retrievedKey = new ManagedKeyData(key_cust, keyNamespace, null, ManagedKeyState.FAILED, + null); + } + return retrievedKey; }); @@ -282,14 +287,6 @@ public ManagedKeyData getActiveEntry(byte[] key_cust, String keyNamespace) { return null; } - /** - * @deprecated Use {@link #getActiveEntry(byte[], String)} instead. - */ - @Deprecated - public ManagedKeyData getAnActiveEntry(byte[] key_cust, String keyNamespace) { - return getActiveEntry(key_cust, keyNamespace); - } - /** * Invalidates all entries in the cache. */ diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java index 5c665351f17e..07217086e610 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java @@ -535,12 +535,13 @@ public void testGenericCacheDynamicLookupUnexpectedException() throws Exception } @Test - public void testActiveKeysCacheDynamicLookupUnexpectedException() throws Exception { + public void testActiveKeysCacheDynamicLookupWithUnexpectedException() throws Exception { doThrow(new RuntimeException("Test exception")).when(testProvider).getManagedKey(any(), any(String.class)); assertNull(cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); verify(testProvider).getManagedKey(any(), any(String.class)); clearInvocations(testProvider); + // A 2nd invocation should not result in a call to the provider. assertNull(cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); verify(testProvider, never()).getManagedKey(any(), any(String.class)); } From 431df7598e4d88cf85a59cc988308bea7794a4ae Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Sun, 20 Jul 2025 21:17:20 +0530 Subject: [PATCH 66/70] Removed the key removal, will add it back with correct pattern --- .../hbase/keymeta/ManagedKeyDataCache.java | 33 +++-------------- .../keymeta/TestManagedKeyDataCache.java | 37 ------------------- 2 files changed, 6 insertions(+), 64 deletions(-) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java index 27294230effa..f6ee4585f438 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java @@ -44,7 +44,7 @@ public class ManagedKeyDataCache extends KeyManagementBase { private static final Logger LOG = LoggerFactory.getLogger(ManagedKeyDataCache.class); - private Cache cache; + private Cache cacheByMetadata; private Cache activeKeysCache; private final KeymetaTableAccessor keymetaAccessor; @@ -99,7 +99,7 @@ public ManagedKeyDataCache(Configuration conf, KeymetaTableAccessor keymetaAcces int activeKeysMaxEntries = conf.getInt( HConstants.CRYPTO_MANAGED_KEYS_L1_ACTIVE_CACHE_MAX_NS_ENTRIES_CONF_KEY, HConstants.CRYPTO_MANAGED_KEYS_L1_ACTIVE_CACHE_MAX_NS_ENTRIES_DEFAULT); - this.cache = Caffeine.newBuilder() + this.cacheByMetadata = Caffeine.newBuilder() .maximumSize(maxEntries) .build(); this.activeKeysCache = Caffeine.newBuilder() @@ -122,7 +122,7 @@ public ManagedKeyDataCache(Configuration conf, KeymetaTableAccessor keymetaAcces */ public ManagedKeyData getEntry(byte[] key_cust, String keyNamespace, String keyMetadata, byte[] wrappedKey) throws IOException, KeyException { - ManagedKeyData entry = cache.get(keyMetadata, metadata -> { + ManagedKeyData entry = cacheByMetadata.get(keyMetadata, metadata -> { // First check if it's in the active keys cache ManagedKeyData keyData = getFromActiveKeysCache(key_cust, keyNamespace, keyMetadata); @@ -191,33 +191,12 @@ private ManagedKeyData getFromActiveKeysCache(byte[] key_cust, String keyNamespa return null; } - /** - * Removes an entry from generic cache based on its key metadata. - * - * @param keyMetadata the key metadata of the entry to be removed - * @return the removed ManagedKeyData entry, or null if not found - */ - public ManagedKeyData removeEntry(String keyMetadata) { - return cache.asMap().remove(keyMetadata); - } - - public ManagedKeyData removeFromActiveKeys(byte[] key_cust, String key_namespace, - String keyMetadata) { - ActiveKeysCacheKey cacheKey = new ActiveKeysCacheKey(key_cust, key_namespace); - ManagedKeyData keyData = activeKeysCache.getIfPresent(cacheKey); - if (keyData != null && keyData.getKeyMetadata().equals(keyMetadata)) { - activeKeysCache.invalidate(cacheKey); - return keyData; - } - return null; - } - /** * @return the approximate number of entries in the main cache which is meant for general lookup * by key metadata. */ public int getGenericCacheEntryCount() { - return (int) cache.estimatedSize(); + return (int) cacheByMetadata.estimatedSize(); } /** @@ -244,7 +223,7 @@ public ManagedKeyData getActiveEntry(byte[] key_cust, String keyNamespace) { // First check if there are any active keys in the generic cache, which should be // suitable for standalone tools. - retrievedKey = this.cache.asMap().values().stream() + retrievedKey = this.cacheByMetadata.asMap().values().stream() .filter(cachedKeyData -> Bytes.equals(cachedKeyData.getKeyCustodian(), key_cust) && cachedKeyData.getKeyNamespace().equals(keyNamespace) && cachedKeyData.getKeyState() == ManagedKeyState.ACTIVE) @@ -291,7 +270,7 @@ public ManagedKeyData getActiveEntry(byte[] key_cust, String keyNamespace) { * Invalidates all entries in the cache. */ public void invalidateAll() { - cache.invalidateAll(); + cacheByMetadata.invalidateAll(); activeKeysCache.invalidateAll(); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java index 07217086e610..1e1908f60f2d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java @@ -270,8 +270,6 @@ public void testActiveKeysCache() throws Exception { @Test public void testGenericCacheOperations() throws Exception { ManagedKeyData globalKey1 = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); - assertNull(cache.removeEntry(globalKey1.getKeyMetadata())); - assertGenericCacheEntries(globalKey1); ManagedKeyData nsKey1 = testProvider.getManagedKey(CUST_ID, "namespace1"); assertGenericCacheEntries(nsKey1, globalKey1); ManagedKeyData globalKey2 = testProvider.getManagedKey(CUST_ID, @@ -280,22 +278,6 @@ public void testGenericCacheOperations() throws Exception { ManagedKeyData nsKey2 = testProvider.getManagedKey(CUST_ID, "namespace1"); assertGenericCacheEntries(nsKey2, globalKey2, nsKey1, globalKey1); - - assertEquals(globalKey1, cache.removeEntry(globalKey1.getKeyMetadata())); - assertGenericCacheEntries(nsKey2, globalKey2, nsKey1); - assertNull(cache.removeEntry(globalKey1.getKeyMetadata())); - // It should be able to retrieve the once removed. - assertGenericCacheEntries(nsKey2, globalKey2, nsKey1, globalKey1); - assertEquals(globalKey1, cache.removeEntry(globalKey1.getKeyMetadata())); - assertEquals(nsKey2, cache.removeEntry(nsKey2.getKeyMetadata())); - assertGenericCacheEntries(globalKey2, nsKey1); - assertEquals(nsKey1, cache.removeEntry(nsKey1.getKeyMetadata())); - assertGenericCacheEntries(globalKey2); - assertEquals(globalKey2, cache.removeEntry(globalKey2.getKeyMetadata())); - cache.invalidateAll(); - assertEquals(0, cache.getGenericCacheEntryCount()); - // Sholld be functional after innvalidation. - assertGenericCacheEntries(globalKey1); } @Test @@ -308,31 +290,12 @@ public void testActiveKeyGetNoActive() throws Exception { verify(testProvider, never()).getManagedKey(any(), any(String.class)); } - private void removeFromActiveKeys(ManagedKeyData key) { - cache.removeFromActiveKeys(key.getKeyCustodian(), key.getKeyNamespace(), - key.getKeyMetadata()); - } - @Test public void testActiveKeysCacheOperations() throws Exception { - ManagedKeyData key = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); - assertNull(cache.removeFromActiveKeys(CUST_ID, KEY_SPACE_GLOBAL, key.getKeyMetadata())); - assertNotNull(cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); assertNotNull(cache.getActiveEntry(CUST_ID, "namespace1")); assertEquals(2, cache.getActiveCacheEntryCount()); - key = cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL); - removeFromActiveKeys(key); - assertEquals(1, cache.getActiveCacheEntryCount()); - assertNull(cache.removeFromActiveKeys(CUST_ID, KEY_SPACE_GLOBAL, key.getKeyMetadata())); - assertEquals(1, cache.getActiveCacheEntryCount()); - removeFromActiveKeys(cache.getActiveEntry(CUST_ID, "namespace1")); - assertEquals(0, cache.getActiveCacheEntryCount()); - // It should be able to retrieve the keys again - assertNotNull(cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); - assertEquals(1, cache.getActiveCacheEntryCount()); - cache.invalidateAll(); assertEquals(0, cache.getActiveCacheEntryCount()); assertNotNull(cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); From ff7f6ef312f43544d6c0da4c9f1be3a77263e62f Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Mon, 21 Jul 2025 11:07:09 +0530 Subject: [PATCH 67/70] test coverage --- .../hbase/keymeta/ManagedKeyDataCache.java | 10 +++++ .../keymeta/TestManagedKeyDataCache.java | 45 ++++++++++++++++++- 2 files changed, 53 insertions(+), 2 deletions(-) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java index f6ee4585f438..fa1a9c3ef925 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java @@ -273,4 +273,14 @@ public void invalidateAll() { cacheByMetadata.invalidateAll(); activeKeysCache.invalidateAll(); } + + @InterfaceAudience.LimitedPrivate({ HBaseInterfaceAudience.UNITTEST }) + public ManagedKeyData removeEntry(String keyMetadata) { + return cacheByMetadata.asMap().remove(keyMetadata); + } + + @InterfaceAudience.LimitedPrivate({ HBaseInterfaceAudience.UNITTEST }) + public ManagedKeyData removeFromActiveKeys(byte[] key_cust, String keyNamespace) { + return activeKeysCache.asMap().remove(new ActiveKeysCacheKey(key_cust, keyNamespace)); + } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java index 1e1908f60f2d..aff6b8c043df 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java @@ -22,6 +22,7 @@ import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.FAILED; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertNotNull; @@ -517,18 +518,58 @@ public void testActiveKeysCacheRetrivalFromProviderWhenKeyNotFoundInL2Cache() th verify(mockL2).getActiveKey(any(), any(String.class)); } + @Test + public void testGenericCacheUsesActiveKeysCacheFirst() throws Exception { + // First populate the active keys cache with an active key + ManagedKeyData key1 = cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL); + verify(testProvider).getManagedKey(any(), any(String.class)); + clearInvocations(testProvider); + + // Now get the generic cache entry - it should use the active keys cache first, not call + // keymetaAccessor + assertEquals(key1, cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, key1.getKeyMetadata(), null)); + verify(testProvider, never()).getManagedKey(any(), any(String.class)); + + // Lookup a diffrent key. + ManagedKeyData key2 = cache.getActiveEntry(CUST_ID, "namespace1"); + assertNotEquals(key1, key2); + verify(testProvider).getManagedKey(any(), any(String.class)); + clearInvocations(testProvider); + + // Now get the generic cache entry - it should use the active keys cache first, not call + // keymetaAccessor + assertEquals(key2, cache.getEntry(CUST_ID, "namespace1", key2.getKeyMetadata(), null)); + verify(testProvider, never()).getManagedKey(any(), any(String.class)); + } + + @Test + public void testGetOlerEntryFromGenericCache() throws Exception { + testProvider.setMultikeyGenMode(true); + + // Get one version of the key in to ActiveKeysCache + ManagedKeyData key1 = cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL); + assertNotNull(key1); + clearInvocations(testProvider); + + // Now try to lookup another version of the key, it should lookup and discard the active key. + ManagedKeyData key2 = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); + assertEquals(key2, cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, key2.getKeyMetadata(), null)); + verify(testProvider).unwrapKey(any(String.class), any()); + } + @Test public void testActiveKeysCacheUsesGenericCacheFirst() throws Exception { // First populate the generic cache with an active key ManagedKeyData key = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); assertEquals(key, cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, key.getKeyMetadata(), null)); + verify(testProvider).unwrapKey(any(String.class), any()); // Clear invocations to reset the mock state - clearInvocations(mockL2); + clearInvocations(testProvider); // Now get the active entry - it should use the generic cache first, not call keymetaAccessor assertEquals(key, cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); - verify(mockL2, never()).getActiveKey(any(), any(String.class)); + verify(testProvider, never()).unwrapKey(any(String.class), any()); } } From 7a23d5de0844ead0bd5acfcdbf6ba1f38c784455 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Mon, 21 Jul 2025 11:54:48 +0530 Subject: [PATCH 68/70] Improvement to caching logic --- .../hbase/keymeta/ManagedKeyDataCache.java | 30 +++-------- .../keymeta/TestManagedKeyDataCache.java | 51 ++++++++++++------- 2 files changed, 42 insertions(+), 39 deletions(-) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java index fa1a9c3ef925..1764759b71f2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java @@ -161,6 +161,11 @@ public ManagedKeyData getEntry(byte[] key_cust, String keyNamespace, String keyM keyMetadata); } + // Also update activeKeysCache if relevant and is missing. + if (keyData.getKeyState() == ManagedKeyState.ACTIVE) { + activeKeysCache.asMap().putIfAbsent(new ActiveKeysCacheKey(key_cust, keyNamespace), keyData); + } + if (!ManagedKeyState.isUsable(keyData.getKeyState())) { LOG.info("Failed to get usable key data with metadata: {} for prefix: {}", metadata, ManagedKeyProvider.encodeToStr(key_cust)); @@ -221,17 +226,8 @@ public ManagedKeyData getActiveEntry(byte[] key_cust, String keyNamespace) { ManagedKeyData keyData = activeKeysCache.get(cacheKey, key -> { ManagedKeyData retrievedKey = null; - // First check if there are any active keys in the generic cache, which should be - // suitable for standalone tools. - retrievedKey = this.cacheByMetadata.asMap().values().stream() - .filter(cachedKeyData -> Bytes.equals(cachedKeyData.getKeyCustodian(), key_cust) - && cachedKeyData.getKeyNamespace().equals(keyNamespace) - && cachedKeyData.getKeyState() == ManagedKeyState.ACTIVE) - .findFirst() - .orElse(null); - // Try to load from KeymetaTableAccessor if not found in cache - if (retrievedKey == null && keymetaAccessor != null) { + if (keymetaAccessor != null) { try { retrievedKey = keymetaAccessor.getActiveKey(key_cust, keyNamespace); } catch (IOException | KeyException | RuntimeException e) { @@ -254,13 +250,13 @@ public ManagedKeyData getActiveEntry(byte[] key_cust, String keyNamespace) { if (retrievedKey == null) { retrievedKey = new ManagedKeyData(key_cust, keyNamespace, null, ManagedKeyState.FAILED, - null); + null); } return retrievedKey; }); - if (keyData != null && keyData.getKeyState() == ManagedKeyState.ACTIVE) { + if (keyData.getKeyState() == ManagedKeyState.ACTIVE) { return keyData; } return null; @@ -273,14 +269,4 @@ public void invalidateAll() { cacheByMetadata.invalidateAll(); activeKeysCache.invalidateAll(); } - - @InterfaceAudience.LimitedPrivate({ HBaseInterfaceAudience.UNITTEST }) - public ManagedKeyData removeEntry(String keyMetadata) { - return cacheByMetadata.asMap().remove(keyMetadata); - } - - @InterfaceAudience.LimitedPrivate({ HBaseInterfaceAudience.UNITTEST }) - public ManagedKeyData removeFromActiveKeys(byte[] key_cust, String keyNamespace) { - return activeKeysCache.asMap().remove(new ActiveKeysCacheKey(key_cust, keyNamespace)); - } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java index aff6b8c043df..c44e7d45061b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/TestManagedKeyDataCache.java @@ -19,7 +19,9 @@ import static org.apache.hadoop.hbase.io.crypto.ManagedKeyData.KEY_SPACE_GLOBAL; import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.ACTIVE; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.DISABLED; import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.FAILED; +import static org.apache.hadoop.hbase.io.crypto.ManagedKeyState.INACTIVE; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; @@ -39,6 +41,8 @@ import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; +import java.util.Arrays; +import java.util.stream.Collectors; import net.bytebuddy.ByteBuddy; import net.bytebuddy.dynamic.loading.ClassLoadingStrategy; @@ -66,6 +70,7 @@ import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.io.crypto.Encryption; import org.apache.hadoop.hbase.io.crypto.ManagedKeyData; +import org.apache.hadoop.hbase.io.crypto.ManagedKeyState; import org.apache.hadoop.hbase.io.crypto.MockManagedKeyProvider; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; @@ -273,8 +278,7 @@ public void testGenericCacheOperations() throws Exception { ManagedKeyData globalKey1 = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); ManagedKeyData nsKey1 = testProvider.getManagedKey(CUST_ID, "namespace1"); assertGenericCacheEntries(nsKey1, globalKey1); - ManagedKeyData globalKey2 = testProvider.getManagedKey(CUST_ID, - KEY_SPACE_GLOBAL); + ManagedKeyData globalKey2 = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); assertGenericCacheEntries(globalKey2, nsKey1, globalKey1); ManagedKeyData nsKey2 = testProvider.getManagedKey(CUST_ID, "namespace1"); @@ -312,31 +316,39 @@ public void testGenericCacheUsingActiveKeysCacheOverProvider() throws Exception } @Test - public void testActiveKeysCacheSkippingProviderWhenGenericCacheEntriesExist() throws Exception { + public void testThatActiveKeysCache_SkipsProvider_WhenLoadedViaGenericCache() throws Exception { ManagedKeyData key1 = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); assertEquals(key1, cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, key1.getKeyMetadata(), null)); ManagedKeyData key2 = testProvider.getManagedKey(CUST_ID, "namespace1"); assertEquals(key2, cache.getEntry(CUST_ID, "namespace1", key2.getKeyMetadata(), null)); verify(testProvider, times(2)).getManagedKey(any(), any(String.class)); + assertEquals(2, cache.getActiveCacheEntryCount()); clearInvocations(testProvider); assertEquals(key1, cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); - // In this case, the provider is not called because the existing keys in generic cache are - // used first (before checking keymetaAccessor). + assertEquals(key2, cache.getActiveEntry(CUST_ID, "namespace1")); + // ACTIVE keys are automatically added to activeKeysCache when loaded + // via getEntry, so getActiveEntry will find them there and won't call the provider verify(testProvider, never()).getManagedKey(any(), any(String.class)); - assertEquals(1, cache.getActiveCacheEntryCount()); cache.invalidateAll(); assertEquals(0, cache.getActiveCacheEntryCount()); } @Test - public void testActiveKeysCacheIgnnoreFailedKeyInGenericCache() throws Exception { + public void testThatNonActiveKey_IsIgnored_WhenLoadedViaGenericCache() throws Exception { testProvider.setMockedKeyState(ALIAS, FAILED); ManagedKeyData key = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); assertNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, key.getKeyMetadata(), null)); - clearInvocations(testProvider); - testProvider.setMockedKeyState(ALIAS, ACTIVE); - assertNotNull(cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); - verify(testProvider).getManagedKey(any(), any(String.class)); + assertEquals(0, cache.getActiveCacheEntryCount()); + + testProvider.setMockedKeyState(ALIAS, DISABLED); + key = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); + assertNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, key.getKeyMetadata(), null)); + assertEquals(0, cache.getActiveCacheEntryCount()); + + testProvider.setMockedKeyState(ALIAS, INACTIVE); + key = testProvider.getManagedKey(CUST_ID, "namespace1"); + assertEquals(key, cache.getEntry(CUST_ID, "namespace1", key.getKeyMetadata(), null)); + assertEquals(0, cache.getActiveCacheEntryCount()); } @Test @@ -348,6 +360,7 @@ public void testActiveKeysCacheWithMultipleCustodiansInGenericCache() throws Exc ManagedKeyData key2 = testProvider.getManagedKey(cust_id2, KEY_SPACE_GLOBAL); assertNotNull(cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, key2.getKeyMetadata(), null)); assertNotNull(cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); + // ACTIVE keys are automatically added to activeKeysCache when loaded. assertEquals(1, cache.getActiveCacheEntryCount()); } @@ -543,9 +556,7 @@ public void testGenericCacheUsesActiveKeysCacheFirst() throws Exception { } @Test - public void testGetOlerEntryFromGenericCache() throws Exception { - testProvider.setMultikeyGenMode(true); - + public void testGetOlderEntryFromGenericCache() throws Exception { // Get one version of the key in to ActiveKeysCache ManagedKeyData key1 = cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL); assertNotNull(key1); @@ -558,7 +569,7 @@ public void testGetOlerEntryFromGenericCache() throws Exception { } @Test - public void testActiveKeysCacheUsesGenericCacheFirst() throws Exception { + public void testThatActiveKeysCache_PopulatedByGenericCache() throws Exception { // First populate the generic cache with an active key ManagedKeyData key = testProvider.getManagedKey(CUST_ID, KEY_SPACE_GLOBAL); assertEquals(key, cache.getEntry(CUST_ID, KEY_SPACE_GLOBAL, key.getKeyMetadata(), null)); @@ -567,7 +578,7 @@ public void testActiveKeysCacheUsesGenericCacheFirst() throws Exception { // Clear invocations to reset the mock state clearInvocations(testProvider); - // Now get the active entry - it should use the generic cache first, not call keymetaAccessor + // Now get the active entry - it should already be there due to the generic cache first assertEquals(key, cache.getActiveEntry(CUST_ID, KEY_SPACE_GLOBAL)); verify(testProvider, never()).unwrapKey(any(String.class), any()); } @@ -579,6 +590,12 @@ protected void assertGenericCacheEntries(ManagedKeyData... keys) throws Exceptio key.getKeyMetadata(), null)); } assertEquals(keys.length, cache.getGenericCacheEntryCount()); - assertEquals(0, cache.getActiveCacheEntryCount()); + int activeKeysCount = Arrays.stream(keys) + .filter(key -> key.getKeyState() == ManagedKeyState.ACTIVE) + .map(key -> new ManagedKeyDataCache.ActiveKeysCacheKey(key.getKeyCustodian(), + key.getKeyNamespace())) + .collect(Collectors.toSet()) + .size(); + assertEquals(activeKeysCount, cache.getActiveCacheEntryCount()); } } From 59bc07adaa1dec3f544c83c238a8cd81d8a1cc62 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Mon, 21 Jul 2025 12:50:38 +0530 Subject: [PATCH 69/70] Fix for failing tests --- .../hadoop/hbase/keymeta/KeyManagementBase.java | 4 +++- .../hadoop/hbase/keymeta/KeymetaAdminImpl.java | 5 +---- .../hbase/keymeta/KeymetaTableAccessor.java | 6 ++++-- .../hbase/keymeta/ManagedKeyDataCache.java | 3 ++- .../hbase/master/TestKeymetaAdminImpl.java | 16 +++++++--------- 5 files changed, 17 insertions(+), 17 deletions(-) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java index e67b73f7c877..31c770785604 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyManagementBase.java @@ -144,16 +144,18 @@ protected ManagedKeyData retrieveActiveKey(String encKeyCust, byte[] key_cust, if (pbeKey == null) { throw new IOException("Invalid null managed key received from key provider"); } + /* Will be useful when refresh API is implemented. if (existingActiveKey != null && existingActiveKey.equals(pbeKey)) { LOG.info("retrieveManagedKey: no change in key for (custodian: {}, namespace: {}", encKeyCust, keyNamespace); return null; } + // TODO: If existingActiveKey is not null, we should update the key state to INACTIVE. + */ LOG.info("retrieveManagedKey: got managed key with status: {} and metadata: {} for " + "(custodian: {}, namespace: {})", pbeKey.getKeyState(), pbeKey.getKeyMetadata(), encKeyCust, keyNamespace); if (accessor != null) { - // TODO: If existingActiveKey is not null, we should update the key state to INACTIVE. accessor.addKey(pbeKey); } return pbeKey; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java index d8b47fb434d1..02fb31b770e6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaAdminImpl.java @@ -56,10 +56,7 @@ public List enableKeyManagement(String keyCust, String keyNamesp // Retrieve a single key from provider ManagedKeyData retrievedKey = retrieveActiveKey(keyCust, key_cust, keyNamespace, this, null); - if (retrievedKey != null) { - return Collections.singletonList(retrievedKey); - } - return Collections.emptyList(); + return Collections.singletonList(retrievedKey); } @Override diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java index c4330a95f761..08d92a4e1a20 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeymetaTableAccessor.java @@ -22,7 +22,9 @@ import java.security.KeyException; import java.util.ArrayList; import java.util.Arrays; +import java.util.HashSet; import java.util.List; +import java.util.Set; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HConstants; @@ -124,14 +126,14 @@ public List getAllKeys(byte[] key_cust, String keyNamespace) try (Table table = connection.getTable(KEY_META_TABLE_NAME)) { ResultScanner scanner = table.getScanner(scan); - List allKeys = new ArrayList<>(); + Set allKeys = new HashSet<>(); for (Result result : scanner) { ManagedKeyData keyData = parseFromResult(getServer(), key_cust, keyNamespace, result); if (keyData != null) { allKeys.add(keyData); } } - return allKeys; + return allKeys.stream().toList(); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java index 1764759b71f2..87c2195543c2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/ManagedKeyDataCache.java @@ -163,7 +163,8 @@ public ManagedKeyData getEntry(byte[] key_cust, String keyNamespace, String keyM // Also update activeKeysCache if relevant and is missing. if (keyData.getKeyState() == ManagedKeyState.ACTIVE) { - activeKeysCache.asMap().putIfAbsent(new ActiveKeysCacheKey(key_cust, keyNamespace), keyData); + activeKeysCache.asMap().putIfAbsent(new ActiveKeysCacheKey(key_cust, keyNamespace), + keyData); } if (!ManagedKeyState.isUsable(keyData.getKeyState())) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java index 165ff9795dd9..9304029aedf7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestKeymetaAdminImpl.java @@ -164,17 +164,15 @@ public void testEnableAndGet() throws Exception { MockManagedKeyProvider managedKeyProvider = (MockManagedKeyProvider) Encryption.getKeyProvider(conf); managedKeyProvider.setMockedKeyState(CUST, keyState); - List managedKeyStates = + when(keymetaAccessor.getActiveKey(CUST.getBytes(), keySpace)).thenReturn( + managedKeyProvider.getManagedKey(CUST.getBytes(), keySpace)); + + List managedKeys = keymetaAdmin.enableKeyManagement(ENCODED_CUST, keySpace); - assertNotNull(managedKeyStates); - assertEquals(1, managedKeyStates.size()); - assertEquals(keyState, managedKeyStates.get(0).getKeyState()); - verify(keymetaAccessor).addKey(argThat( - (ManagedKeyData keyData) -> assertKeyData(keyData, keyState, - isNullKey ? null : managedKeyProvider.getMockedKey(CUST, - keySpace)))); + assertNotNull(managedKeys); + assertEquals(1, managedKeys.size()); + assertEquals(keyState, managedKeys.get(0).getKeyState()); verify(keymetaAccessor).getActiveKey(CUST.getBytes(), keySpace); - reset(keymetaAccessor); keymetaAdmin.getManagedKeys(ENCODED_CUST, keySpace); verify(keymetaAccessor).getAllKeys(CUST.getBytes(), keySpace); From 370dc407ef3c4107160f1775ef2a523e7f1f6282 Mon Sep 17 00:00:00 2001 From: Hari Dara Date: Tue, 22 Jul 2025 21:57:43 +0530 Subject: [PATCH 70/70] Missing license header --- .../keymeta/ManagedKeyProviderInterceptor.java | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyProviderInterceptor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyProviderInterceptor.java index b7549007b371..8e428c163127 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyProviderInterceptor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/keymeta/ManagedKeyProviderInterceptor.java @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package org.apache.hadoop.hbase.keymeta; import java.io.IOException;